]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/gimple-fold.c
* gimple.h (gimple_build_assign_with_ops): Add unary arg overload.
[thirdparty/gcc.git] / gcc / gimple-fold.c
CommitLineData
2d18b16d 1/* Statement simplification on GIMPLE.
3aea1f79 2 Copyright (C) 2010-2014 Free Software Foundation, Inc.
2d18b16d 3 Split out from tree-ssa-ccp.c.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it
8under the terms of the GNU General Public License as published by the
9Free Software Foundation; either version 3, or (at your option) any
10later version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT
13ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "tm.h"
25#include "tree.h"
9ed99284 26#include "stringpool.h"
27#include "expr.h"
28#include "stmt.h"
29#include "stor-layout.h"
2d18b16d 30#include "flags.h"
a3020f2f 31#include "hashtab.h"
32#include "hash-set.h"
33#include "vec.h"
34#include "machmode.h"
35#include "hard-reg-set.h"
36#include "input.h"
2d18b16d 37#include "function.h"
b9ed1410 38#include "dumpfile.h"
073c1fd5 39#include "bitmap.h"
94ea8568 40#include "predict.h"
41#include "dominance.h"
bc61cadb 42#include "basic-block.h"
43#include "tree-ssa-alias.h"
44#include "internal-fn.h"
45#include "gimple-fold.h"
46#include "gimple-expr.h"
47#include "is-a.h"
073c1fd5 48#include "gimple.h"
a8783bee 49#include "gimplify.h"
dcf1a1ec 50#include "gimple-iterator.h"
073c1fd5 51#include "gimple-ssa.h"
52#include "tree-ssanames.h"
53#include "tree-into-ssa.h"
54#include "tree-dfa.h"
69ee5dbb 55#include "tree-ssa.h"
2d18b16d 56#include "tree-ssa-propagate.h"
2d18b16d 57#include "target.h"
1140c305 58#include "hash-map.h"
59#include "plugin-api.h"
60#include "ipa-ref.h"
61#include "cgraph.h"
10fba9c0 62#include "ipa-utils.h"
63#include "gimple-pretty-print.h"
424a4a92 64#include "tree-ssa-address.h"
0e80b01d 65#include "langhooks.h"
f6a34e3f 66#include "gimplify-me.h"
ceb49bba 67#include "dbgcnt.h"
f7715905 68#include "builtins.h"
b9ea678c 69#include "output.h"
55534d34 70#include "tree-eh.h"
71#include "gimple-match.h"
b768d5ee 72#include "tree-phinodes.h"
73#include "ssa-iterators.h"
2d18b16d 74
/* Return true when DECL can be referenced from current unit.
   FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
	In this case we have static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred by external vtables that
	we devirtualize only during final compilation stage.
	At this time we already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  /* Abstract (origin-only) decls never have a body to refer to.  */
  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
    return true;

  /* Static objects can be referred only if they were not optimized out yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      /* A function whose body was inlined everywhere has no standalone
	 copy left to reference.  */
      node = dyn_cast <cgraph_node *> (snode);
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var or var that has been optimized out.  */
  if (!from_decl
      || TREE_CODE (from_decl) != VAR_DECL
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When function is public, we always can introduce new reference.
     Exception are the COMDAT functions where introducing a direct
     reference imply need to include function body in the current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have COMDAT.  We are going to check if we still have definition
     or if the definition is going to be output in other partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when corresponding vtable is output.
     This is however not possible - ABI specify that COMDATs are output in
     units where they are used and when the other unit was compiled with LTO
     it is possible that vtable was kept public while the function itself
     was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->global.inlined_to;
}
174
/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it into
   acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL.
   Returns the canonicalized value, or NULL_TREE if the value cannot be
   referenced from the current unit.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  /* Rewrite &p + CST into &MEM[p + CST] so the result is a plain
     ADDR_EXPR that is_gimple_min_invariant accepts.  */
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  /* Replace the compound literal with its backing decl.  */
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      /* Give up when the referenced decl is not available in this unit
	 (e.g. a privatized COMDAT from another partition).  */
      if ((TREE_CODE (base) == VAR_DECL
	   || TREE_CODE (base) == FUNCTION_DECL)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_CODE (base) == VAR_DECL)
	TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  /* Constants with TREE_OVERFLOW are not valid GIMPLE invariants.  */
  if (TREE_OVERFLOW_P (cval))
    return drop_tree_overflow (cval);
  return orig_cval;
}
2d18b16d 235
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  /* ctor_for_folding returns error_mark_node when the initializer must
     not be used for folding at all.  */
  tree val = ctor_for_folding (sym);
  if (val != error_mark_node)
    {
      if (val)
	{
	  val = canonicalize_constructor_val (unshare_expr (val), sym);
	  if (val && is_gimple_min_invariant (val))
	    return val;
	  else
	    return NULL_TREE;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
	      || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
	return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}
264
265
2d18b16d 266
/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  Returns the folded tree or
   NULL_TREE if no simplification was possible.  */

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree result;

  /* Unary wrappers around a constant operand fold via fold_unary_loc.  */
  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
			   TREE_CODE (expr),
			   TREE_TYPE (expr),
			   TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
			     TREE_CODE (expr),
			     TREE_TYPE (expr),
			     TREE_OPERAND (expr, 0),
			     TREE_OPERAND (expr, 1),
			     TREE_OPERAND (expr, 2));

  /* Only an rvalue reference may be replaced by a constant.  */
  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  return NULL_TREE;
}
301
302
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	/* Clobbers carry no value to fold.  */
	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		/* With a complete target list of at most one entry we can
		   replace the vtable load by the target's address.  */
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			location_t loc = gimple_location_safe (stmt);
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We can not use __builtin_unreachable here because it
			 can not have address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }

	  }
	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    /* &MEM[p + 0] folds to just p (converted to the rhs type).  */
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		 && (CONSTRUCTOR_NELTS (rhs)
		     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (TREE_CODE (val) != INTEGER_CST
		  && TREE_CODE (val) != REAL_CST
		  && TREE_CODE (val) != FIXED_CST)
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return get_symbol_constant_value (rhs);

	/* If we couldn't fold the RHS, hand over to the generic
	   fold routines.  */
	if (result == NULL_TREE)
	  result = fold (rhs);

	/* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
	   that may have been added by fold, and "useless" type
	   conversions that might now be apparent due to propagation.  */
	STRIP_USELESS_TYPE_CONVERSION (result);

	if (result != rhs && valid_gimple_rhs_p (result))
	  return result;

	return NULL_TREE;
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to canonicalize for boolean-typed X the comparisons
	 X == 0, X == 1, X != 0, and X != 1.  */
      if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	  || gimple_assign_rhs_code (stmt) == NE_EXPR)
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  tree op1 = gimple_assign_rhs1 (stmt);
	  tree op2 = gimple_assign_rhs2 (stmt);
	  tree type = TREE_TYPE (op1);

	  /* Check whether the comparison operands are of the same boolean
	     type as the result type is.
	     Check that second operand is an integer-constant with value
	     one or zero.  */
	  if (TREE_CODE (op2) == INTEGER_CST
	      && (integer_zerop (op2) || integer_onep (op2))
	      && useless_type_conversion_p (TREE_TYPE (lhs), type))
	    {
	      enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
	      bool is_logical_not = false;

	      /* X == 0 and X != 1 is a logical-not of X
		 X == 1 and X != 0 is X  */
	      if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		  || (cmp_code == NE_EXPR && integer_onep (op2)))
		is_logical_not = true;

	      if (is_logical_not == false)
		result = op1;
	      /* Only for one-bit precision typed X the transformation
		 !X -> ~X is valid.  */
	      else if (TYPE_PRECISION (type) == 1)
		result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
				     type, op1);
	      /* Otherwise we use !X -> X ^ 1.  */
	      else
		result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
				     type, op1, build_int_cst (type, 1));

	    }
	}

      if (!result)
	result = fold_binary_loc (loc, subcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)),
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_TERNARY_RHS:
      /* Try to fold a conditional expression.  */
      if (gimple_assign_rhs_code (stmt) == COND_EXPR)
	{
	  tree op0 = gimple_assign_rhs1 (stmt);
	  tree tem;
	  bool set = false;
	  location_t cond_loc = gimple_location (stmt);

	  if (COMPARISON_CLASS_P (op0))
	    {
	      fold_defer_overflow_warnings ();
	      tem = fold_binary_loc (cond_loc,
				     TREE_CODE (op0), TREE_TYPE (op0),
				     TREE_OPERAND (op0, 0),
				     TREE_OPERAND (op0, 1));
	      /* This is actually a conditional expression, not a GIMPLE
		 conditional statement, however, the valid_gimple_rhs_p
		 test still applies.  */
	      set = (tem && is_gimple_condexpr (tem)
		     && valid_gimple_rhs_p (tem));
	      fold_undefer_overflow_warnings (set, stmt, 0);
	    }
	  else if (is_gimple_min_invariant (op0))
	    {
	      tem = op0;
	      set = true;
	    }
	  else
	    return NULL_TREE;

	  if (set)
	    result = fold_build3_loc (cond_loc, COND_EXPR,
				      TREE_TYPE (gimple_assign_lhs (stmt)), tem,
				      gimple_assign_rhs2 (stmt),
				      gimple_assign_rhs3 (stmt));
	}

      if (!result)
	result = fold_ternary_loc (loc, subcode,
				   TREE_TYPE (gimple_assign_lhs (stmt)),
				   gimple_assign_rhs1 (stmt),
				   gimple_assign_rhs2 (stmt),
				   gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
541
542/* Attempt to fold a conditional statement. Return true if any changes were
543 made. We only attempt to fold the condition expression, and do not perform
544 any transformation that would require alteration of the cfg. It is
545 assumed that the operands have been previously folded. */
546
547static bool
548fold_gimple_cond (gimple stmt)
549{
550 tree result = fold_binary_loc (gimple_location (stmt),
551 gimple_cond_code (stmt),
552 boolean_type_node,
553 gimple_cond_lhs (stmt),
554 gimple_cond_rhs (stmt));
555
556 if (result)
557 {
558 STRIP_USELESS_TYPE_CONVERSION (result);
559 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
560 {
561 gimple_cond_set_condition_from_tree (stmt, result);
562 return true;
563 }
564 }
565
566 return false;
567}
568
b9ea678c 569
/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  /* The last store in the sequence inherits the replaced
	     statement's VDEF; earlier stores get fresh SSA names.  */
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
641
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL
   is replaced.  If the call is expected to produces a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), true);
	  return;
	}
    }
  else
    {
      /* Gimplify EXPR into a temporary and assign it to the call's LHS
	 as the last statement of the sequence.  */
      tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  /* Splice the sequence in, threading virtual operands through it.  */
  gsi_replace_with_seq_vops (si_p, stmts);
}
2d18b16d 697
b9ea678c 698
699/* Replace the call at *GSI with the gimple value VAL. */
700
701static void
702replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
703{
704 gimple stmt = gsi_stmt (*gsi);
705 tree lhs = gimple_call_lhs (stmt);
706 gimple repl;
707 if (lhs)
34e73149 708 {
b9ea678c 709 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
710 val = fold_convert (TREE_TYPE (lhs), val);
711 repl = gimple_build_assign (lhs, val);
712 }
713 else
714 repl = gimple_build_nop ();
715 tree vdef = gimple_vdef (stmt);
716 if (vdef && TREE_CODE (vdef) == SSA_NAME)
717 {
718 unlink_stmt_vdef (stmt);
719 release_ssa_name (vdef);
720 }
721 gsi_replace (gsi, repl, true);
722}
723
/* Replace the call at *GSI with the new call REPL and fold that
   again.  */

static void
replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple repl)
{
  gimple stmt = gsi_stmt (*gsi);
  /* The new call inherits the old call's LHS, location and virtual
     operands so SSA form stays intact.  */
  gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
  gimple_set_location (repl, gimple_location (stmt));
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
    {
      gimple_set_vdef (repl, gimple_vdef (stmt));
      gimple_set_vuse (repl, gimple_vuse (stmt));
      SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
    }
  gsi_replace (gsi, repl, true);
  fold_stmt (gsi);
}
743
744/* Return true if VAR is a VAR_DECL or a component thereof. */
745
746static bool
747var_decl_component_p (tree var)
748{
749 tree inner = var;
750 while (handled_component_p (inner))
751 inner = TREE_OPERAND (inner, 0);
752 return SSA_VAR_P (inner);
753}
754
755/* Fold function call to builtin mem{{,p}cpy,move}. Return
756 NULL_TREE if no simplification can be made.
757 If ENDP is 0, return DEST (like memcpy).
758 If ENDP is 1, return DEST+LEN (like mempcpy).
759 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
760 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
761 (memmove). */
762
763static bool
764gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
765 tree dest, tree src, int endp)
766{
767 gimple stmt = gsi_stmt (*gsi);
768 tree lhs = gimple_call_lhs (stmt);
769 tree len = gimple_call_arg (stmt, 2);
770 tree destvar, srcvar;
771 location_t loc = gimple_location (stmt);
772
773 /* If the LEN parameter is zero, return DEST. */
774 if (integer_zerop (len))
775 {
776 gimple repl;
777 if (gimple_call_lhs (stmt))
778 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
779 else
780 repl = gimple_build_nop ();
781 tree vdef = gimple_vdef (stmt);
782 if (vdef && TREE_CODE (vdef) == SSA_NAME)
34e73149 783 {
b9ea678c 784 unlink_stmt_vdef (stmt);
785 release_ssa_name (vdef);
786 }
787 gsi_replace (gsi, repl, true);
788 return true;
789 }
790
791 /* If SRC and DEST are the same (and not volatile), return
792 DEST{,+LEN,+LEN-1}. */
793 if (operand_equal_p (src, dest, 0))
794 {
795 unlink_stmt_vdef (stmt);
796 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
797 release_ssa_name (gimple_vdef (stmt));
798 if (!lhs)
799 {
800 gsi_replace (gsi, gimple_build_nop (), true);
801 return true;
802 }
803 goto done;
804 }
805 else
806 {
807 tree srctype, desttype;
808 unsigned int src_align, dest_align;
809 tree off0;
810
811 /* Build accesses at offset zero with a ref-all character type. */
812 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
813 ptr_mode, true), 0);
814
815 /* If we can perform the copy efficiently with first doing all loads
816 and then all stores inline it that way. Currently efficiently
817 means that we can load all the memory into a single integer
818 register which is what MOVE_MAX gives us. */
819 src_align = get_pointer_alignment (src);
820 dest_align = get_pointer_alignment (dest);
821 if (tree_fits_uhwi_p (len)
822 && compare_tree_int (len, MOVE_MAX) <= 0
823 /* ??? Don't transform copies from strings with known length this
824 confuses the tree-ssa-strlen.c. This doesn't handle
825 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
826 reason. */
827 && !c_strlen (src, 2))
828 {
829 unsigned ilen = tree_to_uhwi (len);
830 if (exact_log2 (ilen) != -1)
831 {
832 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
833 if (type
834 && TYPE_MODE (type) != BLKmode
835 && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
836 == ilen * 8)
837 /* If the destination pointer is not aligned we must be able
838 to emit an unaligned store. */
839 && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
840 || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type), dest_align)))
841 {
842 tree srctype = type;
843 tree desttype = type;
844 if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
845 srctype = build_aligned_type (type, src_align);
846 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
847 tree tem = fold_const_aggregate_ref (srcmem);
848 if (tem)
849 srcmem = tem;
850 else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
851 && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
852 src_align))
853 srcmem = NULL_TREE;
854 if (srcmem)
855 {
856 gimple new_stmt;
857 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
858 {
859 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
860 if (gimple_in_ssa_p (cfun))
861 srcmem = make_ssa_name (TREE_TYPE (srcmem),
862 new_stmt);
863 else
864 srcmem = create_tmp_reg (TREE_TYPE (srcmem),
865 NULL);
866 gimple_assign_set_lhs (new_stmt, srcmem);
867 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
868 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
869 }
870 if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
871 desttype = build_aligned_type (type, dest_align);
872 new_stmt
873 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
874 dest, off0),
875 srcmem);
876 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
877 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
878 if (gimple_vdef (new_stmt)
879 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
880 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
881 if (!lhs)
882 {
883 gsi_replace (gsi, new_stmt, true);
884 return true;
885 }
886 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
887 goto done;
888 }
889 }
890 }
891 }
892
893 if (endp == 3)
894 {
895 /* Both DEST and SRC must be pointer types.
896 ??? This is what old code did. Is the testing for pointer types
897 really mandatory?
898
899 If either SRC is readonly or length is 1, we can use memcpy. */
900 if (!dest_align || !src_align)
901 return false;
902 if (readonly_data_expr (src)
903 || (tree_fits_uhwi_p (len)
904 && (MIN (src_align, dest_align) / BITS_PER_UNIT
905 >= tree_to_uhwi (len))))
906 {
907 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
908 if (!fn)
909 return false;
910 gimple_call_set_fndecl (stmt, fn);
911 gimple_call_set_arg (stmt, 0, dest);
912 gimple_call_set_arg (stmt, 1, src);
913 fold_stmt (gsi);
914 return true;
915 }
916
917 /* If *src and *dest can't overlap, optimize into memcpy as well. */
918 if (TREE_CODE (src) == ADDR_EXPR
919 && TREE_CODE (dest) == ADDR_EXPR)
920 {
921 tree src_base, dest_base, fn;
922 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
923 HOST_WIDE_INT size = -1;
924 HOST_WIDE_INT maxsize = -1;
925
926 srcvar = TREE_OPERAND (src, 0);
927 src_base = get_ref_base_and_extent (srcvar, &src_offset,
928 &size, &maxsize);
929 destvar = TREE_OPERAND (dest, 0);
930 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
931 &size, &maxsize);
932 if (tree_fits_uhwi_p (len))
933 maxsize = tree_to_uhwi (len);
934 else
935 maxsize = -1;
936 src_offset /= BITS_PER_UNIT;
937 dest_offset /= BITS_PER_UNIT;
938 if (SSA_VAR_P (src_base)
939 && SSA_VAR_P (dest_base))
940 {
941 if (operand_equal_p (src_base, dest_base, 0)
942 && ranges_overlap_p (src_offset, maxsize,
943 dest_offset, maxsize))
944 return false;
945 }
946 else if (TREE_CODE (src_base) == MEM_REF
947 && TREE_CODE (dest_base) == MEM_REF)
948 {
949 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
950 TREE_OPERAND (dest_base, 0), 0))
951 return false;
952 offset_int off = mem_ref_offset (src_base) + src_offset;
953 if (!wi::fits_shwi_p (off))
954 return false;
955 src_offset = off.to_shwi ();
956
957 off = mem_ref_offset (dest_base) + dest_offset;
958 if (!wi::fits_shwi_p (off))
959 return false;
960 dest_offset = off.to_shwi ();
961 if (ranges_overlap_p (src_offset, maxsize,
962 dest_offset, maxsize))
963 return false;
964 }
965 else
966 return false;
967
968 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
969 if (!fn)
970 return false;
971 gimple_call_set_fndecl (stmt, fn);
972 gimple_call_set_arg (stmt, 0, dest);
973 gimple_call_set_arg (stmt, 1, src);
974 fold_stmt (gsi);
975 return true;
976 }
977
978 /* If the destination and source do not alias optimize into
979 memcpy as well. */
980 if ((is_gimple_min_invariant (dest)
981 || TREE_CODE (dest) == SSA_NAME)
982 && (is_gimple_min_invariant (src)
983 || TREE_CODE (src) == SSA_NAME))
984 {
985 ao_ref destr, srcr;
986 ao_ref_init_from_ptr_and_size (&destr, dest, len);
987 ao_ref_init_from_ptr_and_size (&srcr, src, len);
988 if (!refs_may_alias_p_1 (&destr, &srcr, false))
989 {
990 tree fn;
991 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
992 if (!fn)
993 return false;
994 gimple_call_set_fndecl (stmt, fn);
995 gimple_call_set_arg (stmt, 0, dest);
996 gimple_call_set_arg (stmt, 1, src);
997 fold_stmt (gsi);
998 return true;
999 }
1000 }
1001
1002 return false;
1003 }
1004
1005 if (!tree_fits_shwi_p (len))
1006 return false;
1007 /* FIXME:
1008 This logic lose for arguments like (type *)malloc (sizeof (type)),
1009 since we strip the casts of up to VOID return value from malloc.
1010 Perhaps we ought to inherit type from non-VOID argument here? */
1011 STRIP_NOPS (src);
1012 STRIP_NOPS (dest);
1013 if (!POINTER_TYPE_P (TREE_TYPE (src))
1014 || !POINTER_TYPE_P (TREE_TYPE (dest)))
1015 return false;
1016 /* In the following try to find a type that is most natural to be
1017 used for the memcpy source and destination and that allows
1018 the most optimization when memcpy is turned into a plain assignment
1019 using that type. In theory we could always use a char[len] type
1020 but that only gains us that the destination and source possibly
1021 no longer will have their address taken. */
1022 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
1023 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
1024 {
1025 tree tem = TREE_OPERAND (src, 0);
1026 STRIP_NOPS (tem);
1027 if (tem != TREE_OPERAND (src, 0))
1028 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
1029 }
1030 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
1031 {
1032 tree tem = TREE_OPERAND (dest, 0);
1033 STRIP_NOPS (tem);
1034 if (tem != TREE_OPERAND (dest, 0))
1035 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
1036 }
1037 srctype = TREE_TYPE (TREE_TYPE (src));
1038 if (TREE_CODE (srctype) == ARRAY_TYPE
1039 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1040 {
1041 srctype = TREE_TYPE (srctype);
1042 STRIP_NOPS (src);
1043 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
1044 }
1045 desttype = TREE_TYPE (TREE_TYPE (dest));
1046 if (TREE_CODE (desttype) == ARRAY_TYPE
1047 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
1048 {
1049 desttype = TREE_TYPE (desttype);
1050 STRIP_NOPS (dest);
1051 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
1052 }
1053 if (TREE_ADDRESSABLE (srctype)
1054 || TREE_ADDRESSABLE (desttype))
1055 return false;
1056
1057 /* Make sure we are not copying using a floating-point mode or
1058 a type whose size possibly does not match its precision. */
1059 if (FLOAT_MODE_P (TYPE_MODE (desttype))
1060 || TREE_CODE (desttype) == BOOLEAN_TYPE
1061 || TREE_CODE (desttype) == ENUMERAL_TYPE)
1062 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
1063 if (FLOAT_MODE_P (TYPE_MODE (srctype))
1064 || TREE_CODE (srctype) == BOOLEAN_TYPE
1065 || TREE_CODE (srctype) == ENUMERAL_TYPE)
1066 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
1067 if (!srctype)
1068 srctype = desttype;
1069 if (!desttype)
1070 desttype = srctype;
1071 if (!srctype)
1072 return false;
1073
1074 src_align = get_pointer_alignment (src);
1075 dest_align = get_pointer_alignment (dest);
1076 if (dest_align < TYPE_ALIGN (desttype)
1077 || src_align < TYPE_ALIGN (srctype))
1078 return false;
1079
1080 destvar = dest;
1081 STRIP_NOPS (destvar);
1082 if (TREE_CODE (destvar) == ADDR_EXPR
1083 && var_decl_component_p (TREE_OPERAND (destvar, 0))
1084 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
1085 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
1086 else
1087 destvar = NULL_TREE;
1088
1089 srcvar = src;
1090 STRIP_NOPS (srcvar);
1091 if (TREE_CODE (srcvar) == ADDR_EXPR
1092 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
1093 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1094 {
1095 if (!destvar
1096 || src_align >= TYPE_ALIGN (desttype))
1097 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
1098 srcvar, off0);
1099 else if (!STRICT_ALIGNMENT)
1100 {
1101 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1102 src_align);
1103 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
1104 }
34e73149 1105 else
b9ea678c 1106 srcvar = NULL_TREE;
1107 }
1108 else
1109 srcvar = NULL_TREE;
1110
1111 if (srcvar == NULL_TREE && destvar == NULL_TREE)
1112 return false;
1113
1114 if (srcvar == NULL_TREE)
1115 {
1116 STRIP_NOPS (src);
1117 if (src_align >= TYPE_ALIGN (desttype))
1118 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1119 else
1120 {
1121 if (STRICT_ALIGNMENT)
1122 return false;
1123 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1124 src_align);
1125 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1126 }
1127 }
1128 else if (destvar == NULL_TREE)
1129 {
1130 STRIP_NOPS (dest);
1131 if (dest_align >= TYPE_ALIGN (srctype))
1132 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1133 else
1134 {
1135 if (STRICT_ALIGNMENT)
1136 return false;
1137 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1138 dest_align);
1139 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1140 }
1141 }
1142
1143 gimple new_stmt;
1144 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1145 {
1146 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1147 if (gimple_in_ssa_p (cfun))
1148 srcvar = make_ssa_name (TREE_TYPE (srcvar), new_stmt);
1149 else
1150 srcvar = create_tmp_reg (TREE_TYPE (srcvar), NULL);
1151 gimple_assign_set_lhs (new_stmt, srcvar);
1152 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1153 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1154 }
1155 new_stmt = gimple_build_assign (destvar, srcvar);
1156 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1157 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1158 if (gimple_vdef (new_stmt)
1159 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1160 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1161 if (!lhs)
1162 {
1163 gsi_replace (gsi, new_stmt, true);
1164 return true;
1165 }
1166 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1167 }
1168
1169done:
1170 if (endp == 0 || endp == 3)
1171 len = NULL_TREE;
1172 else if (endp == 2)
1173 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
1174 ssize_int (1));
1175 if (endp == 2 || endp == 1)
1176 dest = fold_build_pointer_plus_loc (loc, dest, len);
1177
1178 dest = force_gimple_operand_gsi (gsi, dest, false, NULL_TREE, true,
1179 GSI_SAME_STMT);
1180 gimple repl = gimple_build_assign (lhs, dest);
1181 gsi_replace (gsi, repl, true);
1182 return true;
1183}
1184
1185/* Fold function call to builtin memset or bzero at *GSI setting the
1186 memory of size LEN to VAL. Return whether a simplification was made. */
1187
1188static bool
1189gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1190{
1191 gimple stmt = gsi_stmt (*gsi);
1192 tree etype;
1193 unsigned HOST_WIDE_INT length, cval;
1194
1195 /* If the LEN parameter is zero, return DEST. */
1196 if (integer_zerop (len))
1197 {
1198 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1199 return true;
1200 }
1201
1202 if (! tree_fits_uhwi_p (len))
1203 return false;
1204
1205 if (TREE_CODE (c) != INTEGER_CST)
1206 return false;
1207
1208 tree dest = gimple_call_arg (stmt, 0);
1209 tree var = dest;
1210 if (TREE_CODE (var) != ADDR_EXPR)
1211 return false;
1212
1213 var = TREE_OPERAND (var, 0);
1214 if (TREE_THIS_VOLATILE (var))
1215 return false;
1216
1217 etype = TREE_TYPE (var);
1218 if (TREE_CODE (etype) == ARRAY_TYPE)
1219 etype = TREE_TYPE (etype);
1220
1221 if (!INTEGRAL_TYPE_P (etype)
1222 && !POINTER_TYPE_P (etype))
1223 return NULL_TREE;
1224
1225 if (! var_decl_component_p (var))
1226 return NULL_TREE;
1227
1228 length = tree_to_uhwi (len);
1229 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
1230 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1231 return NULL_TREE;
1232
1233 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1234 return NULL_TREE;
1235
1236 if (integer_zerop (c))
1237 cval = 0;
1238 else
1239 {
1240 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1241 return NULL_TREE;
1242
1243 cval = TREE_INT_CST_LOW (c);
1244 cval &= 0xff;
1245 cval |= cval << 8;
1246 cval |= cval << 16;
1247 cval |= (cval << 31) << 1;
1248 }
1249
1250 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1251 gimple store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1252 gimple_set_vuse (store, gimple_vuse (stmt));
1253 tree vdef = gimple_vdef (stmt);
1254 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1255 {
1256 gimple_set_vdef (store, gimple_vdef (stmt));
1257 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1258 }
1259 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1260 if (gimple_call_lhs (stmt))
1261 {
1262 gimple asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1263 gsi_replace (gsi, asgn, true);
1264 }
1265 else
1266 {
1267 gimple_stmt_iterator gsi2 = *gsi;
1268 gsi_prev (gsi);
1269 gsi_remove (&gsi2, true);
1270 }
1271
1272 return true;
1273}
1274
1275
/* Compute the string length, maximum string length or maximum value of
   ARG, accumulating the result in *LENGTH.
   If ARG is an SSA name, follow its use-def chain.  If *LENGTH is
   already set and, for TYPE == 0, its value is not equal to the length
   we determine, or if we are unable to determine the length or value at
   all, return false.  VISITED is a bitmap of visited SSA names; it is
   allocated lazily on first need and must be freed by the caller.
   TYPE is 0 if the exact string length should be returned, 1 for the
   maximum string length and 2 for the maximum value ARG can have.  */

static bool
get_maxval_strlen (tree arg, tree *length, bitmap *visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	  && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_maxval_strlen (TREE_OPERAND (aop0, 0),
				      length, visited, type);
	}

      if (type == 2)
	{
	  /* Maximum-value query: only non-negative integer constants
	     are acceptable.  */
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length)
	{
	  /* Merge VAL with a length recorded by an earlier call.  */
	  if (type > 0)
	    {
	      /* Maximum queries: keep the larger of the two, which
		 requires both to be integer constants.  */
	      if (TREE_CODE (*length) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*length, val))
		*length = val;
	      return true;
	    }
	  /* Exact-length queries: all sources must agree.  */
	  else if (simple_cst_equal (val, *length) != 1)
	    return false;
	}

      *length = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
	 constant length or come from another SSA_NAME with a constant
	 length.  */
      if (gimple_assign_single_p (def_stmt)
	  || gimple_assign_unary_nop_p (def_stmt))
	{
	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  return get_maxval_strlen (rhs, length, visited, type);
	}
      else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	{
	  /* Both arms must yield a usable length; note the short
	     circuit: op3 is only examined after op2 succeeded and has
	     folded its result into *LENGTH.  */
	  tree op2 = gimple_assign_rhs2 (def_stmt);
	  tree op3 = gimple_assign_rhs3 (def_stmt);
	  return get_maxval_strlen (op2, length, visited, type)
		 && get_maxval_strlen (op3, length, visited, type);
	}
      return false;

    case GIMPLE_PHI:
      {
	/* All the arguments of the PHI node must have the same constant
	   length.  */
	unsigned i;

	for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	  {
	    tree arg = gimple_phi_arg (def_stmt, i)->def;

	    /* If this PHI has itself as an argument, we cannot
	       determine the string length of this argument.  However,
	       if we can find a constant string length for the other
	       PHI args then we can still be sure that this is a
	       constant string length.  So be optimistic and just
	       continue with the next argument.  */
	    if (arg == gimple_phi_result (def_stmt))
	      continue;

	    if (!get_maxval_strlen (arg, length, visited, type))
	      return false;
	  }
      }
      return true;

    default:
      return false;
    }
}
1401
2ccb7428 1402tree
1403get_maxval_strlen (tree arg, int type)
1404{
1405 bitmap visited = NULL;
1406 tree len = NULL_TREE;
1407 if (!get_maxval_strlen (arg, &len, &visited, type))
1408 len = NULL_TREE;
1409 if (visited)
1410 BITMAP_FREE (visited);
1411
1412 return len;
1413}
1414
b9ea678c 1415
1416/* Fold function call to builtin strcpy with arguments DEST and SRC.
1417 If LEN is not NULL, it represents the length of the string to be
1418 copied. Return NULL_TREE if no simplification can be made. */
1419
1420static bool
1421gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
2ccb7428 1422 tree dest, tree src)
b9ea678c 1423{
2ccb7428 1424 location_t loc = gimple_location (gsi_stmt (*gsi));
b9ea678c 1425 tree fn;
1426
1427 /* If SRC and DEST are the same (and not volatile), return DEST. */
1428 if (operand_equal_p (src, dest, 0))
1429 {
1430 replace_call_with_value (gsi, dest);
1431 return true;
1432 }
1433
1434 if (optimize_function_for_size_p (cfun))
1435 return false;
1436
1437 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1438 if (!fn)
1439 return false;
1440
c1920e9d 1441 tree len = get_maxval_strlen (src, 0);
b9ea678c 1442 if (!len)
2ccb7428 1443 return false;
b9ea678c 1444
1445 len = fold_convert_loc (loc, size_type_node, len);
1446 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1447 len = force_gimple_operand_gsi (gsi, len, true,
1448 NULL_TREE, true, GSI_SAME_STMT);
1449 gimple repl = gimple_build_call (fn, 3, dest, src, len);
1450 replace_call_with_call_and_fold (gsi, repl);
1451 return true;
1452}
1453
1454/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1455 If SLEN is not NULL, it represents the length of the source string.
1456 Return NULL_TREE if no simplification can be made. */
1457
1458static bool
2ccb7428 1459gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1460 tree dest, tree src, tree len)
b9ea678c 1461{
2ccb7428 1462 location_t loc = gimple_location (gsi_stmt (*gsi));
b9ea678c 1463 tree fn;
1464
1465 /* If the LEN parameter is zero, return DEST. */
1466 if (integer_zerop (len))
1467 {
1468 replace_call_with_value (gsi, dest);
1469 return true;
1470 }
1471
1472 /* We can't compare slen with len as constants below if len is not a
1473 constant. */
2ccb7428 1474 if (TREE_CODE (len) != INTEGER_CST)
b9ea678c 1475 return false;
1476
b9ea678c 1477 /* Now, we must be passed a constant src ptr parameter. */
c1920e9d 1478 tree slen = get_maxval_strlen (src, 0);
2ccb7428 1479 if (!slen || TREE_CODE (slen) != INTEGER_CST)
b9ea678c 1480 return false;
1481
1482 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1483
1484 /* We do not support simplification of this case, though we do
1485 support it when expanding trees into RTL. */
1486 /* FIXME: generate a call to __builtin_memset. */
1487 if (tree_int_cst_lt (slen, len))
1488 return false;
1489
1490 /* OK transform into builtin memcpy. */
1491 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1492 if (!fn)
1493 return false;
1494
1495 len = fold_convert_loc (loc, size_type_node, len);
1496 len = force_gimple_operand_gsi (gsi, len, true,
1497 NULL_TREE, true, GSI_SAME_STMT);
1498 gimple repl = gimple_build_call (fn, 3, dest, src, len);
1499 replace_call_with_call_and_fold (gsi, repl);
1500 return true;
1501}
1502
/* Simplify a call to the strcat builtin at *GSI.  DST and SRC are the
   arguments to the call.

   When SRC is the empty string the call is replaced by DST.  Otherwise,
   when the length of SRC is a known constant and we are optimizing for
   speed, the call is split in place into
       tmp = strlen (DST);  memcpy (DST + tmp, SRC, len + 1);
   Return true iff the statement was changed.  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* The strlen+memcpy split is a speed optimization only.  */
  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, 0);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  if (gimple_in_ssa_p (cfun))
    newdst = make_ssa_name (size_type_node, NULL);
  else
    newdst = create_tmp_reg (size_type_node, NULL);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  /* Copy len + 1 bytes so SRC's terminating NUL comes along.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      /* strcat returns DST: materialize that for the call's LHS.  */
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
	 stmt iterator to the memcpy call.
	 ??? We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
1597
95f5ab8d 1598/* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1599 are the arguments to the call. */
1600
1601static bool
1602gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1603{
1604 gimple stmt = gsi_stmt (*gsi);
1605 tree dest = gimple_call_arg (stmt, 0);
1606 tree src = gimple_call_arg (stmt, 1);
1607 tree size = gimple_call_arg (stmt, 2);
1608 tree fn;
1609 const char *p;
1610
1611
1612 p = c_getstr (src);
1613 /* If the SRC parameter is "", return DEST. */
1614 if (p && *p == '\0')
1615 {
1616 replace_call_with_value (gsi, dest);
1617 return true;
1618 }
1619
1620 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1621 return false;
1622
1623 /* If __builtin_strcat_chk is used, assume strcat is available. */
1624 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1625 if (!fn)
1626 return false;
1627
1628 gimple repl = gimple_build_call (fn, 2, dest, src);
1629 replace_call_with_call_and_fold (gsi, repl);
1630 return true;
1631}
1632
396b19bc 1633/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
1634 LEN, and SIZE. */
1635
1636static bool
1637gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
1638{
1639 gimple stmt = gsi_stmt (*gsi);
1640 tree dest = gimple_call_arg (stmt, 0);
1641 tree src = gimple_call_arg (stmt, 1);
1642 tree len = gimple_call_arg (stmt, 2);
1643 tree size = gimple_call_arg (stmt, 3);
1644 tree fn;
1645 const char *p;
1646
1647 p = c_getstr (src);
1648 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
1649 if ((p && *p == '\0')
1650 || integer_zerop (len))
1651 {
1652 replace_call_with_value (gsi, dest);
1653 return true;
1654 }
1655
1656 if (! tree_fits_uhwi_p (size))
1657 return false;
1658
1659 if (! integer_all_onesp (size))
1660 {
1661 tree src_len = c_strlen (src, 1);
1662 if (src_len
1663 && tree_fits_uhwi_p (src_len)
1664 && tree_fits_uhwi_p (len)
1665 && ! tree_int_cst_lt (len, src_len))
1666 {
1667 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
1668 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
1669 if (!fn)
1670 return false;
1671
1672 gimple repl = gimple_build_call (fn, 3, dest, src, size);
1673 replace_call_with_call_and_fold (gsi, repl);
1674 return true;
1675 }
1676 return false;
1677 }
1678
1679 /* If __builtin_strncat_chk is used, assume strncat is available. */
1680 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
1681 if (!fn)
1682 return false;
1683
1684 gimple repl = gimple_build_call (fn, 3, dest, src, len);
1685 replace_call_with_call_and_fold (gsi, repl);
1686 return true;
1687}
1688
/* Fold a call to the fputs builtin at *GSI.  ARG0 is the string
   argument and ARG1 the stream.  UNLOCKED is true if this is really a
   call to fputs_unlocked, in which case the unlocked variants of fputc
   and fwrite are substituted.  The call is only folded when its return
   value is unused.  Return true iff the statement was changed.  */

static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
			   tree arg0, tree arg1,
			   bool unlocked)
{
  gimple stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
			 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
			 : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
			  ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
			  : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, 0);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely .  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
	const char *p = c_getstr (arg0);
	if (p != NULL)
	  {
	    if (!fn_fputc)
	      return false;

	    gimple repl = gimple_build_call (fn_fputc, 2,
					     build_int_cst
					     (integer_type_node, p[0]), arg1);
	    replace_call_with_call_and_fold (gsi, repl);
	    return true;
	  }
      }
      /* FALLTHROUGH: a one-char string whose contents we cannot read
	 is handled like the general case below.  */
    case 1: /* length is greater than 1, call fwrite.  */
      {
	/* If optimizing for size keep fputs.  */
	if (optimize_function_for_size_p (cfun))
	  return false;
	/* New argument list transforming fputs(string, stream) to
	   fwrite(string, 1, len, stream).  */
	if (!fn_fwrite)
	  return false;

	gimple repl = gimple_build_call (fn_fwrite, 4, arg0,
					 size_one_node, len, arg1);
	replace_call_with_call_and_fold (gsi, repl);
	return true;
      }
    default:
      gcc_unreachable ();
    }
  return false;
}
1765
/* Fold a call to a __mem{cpy,pcpy,move,set}_chk builtin at *GSI.
   DEST, SRC, LEN, and SIZE are the arguments to the call; SIZE is the
   statically-determined destination object size the _chk variant
   guards against.  FCODE is the BUILT_IN_* code of the builtin being
   folded.  The call is replaced by the plain mem* function when the
   access is provably in bounds (SIZE is all-ones, i.e. unknown, or
   SIZE >= LEN).  Return true iff the statement was changed.  */

static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
				tree dest, tree src, tree len, tree size,
				enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  /* The call's result is unused when it has no LHS.  */
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	{
	  replace_call_with_value (gsi, dest);
	  return true;
	}
      else
	{
	  /* __mempcpy_chk returns DEST + LEN.  */
	  tree temp = fold_build_pointer_plus_loc (loc, dest, len);
	  temp = force_gimple_operand_gsi (gsi, temp,
					   false, NULL_TREE, true,
					   GSI_SAME_STMT);
	  replace_call_with_value (gsi, temp);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  /* MAXLEN is an upper bound for LEN derived from its SSA def chain.  */
  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
		  if (!fn)
		    return false;

		  gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	      return false;
	    }
	}
      else
	maxlen = len;

      /* The access may overflow the destination: keep the check.  */
      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1865
/* Fold a call to a __st[rp]cpy_chk builtin at *GSI.
   DEST, SRC, and SIZE are the arguments to the call; SIZE is the
   statically-determined destination object size.  FCODE is either
   BUILT_IN_STRCPY_CHK or BUILT_IN_STPCPY_CHK.  The call is replaced
   by the unchecked st[rp]cpy when the copy is provably in bounds;
   failing that, weaker foldings into __strcpy_chk or __memcpy_chk
   are attempted.  Return true iff the statement was changed.  */

static bool
gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
				tree dest,
				tree src, tree size,
				enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  /* The call's result is unused when it has no LHS.  */
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree len, fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  /* MAXLEN is an upper bound for the length of SRC.  */
  tree maxlen = get_maxval_strlen (src, 1);
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return false;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
		  if (!fn)
		    return false;

		  gimple repl = gimple_build_call (fn, 3, dest, src, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return false;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return false;

	      /* Copy len + 1 bytes to include the terminating NUL.  */
	      len = fold_convert_loc (loc, size_type_node, len);
	      len = size_binop_loc (loc, PLUS_EXPR, len,
				    build_int_cst (size_type_node, 1));
	      len = force_gimple_operand_gsi (gsi, len, true, NULL_TREE,
					      true, GSI_SAME_STMT);
	      gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	maxlen = len;

      /* The string (plus NUL) may not fit: keep the check.  */
      if (! tree_int_cst_lt (maxlen, size))
	return false;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1956
1957/* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
1958 are the arguments to the call. If MAXLEN is not NULL, it is maximum
1959 length passed as third argument. IGNORE is true if return value can be
1960 ignored. FCODE is the BUILT_IN_* code of the builtin. */
1961
1962static bool
1963gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
1964 tree dest, tree src,
2ccb7428 1965 tree len, tree size,
b9ea678c 1966 enum built_in_function fcode)
1967{
2ccb7428 1968 gimple stmt = gsi_stmt (*gsi);
1969 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
b9ea678c 1970 tree fn;
1971
1972 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
2d18b16d 1973 {
b9ea678c 1974 /* If return value of __stpncpy_chk is ignored,
1975 optimize into __strncpy_chk. */
1976 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
1977 if (fn)
1978 {
1979 gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
1980 replace_call_with_call_and_fold (gsi, repl);
1981 return true;
1982 }
2d18b16d 1983 }
1984
b9ea678c 1985 if (! tree_fits_uhwi_p (size))
1986 return false;
1987
2ccb7428 1988 tree maxlen = get_maxval_strlen (len, 2);
b9ea678c 1989 if (! integer_all_onesp (size))
2d18b16d 1990 {
b9ea678c 1991 if (! tree_fits_uhwi_p (len))
3aadae2d 1992 {
b9ea678c 1993 /* If LEN is not constant, try MAXLEN too.
1994 For MAXLEN only allow optimizing into non-_ocs function
1995 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1996 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1997 return false;
78a0527a 1998 }
b9ea678c 1999 else
2000 maxlen = len;
2001
2002 if (tree_int_cst_lt (size, maxlen))
2003 return false;
2d18b16d 2004 }
2005
b9ea678c 2006 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2007 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
2008 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
2009 if (!fn)
2010 return false;
2011
2012 gimple repl = gimple_build_call (fn, 3, dest, src, len);
2013 replace_call_with_call_and_fold (gsi, repl);
2014 return true;
2d18b16d 2015}
2016
b9ea678c 2017/* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
2018 NULL_TREE if a normal call should be emitted rather than expanding
2019 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
2020 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
2021 passed as second argument. */
2d18b16d 2022
2023static bool
b9ea678c 2024gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
2ccb7428 2025 enum built_in_function fcode)
2d18b16d 2026{
b9ea678c 2027 gimple stmt = gsi_stmt (*gsi);
2028 tree dest, size, len, fn, fmt, flag;
2029 const char *fmt_str;
2d18b16d 2030
b9ea678c 2031 /* Verify the required arguments in the original call. */
2032 if (gimple_call_num_args (stmt) < 5)
2033 return false;
2d18b16d 2034
b9ea678c 2035 dest = gimple_call_arg (stmt, 0);
2036 len = gimple_call_arg (stmt, 1);
2037 flag = gimple_call_arg (stmt, 2);
2038 size = gimple_call_arg (stmt, 3);
2039 fmt = gimple_call_arg (stmt, 4);
2040
2041 if (! tree_fits_uhwi_p (size))
2042 return false;
2043
2044 if (! integer_all_onesp (size))
2045 {
2ccb7428 2046 tree maxlen = get_maxval_strlen (len, 2);
b9ea678c 2047 if (! tree_fits_uhwi_p (len))
2d18b16d 2048 {
b9ea678c 2049 /* If LEN is not constant, try MAXLEN too.
2050 For MAXLEN only allow optimizing into non-_ocs function
2051 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2052 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2d18b16d 2053 return false;
2054 }
2055 else
b9ea678c 2056 maxlen = len;
2d18b16d 2057
b9ea678c 2058 if (tree_int_cst_lt (size, maxlen))
2059 return false;
2060 }
2d18b16d 2061
b9ea678c 2062 if (!init_target_chars ())
2063 return false;
2d18b16d 2064
b9ea678c 2065 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2066 or if format doesn't contain % chars or is "%s". */
2067 if (! integer_zerop (flag))
2068 {
2069 fmt_str = c_getstr (fmt);
2070 if (fmt_str == NULL)
2071 return false;
2072 if (strchr (fmt_str, target_percent) != NULL
2073 && strcmp (fmt_str, target_percent_s))
2074 return false;
2d18b16d 2075 }
2076
b9ea678c 2077 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2078 available. */
2079 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2080 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2081 if (!fn)
1a06b503 2082 return false;
2083
b9ea678c 2084 /* Replace the called function and the first 5 argument by 3 retaining
2085 trailing varargs. */
2086 gimple_call_set_fndecl (stmt, fn);
2087 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2088 gimple_call_set_arg (stmt, 0, dest);
2089 gimple_call_set_arg (stmt, 1, len);
2090 gimple_call_set_arg (stmt, 2, fmt);
2091 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
2092 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2093 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2094 fold_stmt (gsi);
2095 return true;
2096}
2d18b16d 2097
b9ea678c 2098/* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
2099 Return NULL_TREE if a normal call should be emitted rather than
2100 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
2101 or BUILT_IN_VSPRINTF_CHK. */
2d18b16d 2102
b9ea678c 2103static bool
2104gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2105 enum built_in_function fcode)
2106{
2107 gimple stmt = gsi_stmt (*gsi);
2108 tree dest, size, len, fn, fmt, flag;
2109 const char *fmt_str;
2110 unsigned nargs = gimple_call_num_args (stmt);
2d18b16d 2111
b9ea678c 2112 /* Verify the required arguments in the original call. */
2113 if (nargs < 4)
2114 return false;
2115 dest = gimple_call_arg (stmt, 0);
2116 flag = gimple_call_arg (stmt, 1);
2117 size = gimple_call_arg (stmt, 2);
2118 fmt = gimple_call_arg (stmt, 3);
2119
2120 if (! tree_fits_uhwi_p (size))
2121 return false;
2122
2123 len = NULL_TREE;
2124
2125 if (!init_target_chars ())
2126 return false;
2127
2128 /* Check whether the format is a literal string constant. */
2129 fmt_str = c_getstr (fmt);
2130 if (fmt_str != NULL)
2131 {
2132 /* If the format doesn't contain % args or %%, we know the size. */
2133 if (strchr (fmt_str, target_percent) == 0)
2d18b16d 2134 {
b9ea678c 2135 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
2136 len = build_int_cstu (size_type_node, strlen (fmt_str));
2137 }
2138 /* If the format is "%s" and first ... argument is a string literal,
2139 we know the size too. */
2140 else if (fcode == BUILT_IN_SPRINTF_CHK
2141 && strcmp (fmt_str, target_percent_s) == 0)
2142 {
2143 tree arg;
2d18b16d 2144
b9ea678c 2145 if (nargs == 5)
2146 {
2147 arg = gimple_call_arg (stmt, 4);
2148 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2149 {
2150 len = c_strlen (arg, 1);
2151 if (! len || ! tree_fits_uhwi_p (len))
2152 len = NULL_TREE;
2153 }
2154 }
2155 }
2156 }
2d18b16d 2157
b9ea678c 2158 if (! integer_all_onesp (size))
2159 {
2160 if (! len || ! tree_int_cst_lt (len, size))
2161 return false;
2162 }
2d18b16d 2163
b9ea678c 2164 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2165 or if format doesn't contain % chars or is "%s". */
2166 if (! integer_zerop (flag))
2167 {
2168 if (fmt_str == NULL)
2169 return false;
2170 if (strchr (fmt_str, target_percent) != NULL
2171 && strcmp (fmt_str, target_percent_s))
2172 return false;
2173 }
2d18b16d 2174
b9ea678c 2175 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2176 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
2177 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
2178 if (!fn)
2179 return false;
2180
2181 /* Replace the called function and the first 4 argument by 2 retaining
2182 trailing varargs. */
2183 gimple_call_set_fndecl (stmt, fn);
2184 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2185 gimple_call_set_arg (stmt, 0, dest);
2186 gimple_call_set_arg (stmt, 1, fmt);
2187 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
2188 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2189 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2190 fold_stmt (gsi);
2191 return true;
2192}
2193
1888f376 2194/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2195 ORIG may be null if this is a 2-argument call. We don't attempt to
2196 simplify calls with more than 3 arguments.
2197
2198 Return NULL_TREE if no simplification was possible, otherwise return the
2199 simplified form of the call as a tree. If IGNORED is true, it means that
2200 the caller does not use the returned value of the function. */
2201
2202static bool
2ccb7428 2203gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
1888f376 2204{
2205 gimple stmt = gsi_stmt (*gsi);
2206 tree dest = gimple_call_arg (stmt, 0);
2207 tree fmt = gimple_call_arg (stmt, 1);
2208 tree orig = NULL_TREE;
2209 const char *fmt_str = NULL;
2210
2211 /* Verify the required arguments in the original call. We deal with two
2212 types of sprintf() calls: 'sprintf (str, fmt)' and
2213 'sprintf (dest, "%s", orig)'. */
2214 if (gimple_call_num_args (stmt) > 3)
2215 return false;
2216
2217 if (gimple_call_num_args (stmt) == 3)
2218 orig = gimple_call_arg (stmt, 2);
2219
2220 /* Check whether the format is a literal string constant. */
2221 fmt_str = c_getstr (fmt);
2222 if (fmt_str == NULL)
2223 return false;
2224
2225 if (!init_target_chars ())
2226 return false;
2227
2228 /* If the format doesn't contain % args or %%, use strcpy. */
2229 if (strchr (fmt_str, target_percent) == NULL)
2230 {
2231 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2232
2233 if (!fn)
2234 return false;
2235
2236 /* Don't optimize sprintf (buf, "abc", ptr++). */
2237 if (orig)
2238 return false;
2239
2240 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
2241 'format' is known to contain no % formats. */
2242 gimple_seq stmts = NULL;
2243 gimple repl = gimple_build_call (fn, 2, dest, fmt);
2244 gimple_seq_add_stmt_without_update (&stmts, repl);
2245 if (gimple_call_lhs (stmt))
2246 {
2247 repl = gimple_build_assign (gimple_call_lhs (stmt),
2248 build_int_cst (integer_type_node,
2249 strlen (fmt_str)));
2250 gimple_seq_add_stmt_without_update (&stmts, repl);
2251 gsi_replace_with_seq_vops (gsi, stmts);
2252 /* gsi now points at the assignment to the lhs, get a
2253 stmt iterator to the memcpy call.
2254 ??? We can't use gsi_for_stmt as that doesn't work when the
2255 CFG isn't built yet. */
2256 gimple_stmt_iterator gsi2 = *gsi;
2257 gsi_prev (&gsi2);
2258 fold_stmt (&gsi2);
2259 }
2260 else
2261 {
2262 gsi_replace_with_seq_vops (gsi, stmts);
2263 fold_stmt (gsi);
2264 }
2265 return true;
2266 }
2267
2268 /* If the format is "%s", use strcpy if the result isn't used. */
2269 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2270 {
2271 tree fn;
2272 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2273
2274 if (!fn)
2275 return false;
2276
2277 /* Don't crash on sprintf (str1, "%s"). */
2278 if (!orig)
2279 return false;
2280
2ccb7428 2281 tree orig_len = NULL_TREE;
2282 if (gimple_call_lhs (stmt))
1888f376 2283 {
2ccb7428 2284 orig_len = get_maxval_strlen (orig, 0);
95e631b8 2285 if (!orig_len)
1888f376 2286 return false;
2287 }
2288
2289 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
2290 gimple_seq stmts = NULL;
2291 gimple repl = gimple_build_call (fn, 2, dest, orig);
2292 gimple_seq_add_stmt_without_update (&stmts, repl);
2293 if (gimple_call_lhs (stmt))
2294 {
95e631b8 2295 if (!useless_type_conversion_p (integer_type_node,
2296 TREE_TYPE (orig_len)))
2297 orig_len = fold_convert (integer_type_node, orig_len);
2298 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
1888f376 2299 gimple_seq_add_stmt_without_update (&stmts, repl);
2300 gsi_replace_with_seq_vops (gsi, stmts);
2301 /* gsi now points at the assignment to the lhs, get a
2302 stmt iterator to the memcpy call.
2303 ??? We can't use gsi_for_stmt as that doesn't work when the
2304 CFG isn't built yet. */
2305 gimple_stmt_iterator gsi2 = *gsi;
2306 gsi_prev (&gsi2);
2307 fold_stmt (&gsi2);
2308 }
2309 else
2310 {
2311 gsi_replace_with_seq_vops (gsi, stmts);
2312 fold_stmt (gsi);
2313 }
2314 return true;
2315 }
2316 return false;
2317}
2318
95e631b8 2319/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
2320 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
2321 attempt to simplify calls with more than 4 arguments.
1888f376 2322
95e631b8 2323 Return NULL_TREE if no simplification was possible, otherwise return the
2324 simplified form of the call as a tree. If IGNORED is true, it means that
2325 the caller does not use the returned value of the function. */
2326
2327static bool
2ccb7428 2328gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
95e631b8 2329{
2330 gimple stmt = gsi_stmt (*gsi);
2331 tree dest = gimple_call_arg (stmt, 0);
2332 tree destsize = gimple_call_arg (stmt, 1);
2333 tree fmt = gimple_call_arg (stmt, 2);
2334 tree orig = NULL_TREE;
2335 const char *fmt_str = NULL;
2336
2337 if (gimple_call_num_args (stmt) > 4)
2338 return false;
2339
2340 if (gimple_call_num_args (stmt) == 4)
2341 orig = gimple_call_arg (stmt, 3);
2342
2343 if (!tree_fits_uhwi_p (destsize))
2344 return false;
2345 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
2346
2347 /* Check whether the format is a literal string constant. */
2348 fmt_str = c_getstr (fmt);
2349 if (fmt_str == NULL)
2350 return false;
2351
2352 if (!init_target_chars ())
2353 return false;
2354
2355 /* If the format doesn't contain % args or %%, use strcpy. */
2356 if (strchr (fmt_str, target_percent) == NULL)
2357 {
2358 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2359 if (!fn)
2360 return false;
2361
2362 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
2363 if (orig)
2364 return false;
2365
2366 /* We could expand this as
2367 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
2368 or to
2369 memcpy (str, fmt_with_nul_at_cstm1, cst);
2370 but in the former case that might increase code size
2371 and in the latter case grow .rodata section too much.
2372 So punt for now. */
2373 size_t len = strlen (fmt_str);
2374 if (len >= destlen)
2375 return false;
2376
2377 gimple_seq stmts = NULL;
2378 gimple repl = gimple_build_call (fn, 2, dest, fmt);
2379 gimple_seq_add_stmt_without_update (&stmts, repl);
2380 if (gimple_call_lhs (stmt))
2381 {
2382 repl = gimple_build_assign (gimple_call_lhs (stmt),
2383 build_int_cst (integer_type_node, len));
2384 gimple_seq_add_stmt_without_update (&stmts, repl);
2385 gsi_replace_with_seq_vops (gsi, stmts);
2386 /* gsi now points at the assignment to the lhs, get a
2387 stmt iterator to the memcpy call.
2388 ??? We can't use gsi_for_stmt as that doesn't work when the
2389 CFG isn't built yet. */
2390 gimple_stmt_iterator gsi2 = *gsi;
2391 gsi_prev (&gsi2);
2392 fold_stmt (&gsi2);
2393 }
2394 else
2395 {
2396 gsi_replace_with_seq_vops (gsi, stmts);
2397 fold_stmt (gsi);
2398 }
2399 return true;
2400 }
2401
2402 /* If the format is "%s", use strcpy if the result isn't used. */
2403 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2404 {
2405 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2406 if (!fn)
2407 return false;
2408
2409 /* Don't crash on snprintf (str1, cst, "%s"). */
2410 if (!orig)
2411 return false;
2412
2ccb7428 2413 tree orig_len = get_maxval_strlen (orig, 0);
95e631b8 2414 if (!orig_len)
2ccb7428 2415 return false;
95e631b8 2416
2417 /* We could expand this as
2418 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
2419 or to
2420 memcpy (str1, str2_with_nul_at_cstm1, cst);
2421 but in the former case that might increase code size
2422 and in the latter case grow .rodata section too much.
2423 So punt for now. */
2424 if (compare_tree_int (orig_len, destlen) >= 0)
2425 return false;
2426
2427 /* Convert snprintf (str1, cst, "%s", str2) into
2428 strcpy (str1, str2) if strlen (str2) < cst. */
2429 gimple_seq stmts = NULL;
2430 gimple repl = gimple_build_call (fn, 2, dest, orig);
2431 gimple_seq_add_stmt_without_update (&stmts, repl);
2432 if (gimple_call_lhs (stmt))
2433 {
2434 if (!useless_type_conversion_p (integer_type_node,
2435 TREE_TYPE (orig_len)))
2436 orig_len = fold_convert (integer_type_node, orig_len);
2437 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2438 gimple_seq_add_stmt_without_update (&stmts, repl);
2439 gsi_replace_with_seq_vops (gsi, stmts);
2440 /* gsi now points at the assignment to the lhs, get a
2441 stmt iterator to the memcpy call.
2442 ??? We can't use gsi_for_stmt as that doesn't work when the
2443 CFG isn't built yet. */
2444 gimple_stmt_iterator gsi2 = *gsi;
2445 gsi_prev (&gsi2);
2446 fold_stmt (&gsi2);
2447 }
2448 else
2449 {
2450 gsi_replace_with_seq_vops (gsi, stmts);
2451 fold_stmt (gsi);
2452 }
2453 return true;
2454 }
2455 return false;
2456}
1888f376 2457
b9ea678c 2458
2459/* Fold a call to __builtin_strlen with known length LEN. */
2460
2461static bool
2ccb7428 2462gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
b9ea678c 2463{
2ccb7428 2464 gimple stmt = gsi_stmt (*gsi);
2465 tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
b9ea678c 2466 if (!len)
2467 return false;
7f96f652 2468 len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
b9ea678c 2469 replace_call_with_value (gsi, len);
2470 return true;
2d18b16d 2471}
2472
2473
2ccb7428 2474/* Fold the non-target builtin at *GSI and return whether any simplification
2475 was made. */
2d18b16d 2476
b9ea678c 2477static bool
2ccb7428 2478gimple_fold_builtin (gimple_stmt_iterator *gsi)
2d18b16d 2479{
b9ea678c 2480 gimple stmt = gsi_stmt (*gsi);
b9ea678c 2481 tree callee = gimple_call_fndecl (stmt);
2d18b16d 2482
2ccb7428 2483 /* Give up for always_inline inline builtins until they are
2484 inlined. */
2485 if (avoid_folding_inline_builtin (callee))
2486 return false;
2d18b16d 2487
2d18b16d 2488 switch (DECL_FUNCTION_CODE (callee))
2489 {
2ccb7428 2490 case BUILT_IN_BZERO:
2491 return gimple_fold_builtin_memset (gsi, integer_zero_node,
2492 gimple_call_arg (stmt, 1));
2493 case BUILT_IN_MEMSET:
2494 return gimple_fold_builtin_memset (gsi,
2495 gimple_call_arg (stmt, 1),
2496 gimple_call_arg (stmt, 2));
2497 case BUILT_IN_BCOPY:
2498 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
2499 gimple_call_arg (stmt, 0), 3);
2500 case BUILT_IN_MEMCPY:
2501 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2502 gimple_call_arg (stmt, 1), 0);
2503 case BUILT_IN_MEMPCPY:
2504 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2505 gimple_call_arg (stmt, 1), 1);
2506 case BUILT_IN_MEMMOVE:
2507 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2508 gimple_call_arg (stmt, 1), 3);
2509 case BUILT_IN_SPRINTF_CHK:
2510 case BUILT_IN_VSPRINTF_CHK:
2511 return gimple_fold_builtin_sprintf_chk (gsi, DECL_FUNCTION_CODE (callee));
2512 case BUILT_IN_STRCAT_CHK:
2513 return gimple_fold_builtin_strcat_chk (gsi);
396b19bc 2514 case BUILT_IN_STRNCAT_CHK:
2515 return gimple_fold_builtin_strncat_chk (gsi);
2d18b16d 2516 case BUILT_IN_STRLEN:
2ccb7428 2517 return gimple_fold_builtin_strlen (gsi);
2d18b16d 2518 case BUILT_IN_STRCPY:
2ccb7428 2519 return gimple_fold_builtin_strcpy (gsi,
b9ea678c 2520 gimple_call_arg (stmt, 0),
2ccb7428 2521 gimple_call_arg (stmt, 1));
2d18b16d 2522 case BUILT_IN_STRNCPY:
2ccb7428 2523 return gimple_fold_builtin_strncpy (gsi,
b9ea678c 2524 gimple_call_arg (stmt, 0),
2525 gimple_call_arg (stmt, 1),
2ccb7428 2526 gimple_call_arg (stmt, 2));
b65555c1 2527 case BUILT_IN_STRCAT:
2ccb7428 2528 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
2529 gimple_call_arg (stmt, 1));
2d18b16d 2530 case BUILT_IN_FPUTS:
2ccb7428 2531 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2532 gimple_call_arg (stmt, 1), false);
2d18b16d 2533 case BUILT_IN_FPUTS_UNLOCKED:
2ccb7428 2534 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2535 gimple_call_arg (stmt, 1), true);
2d18b16d 2536 case BUILT_IN_MEMCPY_CHK:
2537 case BUILT_IN_MEMPCPY_CHK:
2538 case BUILT_IN_MEMMOVE_CHK:
2539 case BUILT_IN_MEMSET_CHK:
2ccb7428 2540 return gimple_fold_builtin_memory_chk (gsi,
b9ea678c 2541 gimple_call_arg (stmt, 0),
2542 gimple_call_arg (stmt, 1),
2543 gimple_call_arg (stmt, 2),
2544 gimple_call_arg (stmt, 3),
b9ea678c 2545 DECL_FUNCTION_CODE (callee));
2d18b16d 2546 case BUILT_IN_STRCPY_CHK:
2547 case BUILT_IN_STPCPY_CHK:
2ccb7428 2548 return gimple_fold_builtin_stxcpy_chk (gsi,
b9ea678c 2549 gimple_call_arg (stmt, 0),
2550 gimple_call_arg (stmt, 1),
2551 gimple_call_arg (stmt, 2),
b9ea678c 2552 DECL_FUNCTION_CODE (callee));
2d18b16d 2553 case BUILT_IN_STRNCPY_CHK:
1063acde 2554 case BUILT_IN_STPNCPY_CHK:
b9ea678c 2555 return gimple_fold_builtin_stxncpy_chk (gsi,
2556 gimple_call_arg (stmt, 0),
2557 gimple_call_arg (stmt, 1),
2558 gimple_call_arg (stmt, 2),
2559 gimple_call_arg (stmt, 3),
b9ea678c 2560 DECL_FUNCTION_CODE (callee));
2d18b16d 2561 case BUILT_IN_SNPRINTF_CHK:
2562 case BUILT_IN_VSNPRINTF_CHK:
2ccb7428 2563 return gimple_fold_builtin_snprintf_chk (gsi,
b9ea678c 2564 DECL_FUNCTION_CODE (callee));
95e631b8 2565 case BUILT_IN_SNPRINTF:
2ccb7428 2566 return gimple_fold_builtin_snprintf (gsi);
95e631b8 2567 case BUILT_IN_SPRINTF:
2ccb7428 2568 return gimple_fold_builtin_sprintf (gsi);
b9ea678c 2569 default:;
2570 }
2571
2572 /* Try the generic builtin folder. */
2573 bool ignore = (gimple_call_lhs (stmt) == NULL);
2574 tree result = fold_call_stmt (stmt, ignore);
2575 if (result)
2576 {
2577 if (ignore)
2578 STRIP_NOPS (result);
2579 else
2580 result = fold_convert (gimple_call_return_type (stmt), result);
2581 if (!update_call_from_tree (gsi, result))
2582 gimplify_and_update_call_from_tree (gsi, result);
2583 return true;
2584 }
2585
2586 return false;
2587}
2588
0c93c8a9 2589/* Return true if ARG0 CODE ARG1 in infinite signed precision operation
2590 doesn't fit into TYPE. The test for overflow should be regardless of
2591 -fwrapv, and even for unsigned types. */
2592
2593bool
2594arith_overflowed_p (enum tree_code code, const_tree type,
2595 const_tree arg0, const_tree arg1)
2596{
2597 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
2598 typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
2599 widest2_int_cst;
2600 widest2_int warg0 = widest2_int_cst (arg0);
2601 widest2_int warg1 = widest2_int_cst (arg1);
2602 widest2_int wres;
2603 switch (code)
2604 {
2605 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
2606 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
2607 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
2608 default: gcc_unreachable ();
2609 }
2610 signop sign = TYPE_SIGN (type);
2611 if (sign == UNSIGNED && wi::neg_p (wres))
2612 return true;
2613 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
2614}
2615
2d18b16d 2616/* Attempt to fold a call statement referenced by the statement iterator GSI.
2617 The statement may be replaced by another statement, e.g., if the call
2618 simplifies to a constant value. Return true if any changes were made.
2619 It is assumed that the operands have been previously folded. */
2620
3aa6ac67 2621static bool
3fd0ca33 2622gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
2d18b16d 2623{
2624 gimple stmt = gsi_stmt (*gsi);
6fec5449 2625 tree callee;
3aa6ac67 2626 bool changed = false;
2627 unsigned i;
2d18b16d 2628
3aa6ac67 2629 /* Fold *& in call arguments. */
2630 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2631 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
2632 {
2633 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
2634 if (tmp)
2635 {
2636 gimple_call_set_arg (stmt, i, tmp);
2637 changed = true;
2638 }
2639 }
6fec5449 2640
2641 /* Check for virtual calls that became direct calls. */
2642 callee = gimple_call_fn (stmt);
fb049fba 2643 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
6fec5449 2644 {
3658fd1d 2645 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
2646 {
10fba9c0 2647 if (dump_file && virtual_method_call_p (callee)
2648 && !possible_polymorphic_call_target_p
379f6698 2649 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
2650 (OBJ_TYPE_REF_EXPR (callee)))))
10fba9c0 2651 {
2652 fprintf (dump_file,
f70f513f 2653 "Type inheritance inconsistent devirtualization of ");
10fba9c0 2654 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
2655 fprintf (dump_file, " to ");
2656 print_generic_expr (dump_file, callee, TDF_SLIM);
2657 fprintf (dump_file, "\n");
2658 }
2659
3658fd1d 2660 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
3aa6ac67 2661 changed = true;
2662 }
f70f513f 2663 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
3aa6ac67 2664 {
76058579 2665 bool final;
2666 vec <cgraph_node *>targets
1b613a0a 2667 = possible_polymorphic_call_targets (callee, stmt, &final);
ceb49bba 2668 if (final && targets.length () <= 1 && dbg_cnt (devirt))
3aa6ac67 2669 {
f70f513f 2670 tree lhs = gimple_call_lhs (stmt);
ceb49bba 2671 if (dump_enabled_p ())
2672 {
4c8041d7 2673 location_t loc = gimple_location_safe (stmt);
ceb49bba 2674 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2675 "folding virtual function call to %s\n",
2676 targets.length () == 1
2677 ? targets[0]->name ()
2678 : "__builtin_unreachable");
2679 }
76058579 2680 if (targets.length () == 1)
3aa6ac67 2681 {
68b0b56c 2682 gimple_call_set_fndecl (stmt, targets[0]->decl);
3aa6ac67 2683 changed = true;
f70f513f 2684 /* If the call becomes noreturn, remove the lhs. */
2685 if (lhs && (gimple_call_flags (stmt) & ECF_NORETURN))
2686 {
2687 if (TREE_CODE (lhs) == SSA_NAME)
2688 {
2689 tree var = create_tmp_var (TREE_TYPE (lhs), NULL);
2690 tree def = get_or_create_ssa_default_def (cfun, var);
2691 gimple new_stmt = gimple_build_assign (lhs, def);
2692 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
2693 }
2694 gimple_call_set_lhs (stmt, NULL_TREE);
2695 }
3aa6ac67 2696 }
f70f513f 2697 else
68b0b56c 2698 {
2699 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2700 gimple new_stmt = gimple_build_call (fndecl, 0);
2701 gimple_set_location (new_stmt, gimple_location (stmt));
f70f513f 2702 if (lhs && TREE_CODE (lhs) == SSA_NAME)
2703 {
2704 tree var = create_tmp_var (TREE_TYPE (lhs), NULL);
2705 tree def = get_or_create_ssa_default_def (cfun, var);
ac3cd7ce 2706
2707 /* To satisfy condition for
2708 cgraph_update_edges_for_call_stmt_node,
2709 we need to preserve GIMPLE_CALL statement
2710 at position of GSI iterator. */
f70f513f 2711 update_call_from_tree (gsi, def);
ac3cd7ce 2712 gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
f70f513f 2713 }
2714 else
7f1b049e 2715 {
2716 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
2717 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
2718 gsi_replace (gsi, new_stmt, false);
2719 }
68b0b56c 2720 return true;
2721 }
3aa6ac67 2722 }
3658fd1d 2723 }
3aa6ac67 2724 }
3658fd1d 2725
156cc902 2726 /* Check for indirect calls that became direct calls, and then
2727 no longer require a static chain. */
2728 if (gimple_call_chain (stmt))
2729 {
2730 tree fn = gimple_call_fndecl (stmt);
2731 if (fn && !DECL_STATIC_CHAIN (fn))
2732 {
2733 gimple_call_set_chain (stmt, NULL);
2734 changed = true;
2735 }
2736 else
2737 {
2738 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
2739 if (tmp)
2740 {
2741 gimple_call_set_chain (stmt, tmp);
2742 changed = true;
2743 }
2744 }
2745 }
2746
3aa6ac67 2747 if (inplace)
2748 return changed;
2749
2750 /* Check for builtins that CCP can handle using information not
2751 available in the generic fold routines. */
b9ea678c 2752 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
2753 {
2754 if (gimple_fold_builtin (gsi))
2755 changed = true;
2756 }
2757 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
3aa6ac67 2758 {
29cad3b6 2759 changed |= targetm.gimple_fold_builtin (gsi);
6fec5449 2760 }
ce8e6661 2761 else if (gimple_call_internal_p (stmt))
c83059be 2762 {
ce8e6661 2763 enum tree_code subcode = ERROR_MARK;
2764 tree result = NULL_TREE;
0c93c8a9 2765 bool cplx_result = false;
2766 tree overflow = NULL_TREE;
ce8e6661 2767 switch (gimple_call_internal_fn (stmt))
2768 {
2769 case IFN_BUILTIN_EXPECT:
2770 result = fold_builtin_expect (gimple_location (stmt),
2771 gimple_call_arg (stmt, 0),
2772 gimple_call_arg (stmt, 1),
2773 gimple_call_arg (stmt, 2));
2774 break;
0b45f2d1 2775 case IFN_UBSAN_OBJECT_SIZE:
2776 if (integer_all_onesp (gimple_call_arg (stmt, 2))
2777 || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
2778 && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
2779 && tree_int_cst_le (gimple_call_arg (stmt, 1),
2780 gimple_call_arg (stmt, 2))))
2781 {
2782 gsi_replace (gsi, gimple_build_nop (), true);
2783 unlink_stmt_vdef (stmt);
2784 release_defs (stmt);
2785 return true;
2786 }
2787 break;
ce8e6661 2788 case IFN_UBSAN_CHECK_ADD:
2789 subcode = PLUS_EXPR;
2790 break;
2791 case IFN_UBSAN_CHECK_SUB:
2792 subcode = MINUS_EXPR;
2793 break;
2794 case IFN_UBSAN_CHECK_MUL:
2795 subcode = MULT_EXPR;
2796 break;
0c93c8a9 2797 case IFN_ADD_OVERFLOW:
2798 subcode = PLUS_EXPR;
2799 cplx_result = true;
2800 break;
2801 case IFN_SUB_OVERFLOW:
2802 subcode = MINUS_EXPR;
2803 cplx_result = true;
2804 break;
2805 case IFN_MUL_OVERFLOW:
2806 subcode = MULT_EXPR;
2807 cplx_result = true;
2808 break;
ce8e6661 2809 default:
2810 break;
2811 }
2812 if (subcode != ERROR_MARK)
2813 {
2814 tree arg0 = gimple_call_arg (stmt, 0);
2815 tree arg1 = gimple_call_arg (stmt, 1);
0c93c8a9 2816 tree type = TREE_TYPE (arg0);
2817 if (cplx_result)
2818 {
2819 tree lhs = gimple_call_lhs (stmt);
2820 if (lhs == NULL_TREE)
2821 type = NULL_TREE;
2822 else
2823 type = TREE_TYPE (TREE_TYPE (lhs));
2824 }
2825 if (type == NULL_TREE)
2826 ;
ce8e6661 2827 /* x = y + 0; x = y - 0; x = y * 0; */
0c93c8a9 2828 else if (integer_zerop (arg1))
2829 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
ce8e6661 2830 /* x = 0 + y; x = 0 * y; */
2831 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
0c93c8a9 2832 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
ce8e6661 2833 /* x = y - y; */
2834 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
0c93c8a9 2835 result = integer_zero_node;
ce8e6661 2836 /* x = y * 1; x = 1 * y; */
0c93c8a9 2837 else if (subcode == MULT_EXPR && integer_onep (arg1))
2838 result = arg0;
2839 else if (subcode == MULT_EXPR && integer_onep (arg0))
2840 result = arg1;
2841 else if (TREE_CODE (arg0) == INTEGER_CST
2842 && TREE_CODE (arg1) == INTEGER_CST)
ce8e6661 2843 {
0c93c8a9 2844 if (cplx_result)
2845 result = int_const_binop (subcode, fold_convert (type, arg0),
2846 fold_convert (type, arg1));
2847 else
2848 result = int_const_binop (subcode, arg0, arg1);
2849 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
2850 {
2851 if (cplx_result)
2852 overflow = build_one_cst (type);
2853 else
2854 result = NULL_TREE;
2855 }
2856 }
2857 if (result)
2858 {
2859 if (result == integer_zero_node)
2860 result = build_zero_cst (type);
2861 else if (cplx_result && TREE_TYPE (result) != type)
2862 {
2863 if (TREE_CODE (result) == INTEGER_CST)
2864 {
2865 if (arith_overflowed_p (PLUS_EXPR, type, result,
2866 integer_zero_node))
2867 overflow = build_one_cst (type);
2868 }
2869 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
2870 && TYPE_UNSIGNED (type))
2871 || (TYPE_PRECISION (type)
2872 < (TYPE_PRECISION (TREE_TYPE (result))
2873 + (TYPE_UNSIGNED (TREE_TYPE (result))
2874 && !TYPE_UNSIGNED (type)))))
2875 result = NULL_TREE;
2876 if (result)
2877 result = fold_convert (type, result);
2878 }
ce8e6661 2879 }
2880 }
0c93c8a9 2881
c83059be 2882 if (result)
2883 {
0c93c8a9 2884 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
2885 result = drop_tree_overflow (result);
2886 if (cplx_result)
2887 {
2888 if (overflow == NULL_TREE)
2889 overflow = build_zero_cst (TREE_TYPE (result));
2890 tree ctype = build_complex_type (TREE_TYPE (result));
2891 if (TREE_CODE (result) == INTEGER_CST
2892 && TREE_CODE (overflow) == INTEGER_CST)
2893 result = build_complex (ctype, result, overflow);
2894 else
2895 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
2896 ctype, result, overflow);
2897 }
c83059be 2898 if (!update_call_from_tree (gsi, result))
2899 gimplify_and_update_call_from_tree (gsi, result);
2900 changed = true;
2901 }
2902 }
6fec5449 2903
3aa6ac67 2904 return changed;
2d18b16d 2905}
2906
55534d34 2907
/* Worker for fold_stmt_1 dispatch to pattern based folding with
   gimple_simplify.

   Replaces *GSI with the simplification result in RCODE and OPS
   and the associated statements in *SEQ.  Does the replacement
   according to INPLACE and returns true if the operation succeeded.  */

static bool
replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
				  code_helper rcode, tree *ops,
				  gimple_seq *seq, bool inplace)
{
  gimple stmt = gsi_stmt (*gsi);

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  See also maybe_push_res_to_seq.  */
  if ((TREE_CODE (ops[0]) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0]))
      || (ops[1]
	  && TREE_CODE (ops[1]) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1]))
      || (ops[2]
	  && TREE_CODE (ops[2]) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])))
    return false;

  if (gimple_code (stmt) == GIMPLE_COND)
    {
      /* A GIMPLE_COND can only take a tree-code simplification result:
	 either a comparison, an SSA name, a constant, or something we
	 can materialize into SEQ and compare against zero.  */
      gcc_assert (rcode.is_tree_code ());
      if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
	  /* GIMPLE_CONDs condition may not throw.  */
	  && (!flag_exceptions
	      || !cfun->can_throw_non_call_exceptions
	      || !operation_could_trap_p (rcode,
					  FLOAT_TYPE_P (TREE_TYPE (ops[0])),
					  false, NULL_TREE)))
	gimple_cond_set_condition (stmt, rcode, ops[0], ops[1]);
      else if (rcode == SSA_NAME)
	gimple_cond_set_condition (stmt, NE_EXPR, ops[0],
				   build_zero_cst (TREE_TYPE (ops[0])));
      else if (rcode == INTEGER_CST)
	{
	  /* Constant condition: fold the branch statically.  */
	  if (integer_zerop (ops[0]))
	    gimple_cond_make_false (stmt);
	  else
	    gimple_cond_make_true (stmt);
	}
      else if (!inplace)
	{
	  /* Emit the simplified expression as new statements in SEQ and
	     compare its result against zero.  */
	  tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
					    ops, seq);
	  if (!res)
	    return false;
	  gimple_cond_set_condition (stmt, NE_EXPR, res,
				     build_zero_cst (TREE_TYPE (res)));
	}
      else
	return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi),
			     0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (is_gimple_assign (stmt)
	   && rcode.is_tree_code ())
    {
      /* For in-place folding only allow replacements that do not grow
	 the number of RHS operands of the assignment.  */
      if (!inplace
	  || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
	{
	  maybe_build_generic_op (rcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)),
				  &ops[0], ops[1], ops[2]);
	  gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      if (!gimple_seq_empty_p (*seq))
		print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	      print_gimple_stmt (dump_file, gsi_stmt (*gsi),
				 0, TDF_SLIM);
	    }
	  gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
	  return true;
	}
    }
  else if (!inplace)
    {
      /* Any other statement kind with an LHS: replace the whole
	 statement with SEQ computing the simplified value into LHS,
	 preserving virtual operands.  */
      if (gimple_has_lhs (stmt))
	{
	  tree lhs = gimple_get_lhs (stmt);
	  maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
				 ops, seq, lhs);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	    }
	  gsi_replace_with_seq_vops (gsi, *seq);
	  return true;
	}
      else
	gcc_unreachable ();
    }

  return false;
}
3020
/* Canonicalize MEM_REFs invariant address operand after propagation.
   T points at the expression to canonicalize; returns true if *T was
   changed.  */

static bool
maybe_canonicalize_mem_ref_addr (tree *t)
{
  bool res = false;

  /* Look through an outer ADDR_EXPR and any handled components down
     to the base memory reference.  */
  if (TREE_CODE (*t) == ADDR_EXPR)
    t = &TREE_OPERAND (*t, 0);

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
     of invariant addresses into a SSA name MEM_REF address.  */
  if (TREE_CODE (*t) == MEM_REF
      || TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (*t, 0);
      if (TREE_CODE (addr) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
	      || handled_component_p (TREE_OPERAND (addr, 0))))
	{
	  tree base;
	  HOST_WIDE_INT coffset;
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						&coffset);
	  if (!base)
	    gcc_unreachable ();

	  /* Fold the component offset into the MEM_REF's constant
	     offset operand and take the address of the base directly.  */
	  TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
	  TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
						  TREE_OPERAND (*t, 1),
						  size_int (coffset));
	  res = true;
	}
      gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
			   || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
    }

  /* Canonicalize back MEM_REFs to plain reference trees if the object
     accessed is a decl that has the same access semantics as the MEM_REF.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1)))
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
      if (/* Same volatile qualification.  */
	  TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
	  /* Same TBAA behavior with -fstrict-aliasing.  */
	  && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
	  && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
	      == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
	  /* Same alignment.  */
	  && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
	  /* We have to look out here to not drop a required conversion
	     from the rhs to the lhs if *t appears on the lhs or vice-versa
	     if it appears on the rhs.  Thus require strict type
	     compatibility.  */
	  && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
	{
	  *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
	  res = true;
	}
    }

  /* Canonicalize TARGET_MEM_REF in particular with respect to
     the indexes becoming constant.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  res = true;
	}
    }

  return res;
}
3102
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  VALUEIZE is the callback used to look
   through SSA names during pattern-based simplification.  Returns true
   if any change was made to the statement at *GSI.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  unsigned i;

  /* First do required canonicalization of [TARGET_]MEM_REF addresses
     after propagation.
     ??? This shouldn't be done in generic folding but in the
     propagation helpers which also know whether an address was
     propagated.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
	{
	  tree *rhs = gimple_assign_rhs1_ptr (stmt);
	  if ((REFERENCE_CLASS_P (*rhs)
	       || TREE_CODE (*rhs) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (rhs))
	    changed = true;
	  tree *lhs = gimple_assign_lhs_ptr (stmt);
	  if (REFERENCE_CLASS_P (*lhs)
	      && maybe_canonicalize_mem_ref_addr (lhs))
	    changed = true;
	}
      break;
    case GIMPLE_CALL:
      {
	/* Canonicalize all memory-reference arguments and the LHS.  */
	for (i = 0; i < gimple_call_num_args (stmt); ++i)
	  {
	    tree *arg = gimple_call_arg_ptr (stmt, i);
	    if (REFERENCE_CLASS_P (*arg)
		&& maybe_canonicalize_mem_ref_addr (arg))
	      changed = true;
	  }
	tree *lhs = gimple_call_lhs_ptr (stmt);
	if (*lhs
	    && REFERENCE_CLASS_P (*lhs)
	    && maybe_canonicalize_mem_ref_addr (lhs))
	  changed = true;
	break;
      }
    case GIMPLE_ASM:
      {
	/* Canonicalize asm output and input operands.  */
	for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (stmt, i);
	    tree op = TREE_VALUE (link);
	    if (REFERENCE_CLASS_P (op)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
	for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (stmt, i);
	    tree op = TREE_VALUE (link);
	    if ((REFERENCE_CLASS_P (op)
		 || TREE_CODE (op) == ADDR_EXPR)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
      }
      break;
    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree *val = gimple_debug_bind_get_value_ptr (stmt);
	  if (*val
	      && (REFERENCE_CLASS_P (*val)
		  || TREE_CODE (*val) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (val))
	    changed = true;
	}
      break;
    default:;
    }

  /* Dispatch to pattern-based folding.
     In-place folding only handles assignments and conditions since
     anything else could require inserting new statements.  */
  if (!inplace
      || is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_COND)
    {
      gimple_seq seq = NULL;
      code_helper rcode;
      tree ops[3] = {};
      if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq, valueize))
	{
	  if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
	    changed = true;
	  else
	    gimple_seq_discard (seq);
	}
    }

  /* Re-fetch the statement, the above may have replaced it.  */
  stmt = gsi_stmt (*gsi);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	unsigned old_num_ops = gimple_num_ops (stmt);
	enum tree_code subcode = gimple_assign_rhs_code (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs;
	/* First canonicalize operand order.  This avoids building new
	   trees if this is the only thing fold would later do.  */
	if ((commutative_tree_code (subcode)
	     || commutative_ternary_tree_code (subcode))
	    && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
				     gimple_assign_rhs2 (stmt), false))
	  {
	    tree tem = gimple_assign_rhs1 (stmt);
	    gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
	    gimple_assign_set_rhs2 (stmt, tem);
	    changed = true;
	  }
	new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	/* In-place folding must not grow the operand count.  */
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
	size_t noutputs;
	const char **oconstraints;
	const char *constraint;
	bool allows_mem, allows_reg;

	noutputs = gimple_asm_noutputs (stmt);
	oconstraints = XALLOCAVEC (const char *, noutputs);

	for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (stmt, i);
	    tree op = TREE_VALUE (link);
	    oconstraints[i]
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    /* Outputs are lvalues, so require an addressable result.  */
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, true)) != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
	for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (stmt, i);
	    tree op = TREE_VALUE (link);
	    constraint
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    /* Inputs may only be folded as memory when the constraint
	       requires memory but not a register.  */
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, !allows_reg && allows_mem))
		   != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      /* Fold the reference inside &ref and re-take its address.  */
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref, false);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  return changed;
}
3342
/* Valueization callback that ends up not following SSA edges.  */

tree
no_follow_ssa_edges (tree)
{
  return NULL_TREE;
}
3350
b768d5ee 3351/* Valueization callback that ends up following single-use SSA edges only. */
3352
3353tree
3354follow_single_use_edges (tree val)
3355{
3356 if (TREE_CODE (val) == SSA_NAME
3357 && !has_single_use (val))
3358 return NULL_TREE;
3359 return val;
3360}
3361
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  /* Do not follow SSA use-def chains during simplification.  */
  return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
}
3374
/* Overload of fold_stmt that lets the caller control how SSA names
   are valueized during simplification via VALUEIZE.  */

bool
fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
{
  return fold_stmt_1 (gsi, false, valueize);
}
3380
/* Perform the minimal folding on statement *GSI.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement *GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
  /* In-place folding must not replace the statement itself.  */
  gcc_assert (gsi_stmt (*gsi) == stmt);
  return changed;
}
3397
c82d157a 3398/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
3399 if EXPR is null or we don't know how.
3400 If non-null, the result always has boolean type. */
3401
3402static tree
3403canonicalize_bool (tree expr, bool invert)
3404{
3405 if (!expr)
3406 return NULL_TREE;
3407 else if (invert)
3408 {
3409 if (integer_nonzerop (expr))
3410 return boolean_false_node;
3411 else if (integer_zerop (expr))
3412 return boolean_true_node;
3413 else if (TREE_CODE (expr) == SSA_NAME)
3414 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
3415 build_int_cst (TREE_TYPE (expr), 0));
3416 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
3417 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
3418 boolean_type_node,
3419 TREE_OPERAND (expr, 0),
3420 TREE_OPERAND (expr, 1));
3421 else
3422 return NULL_TREE;
3423 }
3424 else
3425 {
3426 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
3427 return expr;
3428 if (integer_nonzerop (expr))
3429 return boolean_true_node;
3430 else if (integer_zerop (expr))
3431 return boolean_false_node;
3432 else if (TREE_CODE (expr) == SSA_NAME)
3433 return fold_build2 (NE_EXPR, boolean_type_node, expr,
3434 build_int_cst (TREE_TYPE (expr), 0));
3435 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
3436 return fold_build2 (TREE_CODE (expr),
3437 boolean_type_node,
3438 TREE_OPERAND (expr, 0),
3439 TREE_OPERAND (expr, 1));
3440 else
3441 return NULL_TREE;
3442 }
3443}
3444
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      if (operand_equal_p (expr, op1, 0))
	/* A boolean name equals (name != 0) and (name == 1).  */
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  /* (name != 0) or (name == 1): same polarity, recurse as-is.  */
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  /* (name == 0) or (name != 1): recurse with inverted code.  */
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}
3502
3503/* Check to see if two boolean expressions OP1 and OP2 are logically
3504 equivalent. */
3505
3506static bool
3507same_bool_result_p (const_tree op1, const_tree op2)
3508{
3509 /* Simple cases first. */
3510 if (operand_equal_p (op1, op2, 0))
3511 return true;
3512
3513 /* Check the cases where at least one of the operands is a comparison.
3514 These are a bit smarter than operand_equal_p in that they apply some
3515 identifies on SSA_NAMEs. */
3516 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
3517 && same_bool_comparison_p (op1, TREE_CODE (op2),
3518 TREE_OPERAND (op2, 0),
3519 TREE_OPERAND (op2, 1)))
3520 return true;
3521 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
3522 && same_bool_comparison_p (op2, TREE_CODE (op1),
3523 TREE_OPERAND (op1, 0),
3524 TREE_OPERAND (op1, 1)))
3525 return true;
3526
3527 /* Default case. */
3528 return false;
3529}
3530
/* Forward declarations for some mutually recursive functions.
   and_comparisons_1/or_comparisons_1 combine two comparisons; the
   *_var_with_comparison* helpers look through the SSA definition of
   one operand and recurse.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple stmt,
			  enum tree_code code2, tree op2a, tree op2b);
3551
3552/* Helper function for and_comparisons_1: try to simplify the AND of the
3553 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
3554 If INVERT is true, invert the value of the VAR before doing the AND.
3555 Return NULL_EXPR if we can't simplify this to a single expression. */
3556
3557static tree
3558and_var_with_comparison (tree var, bool invert,
3559 enum tree_code code2, tree op2a, tree op2b)
3560{
3561 tree t;
3562 gimple stmt = SSA_NAME_DEF_STMT (var);
3563
3564 /* We can only deal with variables whose definitions are assignments. */
3565 if (!is_gimple_assign (stmt))
3566 return NULL_TREE;
3567
3568 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
3569 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
3570 Then we only have to consider the simpler non-inverted cases. */
3571 if (invert)
3572 t = or_var_with_comparison_1 (stmt,
3573 invert_tree_comparison (code2, false),
3574 op2a, op2b);
3575 else
3576 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
3577 return canonicalize_bool (t, invert);
3578}
3579
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  /* The comparison tests op2a for being true.  */
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  /* The comparison tests op2a for being false.  */
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2,
				  op2a,
				  op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b))  */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case. where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
3742
3743/* Try to simplify the AND of two comparisons defined by
3744 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
3745 If this can be done without constructing an intermediate value,
3746 return the resulting tree; otherwise NULL_TREE is returned.
3747 This function is deliberately asymmetric as it recurses on SSA_DEFs
3748 in the first comparison but not the second. */
3749
3750static tree
3751and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
3752 enum tree_code code2, tree op2a, tree op2b)
3753{
f2c1848b 3754 tree truth_type = truth_type_for (TREE_TYPE (op1a));
357d8e5d 3755
c82d157a 3756 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
3757 if (operand_equal_p (op1a, op2a, 0)
3758 && operand_equal_p (op1b, op2b, 0))
3759 {
08410c65 3760 /* Result will be either NULL_TREE, or a combined comparison. */
c82d157a 3761 tree t = combine_comparisons (UNKNOWN_LOCATION,
3762 TRUTH_ANDIF_EXPR, code1, code2,
357d8e5d 3763 truth_type, op1a, op1b);
c82d157a 3764 if (t)
3765 return t;
3766 }
3767
3768 /* Likewise the swapped case of the above. */
3769 if (operand_equal_p (op1a, op2b, 0)
3770 && operand_equal_p (op1b, op2a, 0))
3771 {
08410c65 3772 /* Result will be either NULL_TREE, or a combined comparison. */
c82d157a 3773 tree t = combine_comparisons (UNKNOWN_LOCATION,
3774 TRUTH_ANDIF_EXPR, code1,
3775 swap_tree_comparison (code2),
357d8e5d 3776 truth_type, op1a, op1b);
c82d157a 3777 if (t)
3778 return t;
3779 }
3780
3781 /* If both comparisons are of the same value against constants, we might
3782 be able to merge them. */
3783 if (operand_equal_p (op1a, op2a, 0)
3784 && TREE_CODE (op1b) == INTEGER_CST
3785 && TREE_CODE (op2b) == INTEGER_CST)
3786 {
3787 int cmp = tree_int_cst_compare (op1b, op2b);
3788
3789 /* If we have (op1a == op1b), we should either be able to
3790 return that or FALSE, depending on whether the constant op1b
3791 also satisfies the other comparison against op2b. */
3792 if (code1 == EQ_EXPR)
3793 {
3794 bool done = true;
3795 bool val;
3796 switch (code2)
3797 {
3798 case EQ_EXPR: val = (cmp == 0); break;
3799 case NE_EXPR: val = (cmp != 0); break;
3800 case LT_EXPR: val = (cmp < 0); break;
3801 case GT_EXPR: val = (cmp > 0); break;
3802 case LE_EXPR: val = (cmp <= 0); break;
3803 case GE_EXPR: val = (cmp >= 0); break;
3804 default: done = false;
3805 }
3806 if (done)
3807 {
3808 if (val)
3809 return fold_build2 (code1, boolean_type_node, op1a, op1b);
3810 else
3811 return boolean_false_node;
3812 }
3813 }
3814 /* Likewise if the second comparison is an == comparison. */
3815 else if (code2 == EQ_EXPR)
3816 {
3817 bool done = true;
3818 bool val;
3819 switch (code1)
3820 {
3821 case EQ_EXPR: val = (cmp == 0); break;
3822 case NE_EXPR: val = (cmp != 0); break;
3823 case LT_EXPR: val = (cmp > 0); break;
3824 case GT_EXPR: val = (cmp < 0); break;
3825 case LE_EXPR: val = (cmp >= 0); break;
3826 case GE_EXPR: val = (cmp <= 0); break;
3827 default: done = false;
3828 }
3829 if (done)
3830 {
3831 if (val)
3832 return fold_build2 (code2, boolean_type_node, op2a, op2b);
3833 else
3834 return boolean_false_node;
3835 }
3836 }
3837
3838 /* Same business with inequality tests. */
3839 else if (code1 == NE_EXPR)
3840 {
3841 bool val;
3842 switch (code2)
3843 {
3844 case EQ_EXPR: val = (cmp != 0); break;
3845 case NE_EXPR: val = (cmp == 0); break;
3846 case LT_EXPR: val = (cmp >= 0); break;
3847 case GT_EXPR: val = (cmp <= 0); break;
3848 case LE_EXPR: val = (cmp > 0); break;
3849 case GE_EXPR: val = (cmp < 0); break;
3850 default:
3851 val = false;
3852 }
3853 if (val)
3854 return fold_build2 (code2, boolean_type_node, op2a, op2b);
3855 }
3856 else if (code2 == NE_EXPR)
3857 {
3858 bool val;
3859 switch (code1)
3860 {
3861 case EQ_EXPR: val = (cmp == 0); break;
3862 case NE_EXPR: val = (cmp != 0); break;
3863 case LT_EXPR: val = (cmp <= 0); break;
3864 case GT_EXPR: val = (cmp >= 0); break;
3865 case LE_EXPR: val = (cmp < 0); break;
3866 case GE_EXPR: val = (cmp > 0); break;
3867 default:
3868 val = false;
3869 }
3870 if (val)
3871 return fold_build2 (code1, boolean_type_node, op1a, op1b);
3872 }
3873
3874 /* Chose the more restrictive of two < or <= comparisons. */
3875 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
3876 && (code2 == LT_EXPR || code2 == LE_EXPR))
3877 {
3878 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
3879 return fold_build2 (code1, boolean_type_node, op1a, op1b);
3880 else
3881 return fold_build2 (code2, boolean_type_node, op2a, op2b);
3882 }
3883
3884 /* Likewise chose the more restrictive of two > or >= comparisons. */
3885 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
3886 && (code2 == GT_EXPR || code2 == GE_EXPR))
3887 {
3888 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
3889 return fold_build2 (code1, boolean_type_node, op1a, op1b);
3890 else
3891 return fold_build2 (code2, boolean_type_node, op2a, op2b);
3892 }
3893
3894 /* Check for singleton ranges. */
3895 else if (cmp == 0
3896 && ((code1 == LE_EXPR && code2 == GE_EXPR)
3897 || (code1 == GE_EXPR && code2 == LE_EXPR)))
3898 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
3899
3900 /* Check for disjoint ranges. */
3901 else if (cmp <= 0
3902 && (code1 == LT_EXPR || code1 == LE_EXPR)
3903 && (code2 == GT_EXPR || code2 == GE_EXPR))
3904 return boolean_false_node;
3905 else if (cmp >= 0
3906 && (code1 == GT_EXPR || code1 == GE_EXPR)
3907 && (code2 == LT_EXPR || code2 == LE_EXPR))
3908 return boolean_false_node;
3909 }
3910
3911 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
3912 NAME's definition is a truth value. See if there are any simplifications
3913 that can be done against the NAME's definition. */
3914 if (TREE_CODE (op1a) == SSA_NAME
3915 && (code1 == NE_EXPR || code1 == EQ_EXPR)
3916 && (integer_zerop (op1b) || integer_onep (op1b)))
3917 {
3918 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
3919 || (code1 == NE_EXPR && integer_onep (op1b)));
3920 gimple stmt = SSA_NAME_DEF_STMT (op1a);
3921 switch (gimple_code (stmt))
3922 {
3923 case GIMPLE_ASSIGN:
3924 /* Try to simplify by copy-propagating the definition. */
3925 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
3926
3927 case GIMPLE_PHI:
3928 /* If every argument to the PHI produces the same result when
3929 ANDed with the second comparison, we win.
3930 Do not do this unless the type is bool since we need a bool
3931 result here anyway. */
3932 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
3933 {
3934 tree result = NULL_TREE;
3935 unsigned i;
3936 for (i = 0; i < gimple_phi_num_args (stmt); i++)
3937 {
3938 tree arg = gimple_phi_arg_def (stmt, i);
3939
3940 /* If this PHI has itself as an argument, ignore it.
3941 If all the other args produce the same result,
3942 we're still OK. */
3943 if (arg == gimple_phi_result (stmt))
3944 continue;
3945 else if (TREE_CODE (arg) == INTEGER_CST)
3946 {
3947 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
3948 {
3949 if (!result)
3950 result = boolean_false_node;
3951 else if (!integer_zerop (result))
3952 return NULL_TREE;
3953 }
3954 else if (!result)
3955 result = fold_build2 (code2, boolean_type_node,
3956 op2a, op2b);
3957 else if (!same_bool_comparison_p (result,
3958 code2, op2a, op2b))
3959 return NULL_TREE;
3960 }
ae4330d7 3961 else if (TREE_CODE (arg) == SSA_NAME
3962 && !SSA_NAME_IS_DEFAULT_DEF (arg))
c82d157a 3963 {
8a245b9d 3964 tree temp;
3965 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
3966 /* In simple cases we can look through PHI nodes,
3967 but we have to be careful with loops.
3968 See PR49073. */
3969 if (! dom_info_available_p (CDI_DOMINATORS)
3970 || gimple_bb (def_stmt) == gimple_bb (stmt)
3971 || dominated_by_p (CDI_DOMINATORS,
3972 gimple_bb (def_stmt),
3973 gimple_bb (stmt)))
3974 return NULL_TREE;
3975 temp = and_var_with_comparison (arg, invert, code2,
3976 op2a, op2b);
c82d157a 3977 if (!temp)
3978 return NULL_TREE;
3979 else if (!result)
3980 result = temp;
3981 else if (!same_bool_result_p (result, temp))
3982 return NULL_TREE;
3983 }
3984 else
3985 return NULL_TREE;
3986 }
3987 return result;
3988 }
3989
3990 default:
3991 break;
3992 }
3993 }
3994 return NULL_TREE;
3995}
3996
3997/* Try to simplify the AND of two comparisons, specified by
3998 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
3999 If this can be simplified to a single expression (without requiring
4000 introducing more SSA variables to hold intermediate values),
4001 return the resulting tree. Otherwise return NULL_TREE.
4002 If the result expression is non-null, it has boolean type. */
4003
4004tree
4005maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
4006 enum tree_code code2, tree op2a, tree op2b)
4007{
4008 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4009 if (t)
4010 return t;
4011 else
4012 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4013}
4014
4015/* Helper function for or_comparisons_1: try to simplify the OR of the
4016 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4017 If INVERT is true, invert the value of VAR before doing the OR.
4018 Return NULL_EXPR if we can't simplify this to a single expression. */
4019
4020static tree
4021or_var_with_comparison (tree var, bool invert,
4022 enum tree_code code2, tree op2a, tree op2b)
4023{
4024 tree t;
4025 gimple stmt = SSA_NAME_DEF_STMT (var);
4026
4027 /* We can only deal with variables whose definitions are assignments. */
4028 if (!is_gimple_assign (stmt))
4029 return NULL_TREE;
4030
4031 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4032 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
4033 Then we only have to consider the simpler non-inverted cases. */
4034 if (invert)
4035 t = and_var_with_comparison_1 (stmt,
4036 invert_tree_comparison (code2, false),
4037 op2a, op2b);
4038 else
4039 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
4040 return canonicalize_bool (t, invert);
4041}
4042
4043/* Try to simplify the OR of the ssa variable defined by the assignment
4044 STMT with the comparison specified by (OP2A CODE2 OP2B).
4045 Return NULL_EXPR if we can't simplify this to a single expression. */
4046
4047static tree
4048or_var_with_comparison_1 (gimple stmt,
4049 enum tree_code code2, tree op2a, tree op2b)
4050{
4051 tree var = gimple_assign_lhs (stmt);
4052 tree true_test_var = NULL_TREE;
4053 tree false_test_var = NULL_TREE;
4054 enum tree_code innercode = gimple_assign_rhs_code (stmt);
4055
4056 /* Check for identities like (var OR (var != 0)) => true . */
4057 if (TREE_CODE (op2a) == SSA_NAME
4058 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
4059 {
4060 if ((code2 == NE_EXPR && integer_zerop (op2b))
4061 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
4062 {
4063 true_test_var = op2a;
4064 if (var == true_test_var)
4065 return var;
4066 }
4067 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
4068 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
4069 {
4070 false_test_var = op2a;
4071 if (var == false_test_var)
4072 return boolean_true_node;
4073 }
4074 }
4075
4076 /* If the definition is a comparison, recurse on it. */
4077 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
4078 {
4079 tree t = or_comparisons_1 (innercode,
4080 gimple_assign_rhs1 (stmt),
4081 gimple_assign_rhs2 (stmt),
4082 code2,
4083 op2a,
4084 op2b);
4085 if (t)
4086 return t;
4087 }
4088
4089 /* If the definition is an AND or OR expression, we may be able to
4090 simplify by reassociating. */
08410c65 4091 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
4092 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
c82d157a 4093 {
4094 tree inner1 = gimple_assign_rhs1 (stmt);
4095 tree inner2 = gimple_assign_rhs2 (stmt);
4096 gimple s;
4097 tree t;
4098 tree partial = NULL_TREE;
08410c65 4099 bool is_or = (innercode == BIT_IOR_EXPR);
c82d157a 4100
4101 /* Check for boolean identities that don't require recursive examination
4102 of inner1/inner2:
4103 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
4104 inner1 OR (inner1 AND inner2) => inner1
4105 !inner1 OR (inner1 OR inner2) => true
4106 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
4107 */
4108 if (inner1 == true_test_var)
4109 return (is_or ? var : inner1);
4110 else if (inner2 == true_test_var)
4111 return (is_or ? var : inner2);
4112 else if (inner1 == false_test_var)
4113 return (is_or
4114 ? boolean_true_node
4115 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
4116 else if (inner2 == false_test_var)
4117 return (is_or
4118 ? boolean_true_node
4119 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
4120
4121 /* Next, redistribute/reassociate the OR across the inner tests.
4122 Compute the first partial result, (inner1 OR (op2a code op2b)) */
4123 if (TREE_CODE (inner1) == SSA_NAME
4124 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
4125 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4126 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
4127 gimple_assign_rhs1 (s),
4128 gimple_assign_rhs2 (s),
4129 code2, op2a, op2b)))
4130 {
4131 /* Handle the OR case, where we are reassociating:
4132 (inner1 OR inner2) OR (op2a code2 op2b)
4133 => (t OR inner2)
4134 If the partial result t is a constant, we win. Otherwise
4135 continue on to try reassociating with the other inner test. */
ad1e8581 4136 if (is_or)
c82d157a 4137 {
4138 if (integer_onep (t))
4139 return boolean_true_node;
4140 else if (integer_zerop (t))
4141 return inner2;
4142 }
4143
4144 /* Handle the AND case, where we are redistributing:
4145 (inner1 AND inner2) OR (op2a code2 op2b)
4146 => (t AND (inner2 OR (op2a code op2b))) */
ad1e8581 4147 else if (integer_zerop (t))
4148 return boolean_false_node;
4149
4150 /* Save partial result for later. */
4151 partial = t;
c82d157a 4152 }
4153
4154 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
4155 if (TREE_CODE (inner2) == SSA_NAME
4156 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
4157 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4158 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
4159 gimple_assign_rhs1 (s),
4160 gimple_assign_rhs2 (s),
4161 code2, op2a, op2b)))
4162 {
4163 /* Handle the OR case, where we are reassociating:
4164 (inner1 OR inner2) OR (op2a code2 op2b)
ad1e8581 4165 => (inner1 OR t)
4166 => (t OR partial) */
4167 if (is_or)
c82d157a 4168 {
4169 if (integer_zerop (t))
4170 return inner1;
4171 else if (integer_onep (t))
4172 return boolean_true_node;
ad1e8581 4173 /* If both are the same, we can apply the identity
4174 (x OR x) == x. */
4175 else if (partial && same_bool_result_p (t, partial))
4176 return t;
c82d157a 4177 }
4178
4179 /* Handle the AND case, where we are redistributing:
4180 (inner1 AND inner2) OR (op2a code2 op2b)
4181 => (t AND (inner1 OR (op2a code2 op2b)))
4182 => (t AND partial) */
4183 else
4184 {
4185 if (integer_zerop (t))
4186 return boolean_false_node;
4187 else if (partial)
4188 {
4189 /* We already got a simplification for the other
4190 operand to the redistributed AND expression. The
4191 interesting case is when at least one is true.
4192 Or, if both are the same, we can apply the identity
ad1e8581 4193 (x AND x) == x. */
c82d157a 4194 if (integer_onep (partial))
4195 return t;
4196 else if (integer_onep (t))
4197 return partial;
4198 else if (same_bool_result_p (t, partial))
ad1e8581 4199 return t;
c82d157a 4200 }
4201 }
4202 }
4203 }
4204 return NULL_TREE;
4205}
4206
4207/* Try to simplify the OR of two comparisons defined by
4208 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4209 If this can be done without constructing an intermediate value,
4210 return the resulting tree; otherwise NULL_TREE is returned.
4211 This function is deliberately asymmetric as it recurses on SSA_DEFs
4212 in the first comparison but not the second. */
4213
4214static tree
4215or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4216 enum tree_code code2, tree op2a, tree op2b)
4217{
f2c1848b 4218 tree truth_type = truth_type_for (TREE_TYPE (op1a));
357d8e5d 4219
c82d157a 4220 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
4221 if (operand_equal_p (op1a, op2a, 0)
4222 && operand_equal_p (op1b, op2b, 0))
4223 {
08410c65 4224 /* Result will be either NULL_TREE, or a combined comparison. */
c82d157a 4225 tree t = combine_comparisons (UNKNOWN_LOCATION,
4226 TRUTH_ORIF_EXPR, code1, code2,
357d8e5d 4227 truth_type, op1a, op1b);
c82d157a 4228 if (t)
4229 return t;
4230 }
4231
4232 /* Likewise the swapped case of the above. */
4233 if (operand_equal_p (op1a, op2b, 0)
4234 && operand_equal_p (op1b, op2a, 0))
4235 {
08410c65 4236 /* Result will be either NULL_TREE, or a combined comparison. */
c82d157a 4237 tree t = combine_comparisons (UNKNOWN_LOCATION,
4238 TRUTH_ORIF_EXPR, code1,
4239 swap_tree_comparison (code2),
357d8e5d 4240 truth_type, op1a, op1b);
c82d157a 4241 if (t)
4242 return t;
4243 }
4244
4245 /* If both comparisons are of the same value against constants, we might
4246 be able to merge them. */
4247 if (operand_equal_p (op1a, op2a, 0)
4248 && TREE_CODE (op1b) == INTEGER_CST
4249 && TREE_CODE (op2b) == INTEGER_CST)
4250 {
4251 int cmp = tree_int_cst_compare (op1b, op2b);
4252
4253 /* If we have (op1a != op1b), we should either be able to
4254 return that or TRUE, depending on whether the constant op1b
4255 also satisfies the other comparison against op2b. */
4256 if (code1 == NE_EXPR)
4257 {
4258 bool done = true;
4259 bool val;
4260 switch (code2)
4261 {
4262 case EQ_EXPR: val = (cmp == 0); break;
4263 case NE_EXPR: val = (cmp != 0); break;
4264 case LT_EXPR: val = (cmp < 0); break;
4265 case GT_EXPR: val = (cmp > 0); break;
4266 case LE_EXPR: val = (cmp <= 0); break;
4267 case GE_EXPR: val = (cmp >= 0); break;
4268 default: done = false;
4269 }
4270 if (done)
4271 {
4272 if (val)
4273 return boolean_true_node;
4274 else
4275 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4276 }
4277 }
4278 /* Likewise if the second comparison is a != comparison. */
4279 else if (code2 == NE_EXPR)
4280 {
4281 bool done = true;
4282 bool val;
4283 switch (code1)
4284 {
4285 case EQ_EXPR: val = (cmp == 0); break;
4286 case NE_EXPR: val = (cmp != 0); break;
4287 case LT_EXPR: val = (cmp > 0); break;
4288 case GT_EXPR: val = (cmp < 0); break;
4289 case LE_EXPR: val = (cmp >= 0); break;
4290 case GE_EXPR: val = (cmp <= 0); break;
4291 default: done = false;
4292 }
4293 if (done)
4294 {
4295 if (val)
4296 return boolean_true_node;
4297 else
4298 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4299 }
4300 }
4301
4302 /* See if an equality test is redundant with the other comparison. */
4303 else if (code1 == EQ_EXPR)
4304 {
4305 bool val;
4306 switch (code2)
4307 {
4308 case EQ_EXPR: val = (cmp == 0); break;
4309 case NE_EXPR: val = (cmp != 0); break;
4310 case LT_EXPR: val = (cmp < 0); break;
4311 case GT_EXPR: val = (cmp > 0); break;
4312 case LE_EXPR: val = (cmp <= 0); break;
4313 case GE_EXPR: val = (cmp >= 0); break;
4314 default:
4315 val = false;
4316 }
4317 if (val)
4318 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4319 }
4320 else if (code2 == EQ_EXPR)
4321 {
4322 bool val;
4323 switch (code1)
4324 {
4325 case EQ_EXPR: val = (cmp == 0); break;
4326 case NE_EXPR: val = (cmp != 0); break;
4327 case LT_EXPR: val = (cmp > 0); break;
4328 case GT_EXPR: val = (cmp < 0); break;
4329 case LE_EXPR: val = (cmp >= 0); break;
4330 case GE_EXPR: val = (cmp <= 0); break;
4331 default:
4332 val = false;
4333 }
4334 if (val)
4335 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4336 }
4337
4338 /* Chose the less restrictive of two < or <= comparisons. */
4339 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
4340 && (code2 == LT_EXPR || code2 == LE_EXPR))
4341 {
4342 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
4343 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4344 else
4345 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4346 }
4347
4348 /* Likewise chose the less restrictive of two > or >= comparisons. */
4349 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
4350 && (code2 == GT_EXPR || code2 == GE_EXPR))
4351 {
4352 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
4353 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4354 else
4355 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4356 }
4357
4358 /* Check for singleton ranges. */
4359 else if (cmp == 0
4360 && ((code1 == LT_EXPR && code2 == GT_EXPR)
4361 || (code1 == GT_EXPR && code2 == LT_EXPR)))
4362 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
4363
4364 /* Check for less/greater pairs that don't restrict the range at all. */
4365 else if (cmp >= 0
4366 && (code1 == LT_EXPR || code1 == LE_EXPR)
4367 && (code2 == GT_EXPR || code2 == GE_EXPR))
4368 return boolean_true_node;
4369 else if (cmp <= 0
4370 && (code1 == GT_EXPR || code1 == GE_EXPR)
4371 && (code2 == LT_EXPR || code2 == LE_EXPR))
4372 return boolean_true_node;
4373 }
4374
4375 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4376 NAME's definition is a truth value. See if there are any simplifications
4377 that can be done against the NAME's definition. */
4378 if (TREE_CODE (op1a) == SSA_NAME
4379 && (code1 == NE_EXPR || code1 == EQ_EXPR)
4380 && (integer_zerop (op1b) || integer_onep (op1b)))
4381 {
4382 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
4383 || (code1 == NE_EXPR && integer_onep (op1b)));
4384 gimple stmt = SSA_NAME_DEF_STMT (op1a);
4385 switch (gimple_code (stmt))
4386 {
4387 case GIMPLE_ASSIGN:
4388 /* Try to simplify by copy-propagating the definition. */
4389 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
4390
4391 case GIMPLE_PHI:
4392 /* If every argument to the PHI produces the same result when
4393 ORed with the second comparison, we win.
4394 Do not do this unless the type is bool since we need a bool
4395 result here anyway. */
4396 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
4397 {
4398 tree result = NULL_TREE;
4399 unsigned i;
4400 for (i = 0; i < gimple_phi_num_args (stmt); i++)
4401 {
4402 tree arg = gimple_phi_arg_def (stmt, i);
4403
4404 /* If this PHI has itself as an argument, ignore it.
4405 If all the other args produce the same result,
4406 we're still OK. */
4407 if (arg == gimple_phi_result (stmt))
4408 continue;
4409 else if (TREE_CODE (arg) == INTEGER_CST)
4410 {
4411 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
4412 {
4413 if (!result)
4414 result = boolean_true_node;
4415 else if (!integer_onep (result))
4416 return NULL_TREE;
4417 }
4418 else if (!result)
4419 result = fold_build2 (code2, boolean_type_node,
4420 op2a, op2b);
4421 else if (!same_bool_comparison_p (result,
4422 code2, op2a, op2b))
4423 return NULL_TREE;
4424 }
ae4330d7 4425 else if (TREE_CODE (arg) == SSA_NAME
4426 && !SSA_NAME_IS_DEFAULT_DEF (arg))
c82d157a 4427 {
8a245b9d 4428 tree temp;
4429 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
4430 /* In simple cases we can look through PHI nodes,
4431 but we have to be careful with loops.
4432 See PR49073. */
4433 if (! dom_info_available_p (CDI_DOMINATORS)
4434 || gimple_bb (def_stmt) == gimple_bb (stmt)
4435 || dominated_by_p (CDI_DOMINATORS,
4436 gimple_bb (def_stmt),
4437 gimple_bb (stmt)))
4438 return NULL_TREE;
4439 temp = or_var_with_comparison (arg, invert, code2,
4440 op2a, op2b);
c82d157a 4441 if (!temp)
4442 return NULL_TREE;
4443 else if (!result)
4444 result = temp;
4445 else if (!same_bool_result_p (result, temp))
4446 return NULL_TREE;
4447 }
4448 else
4449 return NULL_TREE;
4450 }
4451 return result;
4452 }
4453
4454 default:
4455 break;
4456 }
4457 }
4458 return NULL_TREE;
4459}
4460
4461/* Try to simplify the OR of two comparisons, specified by
4462 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4463 If this can be simplified to a single expression (without requiring
4464 introducing more SSA variables to hold intermediate values),
4465 return the resulting tree. Otherwise return NULL_TREE.
4466 If the result expression is non-null, it has boolean type. */
4467
4468tree
4469maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
4470 enum tree_code code2, tree op2a, tree op2b)
4471{
4472 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4473 if (t)
4474 return t;
4475 else
4476 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4477}
1d0b727d 4478
4479
4480/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
4481
4482 Either NULL_TREE, a simplified but non-constant or a constant
4483 is returned.
4484
4485 ??? This should go into a gimple-fold-inline.h file to be eventually
4486 privatized with the single valueize function used in the various TUs
4487 to avoid the indirect function call overhead. */
4488
4489tree
4e1b3545 4490gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree),
4491 tree (*gvalueize) (tree))
1d0b727d 4492{
b768d5ee 4493 code_helper rcode;
4494 tree ops[3] = {};
4495 /* ??? The SSA propagators do not correctly deal with following SSA use-def
4496 edges if there are intermediate VARYING defs. For this reason
4497 do not follow SSA edges here even though SCCVN can technically
4498 just deal fine with that. */
4e1b3545 4499 if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize)
b768d5ee 4500 && rcode.is_tree_code ()
4501 && (TREE_CODE_LENGTH ((tree_code) rcode) == 0
4502 || ((tree_code) rcode) == ADDR_EXPR)
4503 && is_gimple_val (ops[0]))
4504 {
4505 tree res = ops[0];
4506 if (dump_file && dump_flags & TDF_DETAILS)
4507 {
4508 fprintf (dump_file, "Match-and-simplified ");
4509 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
4510 fprintf (dump_file, " to ");
4511 print_generic_expr (dump_file, res, 0);
4512 fprintf (dump_file, "\n");
4513 }
4514 return res;
4515 }
4516
1d0b727d 4517 location_t loc = gimple_location (stmt);
4518 switch (gimple_code (stmt))
4519 {
4520 case GIMPLE_ASSIGN:
4521 {
4522 enum tree_code subcode = gimple_assign_rhs_code (stmt);
4523
4524 switch (get_gimple_rhs_class (subcode))
4525 {
4526 case GIMPLE_SINGLE_RHS:
4527 {
4528 tree rhs = gimple_assign_rhs1 (stmt);
4529 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
4530
4531 if (TREE_CODE (rhs) == SSA_NAME)
4532 {
4533 /* If the RHS is an SSA_NAME, return its known constant value,
4534 if any. */
4535 return (*valueize) (rhs);
4536 }
4537 /* Handle propagating invariant addresses into address
4538 operations. */
4539 else if (TREE_CODE (rhs) == ADDR_EXPR
4540 && !is_gimple_min_invariant (rhs))
4541 {
03231f32 4542 HOST_WIDE_INT offset = 0;
1d0b727d 4543 tree base;
4544 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
4545 &offset,
4546 valueize);
4547 if (base
4548 && (CONSTANT_CLASS_P (base)
4549 || decl_address_invariant_p (base)))
4550 return build_invariant_address (TREE_TYPE (rhs),
4551 base, offset);
4552 }
4553 else if (TREE_CODE (rhs) == CONSTRUCTOR
4554 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
4555 && (CONSTRUCTOR_NELTS (rhs)
4556 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
4557 {
4558 unsigned i;
fadf62f4 4559 tree val, *vec;
1d0b727d 4560
fadf62f4 4561 vec = XALLOCAVEC (tree,
4562 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
1d0b727d 4563 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
4564 {
4565 val = (*valueize) (val);
4566 if (TREE_CODE (val) == INTEGER_CST
4567 || TREE_CODE (val) == REAL_CST
4568 || TREE_CODE (val) == FIXED_CST)
fadf62f4 4569 vec[i] = val;
1d0b727d 4570 else
4571 return NULL_TREE;
4572 }
4573
fadf62f4 4574 return build_vector (TREE_TYPE (rhs), vec);
1d0b727d 4575 }
0329fcdb 4576 if (subcode == OBJ_TYPE_REF)
4577 {
4578 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
4579 /* If callee is constant, we can fold away the wrapper. */
4580 if (is_gimple_min_invariant (val))
4581 return val;
4582 }
1d0b727d 4583
4584 if (kind == tcc_reference)
4585 {
4586 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
4587 || TREE_CODE (rhs) == REALPART_EXPR
4588 || TREE_CODE (rhs) == IMAGPART_EXPR)
4589 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
4590 {
4591 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
4592 return fold_unary_loc (EXPR_LOCATION (rhs),
4593 TREE_CODE (rhs),
4594 TREE_TYPE (rhs), val);
4595 }
4596 else if (TREE_CODE (rhs) == BIT_FIELD_REF
4597 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
4598 {
4599 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
4600 return fold_ternary_loc (EXPR_LOCATION (rhs),
4601 TREE_CODE (rhs),
4602 TREE_TYPE (rhs), val,
4603 TREE_OPERAND (rhs, 1),
4604 TREE_OPERAND (rhs, 2));
4605 }
4606 else if (TREE_CODE (rhs) == MEM_REF
4607 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
4608 {
4609 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
4610 if (TREE_CODE (val) == ADDR_EXPR
4611 && is_gimple_min_invariant (val))
4612 {
4613 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
4614 unshare_expr (val),
4615 TREE_OPERAND (rhs, 1));
4616 if (tem)
4617 rhs = tem;
4618 }
4619 }
4620 return fold_const_aggregate_ref_1 (rhs, valueize);
4621 }
4622 else if (kind == tcc_declaration)
4623 return get_symbol_constant_value (rhs);
4624 return rhs;
4625 }
4626
4627 case GIMPLE_UNARY_RHS:
2ad7e37a 4628 return NULL_TREE;
1d0b727d 4629
4630 case GIMPLE_BINARY_RHS:
4631 {
4632 /* Handle binary operators that can appear in GIMPLE form. */
4633 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
4634 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
4635
4636 /* Translate &x + CST into an invariant form suitable for
4637 further propagation. */
4638 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
4639 && TREE_CODE (op0) == ADDR_EXPR
4640 && TREE_CODE (op1) == INTEGER_CST)
4641 {
4642 tree off = fold_convert (ptr_type_node, op1);
3048a5c0 4643 return build_fold_addr_expr_loc
4644 (loc,
4645 fold_build2 (MEM_REF,
1d0b727d 4646 TREE_TYPE (TREE_TYPE (op0)),
4647 unshare_expr (op0), off));
4648 }
4649
4650 return fold_binary_loc (loc, subcode,
4651 gimple_expr_type (stmt), op0, op1);
4652 }
4653
4654 case GIMPLE_TERNARY_RHS:
4655 {
4656 /* Handle ternary operators that can appear in GIMPLE form. */
4657 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
4658 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
4659 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
4660
ce993cc2 4661 /* Fold embedded expressions in ternary codes. */
4662 if ((subcode == COND_EXPR
4663 || subcode == VEC_COND_EXPR)
4664 && COMPARISON_CLASS_P (op0))
4665 {
4666 tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
4667 tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
4668 tree tem = fold_binary_loc (loc, TREE_CODE (op0),
4669 TREE_TYPE (op0), op00, op01);
4670 if (tem)
4671 op0 = tem;
4672 }
4673
1d0b727d 4674 return fold_ternary_loc (loc, subcode,
4675 gimple_expr_type (stmt), op0, op1, op2);
4676 }
4677
4678 default:
4679 gcc_unreachable ();
4680 }
4681 }
4682
4683 case GIMPLE_CALL:
4684 {
fb049fba 4685 tree fn;
4686
4687 if (gimple_call_internal_p (stmt))
137559b2 4688 {
4689 enum tree_code subcode = ERROR_MARK;
4690 switch (gimple_call_internal_fn (stmt))
4691 {
4692 case IFN_UBSAN_CHECK_ADD:
4693 subcode = PLUS_EXPR;
4694 break;
4695 case IFN_UBSAN_CHECK_SUB:
4696 subcode = MINUS_EXPR;
4697 break;
4698 case IFN_UBSAN_CHECK_MUL:
4699 subcode = MULT_EXPR;
4700 break;
4701 default:
4702 return NULL_TREE;
4703 }
ce8e6661 4704 tree arg0 = gimple_call_arg (stmt, 0);
4705 tree arg1 = gimple_call_arg (stmt, 1);
4706 tree op0 = (*valueize) (arg0);
4707 tree op1 = (*valueize) (arg1);
137559b2 4708
4709 if (TREE_CODE (op0) != INTEGER_CST
4710 || TREE_CODE (op1) != INTEGER_CST)
ce8e6661 4711 {
4712 switch (subcode)
4713 {
4714 case MULT_EXPR:
4715 /* x * 0 = 0 * x = 0 without overflow. */
4716 if (integer_zerop (op0) || integer_zerop (op1))
4717 return build_zero_cst (TREE_TYPE (arg0));
4718 break;
4719 case MINUS_EXPR:
4720 /* y - y = 0 without overflow. */
4721 if (operand_equal_p (op0, op1, 0))
4722 return build_zero_cst (TREE_TYPE (arg0));
4723 break;
4724 default:
4725 break;
4726 }
4727 }
4728 tree res
4729 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
137559b2 4730 if (res
4731 && TREE_CODE (res) == INTEGER_CST
4732 && !TREE_OVERFLOW (res))
4733 return res;
4734 return NULL_TREE;
4735 }
fb049fba 4736
4737 fn = (*valueize) (gimple_call_fn (stmt));
1d0b727d 4738 if (TREE_CODE (fn) == ADDR_EXPR
4739 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
8ded4352 4740 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
4741 && gimple_builtin_call_types_compatible_p (stmt,
4742 TREE_OPERAND (fn, 0)))
1d0b727d 4743 {
4744 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
4745 tree call, retval;
4746 unsigned i;
4747 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4748 args[i] = (*valueize) (gimple_call_arg (stmt, i));
4749 call = build_call_array_loc (loc,
4750 gimple_call_return_type (stmt),
4751 fn, gimple_call_num_args (stmt), args);
4752 retval = fold_call_expr (EXPR_LOCATION (call), call, false);
4753 if (retval)
8ded4352 4754 {
4755 /* fold_call_expr wraps the result inside a NOP_EXPR. */
4756 STRIP_NOPS (retval);
4757 retval = fold_convert (gimple_call_return_type (stmt), retval);
4758 }
1d0b727d 4759 return retval;
4760 }
4761 return NULL_TREE;
4762 }
4763
4764 default:
4765 return NULL_TREE;
4766 }
4767}
4768
4769/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
4770 Returns NULL_TREE if folding to a constant is not possible, otherwise
4771 returns a constant according to is_gimple_min_invariant. */
4772
4773tree
4774gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
4775{
4776 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
4777 if (res && is_gimple_min_invariant (res))
4778 return res;
4779 return NULL_TREE;
4780}
4781
4782
4783/* The following set of functions are supposed to fold references using
4784 their constant initializers. */
4785
4786static tree fold_ctor_reference (tree type, tree ctor,
4787 unsigned HOST_WIDE_INT offset,
a65b88bf 4788 unsigned HOST_WIDE_INT size, tree);
1d0b727d 4789
4790/* See if we can find constructor defining value of BASE.
4791 When we know the consructor with constant offset (such as
4792 base is array[40] and we do know constructor of array), then
4793 BIT_OFFSET is adjusted accordingly.
4794
4795 As a special case, return error_mark_node when constructor
4796 is not explicitly available, but it is known to be zero
4797 such as 'static const int a;'. */
4798static tree
4799get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
4800 tree (*valueize)(tree))
4801{
4802 HOST_WIDE_INT bit_offset2, size, max_size;
4803 if (TREE_CODE (base) == MEM_REF)
4804 {
4805 if (!integer_zerop (TREE_OPERAND (base, 1)))
4806 {
e913b5cd 4807 if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
1d0b727d 4808 return NULL_TREE;
e913b5cd 4809 *bit_offset += (mem_ref_offset (base).to_short_addr ()
1d0b727d 4810 * BITS_PER_UNIT);
4811 }
4812
4813 if (valueize
4814 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
4815 base = valueize (TREE_OPERAND (base, 0));
4816 if (!base || TREE_CODE (base) != ADDR_EXPR)
4817 return NULL_TREE;
4818 base = TREE_OPERAND (base, 0);
4819 }
4820
4821 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
4822 DECL_INITIAL. If BASE is a nested reference into another
4823 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
4824 the inner reference. */
4825 switch (TREE_CODE (base))
4826 {
4827 case VAR_DECL:
1d0b727d 4828 case CONST_DECL:
df8d3e89 4829 {
4830 tree init = ctor_for_folding (base);
4831
a04e8d62 4832 /* Our semantic is exact opposite of ctor_for_folding;
df8d3e89 4833 NULL means unknown, while error_mark_node is 0. */
4834 if (init == error_mark_node)
4835 return NULL_TREE;
4836 if (!init)
4837 return error_mark_node;
4838 return init;
4839 }
1d0b727d 4840
4841 case ARRAY_REF:
4842 case COMPONENT_REF:
4843 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
4844 if (max_size == -1 || size != max_size)
4845 return NULL_TREE;
4846 *bit_offset += bit_offset2;
4847 return get_base_constructor (base, bit_offset, valueize);
4848
4849 case STRING_CST:
4850 case CONSTRUCTOR:
4851 return base;
4852
4853 default:
4854 return NULL_TREE;
4855 }
4856}
4857
/* CTOR is CONSTRUCTOR of an array type.  Fold reference of type TYPE and size
   SIZE to the memory at bit OFFSET.  FROM_DECL, if non-NULL, is the decl the
   constructor ultimately comes from and is passed down to
   fold_ctor_reference.  Returns the folded constant, a zero constant when
   the accessed memory is not explicitly initialized, or NULL_TREE when the
   access cannot be resolved to a single array element.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size,
			   tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;
  offset_int low_bound;
  offset_int elt_size;
  offset_int index, max_index;
  offset_int access_index;
  tree domain_type = NULL_TREE, index_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects makes no sense.  */
      gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
      index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = 0;
  /* Static constructors for variably sized objects makes no sense.  */
  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
	      == INTEGER_CST);
  elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* We can handle only constantly sized accesses that are known to not
     be larger than size of array element.  */
  if (!TYPE_SIZE_UNIT (type)
      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
      || wi::lts_p (elt_size, wi::to_offset (TYPE_SIZE_UNIT (type)))
      || elt_size == 0)
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
				 elt_size);
  access_index += low_bound;
  /* Reduce the index into the precision/signedness of the domain's
     index type so it compares correctly against constructor indices.  */
  if (index_type)
    access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
			    TYPE_SIGN (index_type));

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  /* See if the array field is large enough to span whole access.  We do not
     care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
    return NULL_TREE;

  /* Start one below LOW_BOUND so the "next index after previous one"
     rule in the loop below yields LOW_BOUND for the first element.  */
  index = low_bound - 1;
  if (index_type)
    index = wi::ext (index, TYPE_PRECISION (index_type),
		     TYPE_SIGN (index_type));

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
    {
      /* Array constructor might explicitly set index, or specify range
	 or leave index NULL meaning that it is next index after previous
	 one.  */
      if (cfield)
	{
	  if (TREE_CODE (cfield) == INTEGER_CST)
	    max_index = index = wi::to_offset (cfield);
	  else
	    {
	      gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
	      index = wi::to_offset (TREE_OPERAND (cfield, 0));
	      max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
	    }
	}
      else
	{
	  index += 1;
	  if (index_type)
	    index = wi::ext (index, TYPE_PRECISION (index_type),
			     TYPE_SIGN (index_type));
	  max_index = index;
	}

      /* Do we have match?  */
      if (wi::cmpu (access_index, index) >= 0
	  && wi::cmpu (access_index, max_index) <= 0)
	return fold_ctor_reference (type, cval, inner_offset, size,
				    from_decl);
    }
  /* When memory is not explicitly mentioned in constructor,
     it is 0 (or out of range).  */
  return build_zero_cst (type);
}
4957
/* CTOR is CONSTRUCTOR of an aggregate or vector.
   Fold reference of type TYPE and size SIZE to the memory at bit OFFSET.
   FROM_DECL, if non-NULL, is the decl the constructor comes from and is
   passed through to fold_ctor_reference.  Returns the folded constant,
   a zero constant for memory no initializer covers, or NULL_TREE when
   the access does not fit inside a single field.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);
      offset_int bitoffset;
      offset_int bitoffset_end, access_end;

      /* Variable sized objects in static constructors makes no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      bitoffset = (wi::to_offset (field_offset)
		   + wi::lshift (wi::to_offset (byte_offset),
				 LOG2_BITS_PER_UNIT));
      /* Compute bit offset where the field ends.  */
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + wi::to_offset (field_size);
      else
	/* Flexible array member: the field has no known end.  */
	bitoffset_end = 0;

      access_end = offset_int (offset) + size;

      /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
	 [BITOFFSET, BITOFFSET_END)?  */
      if (wi::cmps (access_end, bitoffset) > 0
	  && (field_size == NULL_TREE
	      || wi::lts_p (offset, bitoffset_end)))
	{
	  offset_int inner_offset = offset_int (offset) - bitoffset;
	  /* We do have overlap.  Now see if field is large enough to
	     cover the access.  Give up for accesses spanning multiple
	     fields.  */
	  if (wi::cmps (access_end, bitoffset_end) > 0)
	    return NULL_TREE;
	  /* The access starts before the field does; punt as well.  */
	  if (wi::lts_p (offset, bitoffset))
	    return NULL_TREE;
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl);
	}
    }
  /* When memory is not explicitly mentioned in constructor, it is 0.  */
  return build_zero_cst (type);
}
5021
/* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
   to the memory at bit OFFSET.  FROM_DECL, if non-NULL, is the decl the
   constructor ultimately comes from; it is forwarded when canonicalizing
   constructor values.  Returns the folded constant or NULL_TREE when
   nothing could be determined.  */

static tree
fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
		     unsigned HOST_WIDE_INT size, tree from_decl)
{
  tree ret;

  /* We found the field with exact match.  */
  if (useless_type_conversion_p (type, TREE_TYPE (ctor))
      && !offset)
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && !compare_tree_int (TYPE_SIZE (type), size)
      && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
      if (ret)
	STRIP_NOPS (ret);
      return ret;
    }
  /* For constants and byte-aligned/sized reads try to go through
     native_encode/interpret.  */
  if (CONSTANT_CLASS_P (ctor)
      && BITS_PER_UNIT == 8
      && offset % BITS_PER_UNIT == 0
      && size % BITS_PER_UNIT == 0
      && size <= MAX_BITSIZE_MODE_ANY_MODE)
    {
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      if (native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
			      offset / BITS_PER_UNIT) > 0)
	return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
    }
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {
      /* Dispatch on the shape of the constructed type.  */
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
	  || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
	return fold_array_ctor_reference (type, ctor, offset, size,
					  from_decl);
      else
	return fold_nonarray_ctor_reference (type, ctor, offset, size,
					     from_decl);
    }

  return NULL_TREE;
}
5076
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  HOST_WIDE_INT offset, size, max_size;
  tree tem;

  /* Never fold through a volatile reference.  */
  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && TREE_CODE (idx) == INTEGER_CST)
	{
	  tree low_bound, unit_size;

	  /* If the resulting bit-offset is constant, track it.  */
	  if ((low_bound = array_ref_low_bound (t),
	       TREE_CODE (low_bound) == INTEGER_CST)
	      && (unit_size = array_ref_element_size (t),
		  tree_fits_uhwi_p (unit_size)))
	    {
	      offset_int woffset
		= wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
			    TYPE_PRECISION (TREE_TYPE (idx)));

	      if (wi::fits_shwi_p (woffset))
		{
		  offset = woffset.to_shwi ();
		  /* TODO: This code seems wrong, multiply then check
		     to see if it fits.  */
		  offset *= tree_to_uhwi (unit_size);
		  offset *= BITS_PER_UNIT;

		  base = TREE_OPERAND (t, 0);
		  ctor = get_base_constructor (base, &offset, valueize);
		  /* Empty constructor.  Always fold to 0.  */
		  if (ctor == error_mark_node)
		    return build_zero_cst (TREE_TYPE (t));
		  /* Out of bound array access.  Value is undefined,
		     but don't fold.  */
		  if (offset < 0)
		    return NULL_TREE;
		  /* We can not determine ctor.  */
		  if (!ctor)
		    return NULL_TREE;
		  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					      tree_to_uhwi (unit_size)
					      * BITS_PER_UNIT,
					      base);
		}
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (max_size == -1 || max_size != size)
	return NULL_TREE;
      /* We can not determine ctor.  */
      if (!ctor)
	return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (offset < 0)
	return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
				  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	/* Fold the complex operand first, then extract the wanted part.  */
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
5193
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates; NULL_TREE
   otherwise.  Convenience wrapper around fold_const_aggregate_ref_1
   with no SSA-name valueization.  */

tree
fold_const_aggregate_ref (tree t)
{
  return fold_const_aggregate_ref_1 (t, NULL);
}
0b7ad900 5199
/* Lookup virtual method with index TOKEN in a virtual table V
   at OFFSET.
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by non-C++ produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
				   tree v,
				   unsigned HOST_WIDE_INT offset,
				   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  /* Be optimistic by default; the failure paths below reset this.  */
  if (can_refer)
    *can_refer = true;

  /* First of all double check we have virtual table.  */
  if (TREE_CODE (v) != VAR_DECL
      || !DECL_VIRTUAL_P (v))
    {
      /* Asserted to only happen during LTO.  */
      gcc_assert (in_lto_p);
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that ctor seem unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      gcc_assert (in_lto_p);
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  /* Convert OFFSET to bits and add the slot selected by TOKEN.  */
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
			       offset, size, NULL);
     but in a constant time.  We expect that frontend produced a simple
     array without indexed initializers.  */

  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));

  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* This code makes an assumption that there are no
     indexed fields produced by C++ FE, so we can directly index the array.  */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
      STRIP_NOPS (fn);
    }
  else
    fn = NULL;

  /* For type inconsistent program we may end up looking up virtual method
     in virtual table that does not contain TOKEN entries.  We may overrun
     the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When cgraph node is missing and function is not public, we cannot
	 devirtualize.  This can happen in WHOPR when the actual method
	 ends up in other partition, because we found devirtualization
	 possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
	{
	  if (can_refer)
	    {
	      *can_refer = false;
	      return fn;
	    }
	  return NULL_TREE;
	}
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_node::get_create (fn);

  return fn;
}
5311
/* Return a declaration of a function which an OBJ_TYPE_REF references.  TOKEN
   is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by non-C++ produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
				  bool *can_refer)
{
  unsigned HOST_WIDE_INT offset;
  tree v;

  v = BINFO_VTABLE (known_binfo);
  /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;

  /* Split the vtable pointer value into the vtable decl and an offset
     into it; bail out (and report non-referability) if that fails.  */
  if (!vtable_pointer_value_to_vtable (v, &v, &offset))
    {
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
}
5340
/* Return true iff VAL is a gimple expression that is known to be
   non-negative.  Restricted to floating-point inputs.  */

bool
gimple_val_nonnegative_real_p (tree val)
{
  gimple def_stmt;

  gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));

  /* Use existing logic for non-gimple trees.  */
  if (tree_expr_nonnegative_p (val))
    return true;

  if (TREE_CODE (val) != SSA_NAME)
    return false;

  /* Currently we look only at the immediately defining statement
     to make this determination, since recursion on defining
     statements of operands can lead to quadratic behavior in the
     worst case.  This is expected to catch almost all occurrences
     in practice.  It would be possible to implement limited-depth
     recursion if important cases are lost.  Alternatively, passes
     that need this information (such as the pow/powi lowering code
     in the cse_sincos pass) could be revised to provide it through
     dataflow propagation.  */

  def_stmt = SSA_NAME_DEF_STMT (val);

  if (is_gimple_assign (def_stmt))
    {
      tree op0, op1;

      /* See fold-const.c:tree_expr_nonnegative_p for additional
	 cases that could be handled with recursion.  */

      switch (gimple_assign_rhs_code (def_stmt))
	{
	case ABS_EXPR:
	  /* Always true for floating-point operands.  */
	  return true;

	case MULT_EXPR:
	  /* True if the two operands are identical (since we are
	     restricted to floating-point inputs).  */
	  op0 = gimple_assign_rhs1 (def_stmt);
	  op1 = gimple_assign_rhs2 (def_stmt);

	  if (op0 == op1
	      || operand_equal_p (op0, op1, 0))
	    return true;

	  /* Fall through: a product of distinct operands is not known
	     to be non-negative.  */

	default:
	  return false;
	}
    }
  else if (is_gimple_call (def_stmt))
    {
      tree fndecl = gimple_call_fndecl (def_stmt);
      if (fndecl
	  && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	{
	  tree arg1;

	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    /* Builtins treated as always yielding non-negative results.  */
	    CASE_FLT_FN (BUILT_IN_ACOS):
	    CASE_FLT_FN (BUILT_IN_ACOSH):
	    CASE_FLT_FN (BUILT_IN_CABS):
	    CASE_FLT_FN (BUILT_IN_COSH):
	    CASE_FLT_FN (BUILT_IN_ERFC):
	    CASE_FLT_FN (BUILT_IN_EXP):
	    CASE_FLT_FN (BUILT_IN_EXP10):
	    CASE_FLT_FN (BUILT_IN_EXP2):
	    CASE_FLT_FN (BUILT_IN_FABS):
	    CASE_FLT_FN (BUILT_IN_FDIM):
	    CASE_FLT_FN (BUILT_IN_HYPOT):
	    CASE_FLT_FN (BUILT_IN_POW10):
	      return true;

	    CASE_FLT_FN (BUILT_IN_SQRT):
	      /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
		 nonnegative inputs.  */
	      if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
		return true;

	      break;

	    CASE_FLT_FN (BUILT_IN_POWI):
	      /* True if the second argument is an even integer.  */
	      arg1 = gimple_call_arg (def_stmt, 1);

	      if (TREE_CODE (arg1) == INTEGER_CST
		  && (TREE_INT_CST_LOW (arg1) & 1) == 0)
		return true;

	      break;

	    CASE_FLT_FN (BUILT_IN_POW):
	      /* True if the second argument is an even integer-valued
		 real.  */
	      arg1 = gimple_call_arg (def_stmt, 1);

	      if (TREE_CODE (arg1) == REAL_CST)
		{
		  REAL_VALUE_TYPE c;
		  HOST_WIDE_INT n;

		  c = TREE_REAL_CST (arg1);
		  n = real_to_integer (&c);

		  if ((n & 1) == 0)
		    {
		      REAL_VALUE_TYPE cint;
		      real_from_integer (&cint, VOIDmode, n, SIGNED);
		      if (real_identical (&c, &cint))
			return true;
		    }
		}

	      break;

	    default:
	      return false;
	    }
	}
    }

  return false;
}
09c41ee4 5471
/* Given a pointer value OP0, return a simplified version of an
   indirection through OP0, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view.  */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
	return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (TREE_CODE (min_val) == INTEGER_CST)
	    return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
	  unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
	  tree part_width = TYPE_SIZE (type);
	  unsigned HOST_WIDE_INT part_widthi
	    = tree_to_shwi (part_width) / BITS_PER_UNIT;
	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	  tree index = bitsize_int (indexi);
	  /* Only fold accesses that stay within the vector.  */
	  if (offset / part_widthi
	      < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
	    return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
				part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
	{
	  tree size = TYPE_SIZE_UNIT (type);
	  if (tree_int_cst_equal (size, off))
	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
	}

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    wide_int_to_tree (ptype, off));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      /* Recurse on the pointer itself; fall back to an INDIRECT_REF
	 if it cannot be simplified further.  */
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
f6a34e3f 5591
5592/* Return true if CODE is an operation that when operating on signed
5593 integer types involves undefined behavior on overflow and the
5594 operation can be expressed with unsigned arithmetic. */
5595
5596bool
5597arith_code_with_undefined_signed_overflow (tree_code code)
5598{
5599 switch (code)
5600 {
5601 case PLUS_EXPR:
5602 case MINUS_EXPR:
5603 case MULT_EXPR:
5604 case NEGATE_EXPR:
5605 case POINTER_PLUS_EXPR:
5606 return true;
5607 default:
5608 return false;
5609 }
5610}
5611
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  /* Convert each RHS operand (ops 1 and up; op 0 is the lhs) to the
     unsigned type, accumulating any conversion statements.  */
  for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
    {
      gimple_seq stmts2 = NULL;
      gimple_set_op (stmt, i,
		     force_gimple_operand (fold_convert (type,
							 gimple_op (stmt, i)),
					   &stmts2, true, NULL_TREE));
      gimple_seq_add_seq (&stmts, stmts2);
    }
  /* Compute the result into a fresh SSA name of the unsigned type.  */
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  /* POINTER_PLUS_EXPR does not exist on unsigned integers; the
     equivalent operation there is plain PLUS_EXPR.  */
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  /* Convert the unsigned result back to the original type of LHS.  */
  gimple cvt = gimple_build_assign_with_ops (NOP_EXPR, lhs,
					     gimple_assign_lhs (stmt));
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
65f73697 5652
2165588a 5653
5654/* Build the expression CODE OP0 of type TYPE with location LOC,
5655 simplifying it first if possible using VALUEIZE if not NULL.
5656 OP0 is expected to be valueized already. Returns the built
5657 expression value and appends statements possibly defining it
5658 to SEQ. */
5659
5660tree
5661gimple_build (gimple_seq *seq, location_t loc,
5662 enum tree_code code, tree type, tree op0,
5663 tree (*valueize)(tree))
5664{
5665 tree res = gimple_simplify (code, type, op0, seq, valueize);
5666 if (!res)
5667 {
5668 if (gimple_in_ssa_p (cfun))
5669 res = make_ssa_name (type, NULL);
5670 else
5671 res = create_tmp_reg (type, NULL);
5672 gimple stmt;
5673 if (code == REALPART_EXPR
5674 || code == IMAGPART_EXPR
5675 || code == VIEW_CONVERT_EXPR)
5676 stmt = gimple_build_assign_with_ops (code, res,
806413d2 5677 build1 (code, type, op0));
2165588a 5678 else
806413d2 5679 stmt = gimple_build_assign_with_ops (code, res, op0);
2165588a 5680 gimple_set_location (stmt, loc);
5681 gimple_seq_add_stmt_without_update (seq, stmt);
5682 }
5683 return res;
5684}
5685
5686/* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
5687 simplifying it first if possible using VALUEIZE if not NULL.
5688 OP0 and OP1 are expected to be valueized already. Returns the built
5689 expression value and appends statements possibly defining it
5690 to SEQ. */
5691
5692tree
5693gimple_build (gimple_seq *seq, location_t loc,
5694 enum tree_code code, tree type, tree op0, tree op1,
5695 tree (*valueize)(tree))
5696{
5697 tree res = gimple_simplify (code, type, op0, op1, seq, valueize);
5698 if (!res)
5699 {
5700 if (gimple_in_ssa_p (cfun))
5701 res = make_ssa_name (type, NULL);
5702 else
5703 res = create_tmp_reg (type, NULL);
5704 gimple stmt = gimple_build_assign_with_ops (code, res, op0, op1);
5705 gimple_set_location (stmt, loc);
5706 gimple_seq_add_stmt_without_update (seq, stmt);
5707 }
5708 return res;
5709}
5710
5711/* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
5712 simplifying it first if possible using VALUEIZE if not NULL.
5713 OP0, OP1 and OP2 are expected to be valueized already. Returns the built
5714 expression value and appends statements possibly defining it
5715 to SEQ. */
5716
5717tree
5718gimple_build (gimple_seq *seq, location_t loc,
5719 enum tree_code code, tree type, tree op0, tree op1, tree op2,
5720 tree (*valueize)(tree))
5721{
5722 tree res = gimple_simplify (code, type, op0, op1, op2,
5723 seq, valueize);
5724 if (!res)
5725 {
5726 if (gimple_in_ssa_p (cfun))
5727 res = make_ssa_name (type, NULL);
5728 else
5729 res = create_tmp_reg (type, NULL);
5730 gimple stmt;
5731 if (code == BIT_FIELD_REF)
5732 stmt = gimple_build_assign_with_ops (code, res,
5733 build3 (BIT_FIELD_REF, type,
806413d2 5734 op0, op1, op2));
2165588a 5735 else
5736 stmt = gimple_build_assign_with_ops (code, res, op0, op1, op2);
5737 gimple_set_location (stmt, loc);
5738 gimple_seq_add_stmt_without_update (seq, stmt);
5739 }
5740 return res;
5741}
5742
5743/* Build the call FN (ARG0) with a result of type TYPE
5744 (or no result if TYPE is void) with location LOC,
5745 simplifying it first if possible using VALUEIZE if not NULL.
5746 ARG0 is expected to be valueized already. Returns the built
5747 expression value (or NULL_TREE if TYPE is void) and appends
5748 statements possibly defining it to SEQ. */
5749
5750tree
5751gimple_build (gimple_seq *seq, location_t loc,
5752 enum built_in_function fn, tree type, tree arg0,
5753 tree (*valueize)(tree))
5754{
5755 tree res = gimple_simplify (fn, type, arg0, seq, valueize);
5756 if (!res)
5757 {
5758 tree decl = builtin_decl_implicit (fn);
5759 gimple stmt = gimple_build_call (decl, 1, arg0);
5760 if (!VOID_TYPE_P (type))
5761 {
5762 if (gimple_in_ssa_p (cfun))
5763 res = make_ssa_name (type, NULL);
5764 else
5765 res = create_tmp_reg (type, NULL);
5766 gimple_call_set_lhs (stmt, res);
5767 }
5768 gimple_set_location (stmt, loc);
5769 gimple_seq_add_stmt_without_update (seq, stmt);
5770 }
5771 return res;
5772}
5773
5774/* Build the call FN (ARG0, ARG1) with a result of type TYPE
5775 (or no result if TYPE is void) with location LOC,
5776 simplifying it first if possible using VALUEIZE if not NULL.
5777 ARG0 is expected to be valueized already. Returns the built
5778 expression value (or NULL_TREE if TYPE is void) and appends
5779 statements possibly defining it to SEQ. */
5780
5781tree
5782gimple_build (gimple_seq *seq, location_t loc,
5783 enum built_in_function fn, tree type, tree arg0, tree arg1,
5784 tree (*valueize)(tree))
5785{
5786 tree res = gimple_simplify (fn, type, arg0, arg1, seq, valueize);
5787 if (!res)
5788 {
5789 tree decl = builtin_decl_implicit (fn);
5790 gimple stmt = gimple_build_call (decl, 2, arg0, arg1);
5791 if (!VOID_TYPE_P (type))
5792 {
5793 if (gimple_in_ssa_p (cfun))
5794 res = make_ssa_name (type, NULL);
5795 else
5796 res = create_tmp_reg (type, NULL);
5797 gimple_call_set_lhs (stmt, res);
5798 }
5799 gimple_set_location (stmt, loc);
5800 gimple_seq_add_stmt_without_update (seq, stmt);
5801 }
5802 return res;
5803}
5804
5805/* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
5806 (or no result if TYPE is void) with location LOC,
5807 simplifying it first if possible using VALUEIZE if not NULL.
5808 ARG0 is expected to be valueized already. Returns the built
5809 expression value (or NULL_TREE if TYPE is void) and appends
5810 statements possibly defining it to SEQ. */
5811
5812tree
5813gimple_build (gimple_seq *seq, location_t loc,
5814 enum built_in_function fn, tree type,
5815 tree arg0, tree arg1, tree arg2,
5816 tree (*valueize)(tree))
5817{
5818 tree res = gimple_simplify (fn, type, arg0, arg1, arg2, seq, valueize);
5819 if (!res)
5820 {
5821 tree decl = builtin_decl_implicit (fn);
5822 gimple stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
5823 if (!VOID_TYPE_P (type))
5824 {
5825 if (gimple_in_ssa_p (cfun))
5826 res = make_ssa_name (type, NULL);
5827 else
5828 res = create_tmp_reg (type, NULL);
5829 gimple_call_set_lhs (stmt, res);
5830 }
5831 gimple_set_location (stmt, loc);
5832 gimple_seq_add_stmt_without_update (seq, stmt);
5833 }
5834 return res;
5835}
5836
5837/* Build the conversion (TYPE) OP with a result of type TYPE
5838 with location LOC if such conversion is neccesary in GIMPLE,
5839 simplifying it first.
5840 Returns the built expression value and appends
5841 statements possibly defining it to SEQ. */
65f73697 5842
5843tree
5844gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
5845{
5846 if (useless_type_conversion_p (type, TREE_TYPE (op)))
5847 return op;
2165588a 5848 return gimple_build (seq, loc, NOP_EXPR, type, op);
65f73697 5849}