]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/gimple-fold.c
Fix warnings, update source code.
[thirdparty/gcc.git] / gcc / gimple-fold.c
CommitLineData
cbdd87d4 1/* Statement simplification on GIMPLE.
818ab71a 2 Copyright (C) 2010-2016 Free Software Foundation, Inc.
cbdd87d4
RG
3 Split out from tree-ssa-ccp.c.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it
8under the terms of the GNU General Public License as published by the
9Free Software Foundation; either version 3, or (at your option) any
10later version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT
13ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
c7131fb2 24#include "backend.h"
957060b5
AM
25#include "target.h"
26#include "rtl.h"
cbdd87d4 27#include "tree.h"
c7131fb2 28#include "gimple.h"
957060b5 29#include "predict.h"
c7131fb2 30#include "ssa.h"
957060b5
AM
31#include "cgraph.h"
32#include "gimple-pretty-print.h"
c7131fb2 33#include "fold-const.h"
36566b39
PK
34#include "stmt.h"
35#include "expr.h"
36#include "stor-layout.h"
7ee2468b 37#include "dumpfile.h"
2fb9a547 38#include "gimple-fold.h"
45b0be94 39#include "gimplify.h"
5be5c238 40#include "gimple-iterator.h"
442b4905
AM
41#include "tree-into-ssa.h"
42#include "tree-dfa.h"
7a300452 43#include "tree-ssa.h"
cbdd87d4 44#include "tree-ssa-propagate.h"
450ad0cd 45#include "ipa-utils.h"
4484a35a 46#include "tree-ssa-address.h"
862d0b35 47#include "langhooks.h"
19e51b40 48#include "gimplify-me.h"
2b5f0895 49#include "dbgcnt.h"
9b2b7279 50#include "builtins.h"
e0ee10ed
RB
51#include "tree-eh.h"
52#include "gimple-match.h"
48126138 53#include "gomp-constants.h"
f869c12f 54#include "optabs-query.h"
451e8dae 55#include "omp-low.h"
3de2a40e 56#include "ipa-chkp.h"
f869c12f 57
cbdd87d4 58
/* Return true when DECL can be referenced from current unit.
   FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
	In this case we have static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred by external vtables that
	we devirtualize only during final compilation stage.
	At this time we already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  /* Abstract (debug-only) decls never have a body to reference.  */
  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
    return true;

  /* Static objects can be referred only if they were not optimized out yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      /* A function fully inlined away has no body left to reference.  */
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var or var that has been optimized out.  */
  if (!from_decl
      || TREE_CODE (from_decl) != VAR_DECL
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When function is public, we always can introduce new reference.
     Exception are the COMDAT functions where introducing a direct
     reference imply need to include function body in the current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have COMDAT.  We are going to check if we still have definition
     or if the definition is going to be output in other partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when corresponding vtable is output.
     This is however not possible - ABI specify that COMDATs are output in
     units where they are used and when the other unit was compiled with LTO
     it is possible that vtable was kept public while the function itself
     was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->global.inlined_to;
}
158
/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it into
   acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL.
   Returns the canonicalized value, or NULL_TREE when the value cannot be
   legitimately referenced from the current unit.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  /* Rewrite PTR p+ CST into &MEM_REF[ptr, CST] so a constant offset from
     an invariant pointer becomes a single ADDR_EXPR invariant.  */
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  /* Replace the compound literal by its underlying decl so the
	     address refers to a real object.  */
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      /* Addresses of decls we cannot refer to from this unit must not be
	 folded in (see can_refer_decl_in_current_unit_p).  */
      if ((TREE_CODE (base) == VAR_DECL
	   || TREE_CODE (base) == FUNCTION_DECL)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_TYPE (base) == error_mark_node)
	return NULL_TREE;
      if (TREE_CODE (base) == VAR_DECL)
	/* Taking the variable's address; record that fact.  */
	TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      /* Preserve the type callers saw before the STRIP_NOPS above.  */
      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  /* Constants with TREE_OVERFLOW are not valid GIMPLE invariants.  */
  if (TREE_OVERFLOW_P (cval))
    return drop_tree_overflow (cval);
  return orig_cval;
}
cbdd87d4
RG
221
222/* If SYM is a constant variable with known value, return the value.
223 NULL_TREE is returned otherwise. */
224
225tree
226get_symbol_constant_value (tree sym)
227{
6a6dac52
JH
228 tree val = ctor_for_folding (sym);
229 if (val != error_mark_node)
cbdd87d4 230 {
cbdd87d4
RG
231 if (val)
232 {
9d60be38 233 val = canonicalize_constructor_val (unshare_expr (val), sym);
1389294c 234 if (val && is_gimple_min_invariant (val))
17f39a39 235 return val;
1389294c
JH
236 else
237 return NULL_TREE;
cbdd87d4
RG
238 }
239 /* Variables declared 'const' without an initializer
240 have zero as the initializer if they may not be
241 overridden at link or run time. */
242 if (!val
b8a8c472 243 && is_gimple_reg_type (TREE_TYPE (sym)))
e8160c9a 244 return build_zero_cst (TREE_TYPE (sym));
cbdd87d4
RG
245 }
246
247 return NULL_TREE;
248}
249
250
cbdd87d4
RG
251
252/* Subroutine of fold_stmt. We perform several simplifications of the
253 memory reference tree EXPR and make sure to re-gimplify them properly
254 after propagation of constant addresses. IS_LHS is true if the
255 reference is supposed to be an lvalue. */
256
257static tree
258maybe_fold_reference (tree expr, bool is_lhs)
259{
17f39a39 260 tree result;
cbdd87d4 261
f0eddb90
RG
262 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
263 || TREE_CODE (expr) == REALPART_EXPR
264 || TREE_CODE (expr) == IMAGPART_EXPR)
265 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
266 return fold_unary_loc (EXPR_LOCATION (expr),
267 TREE_CODE (expr),
268 TREE_TYPE (expr),
269 TREE_OPERAND (expr, 0));
270 else if (TREE_CODE (expr) == BIT_FIELD_REF
271 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
272 return fold_ternary_loc (EXPR_LOCATION (expr),
273 TREE_CODE (expr),
274 TREE_TYPE (expr),
275 TREE_OPERAND (expr, 0),
276 TREE_OPERAND (expr, 1),
277 TREE_OPERAND (expr, 2));
278
f0eddb90
RG
279 if (!is_lhs
280 && (result = fold_const_aggregate_ref (expr))
281 && is_gimple_min_invariant (result))
282 return result;
cbdd87d4 283
cbdd87d4
RG
284 return NULL_TREE;
285}
286
287
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	/* Clobbers carry no value worth folding.  */
	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    /* Try to devirtualize: if the polymorphic call target set is
	       complete and has at most one member, resolve the reference.  */
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			location_t loc = gimple_location_safe (stmt);
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We can not use __builtin_unreachable here because it
			 can not have address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }
	  }

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    /* &MEM[p, 0] simplifies to p (converted to the rhs type).  */
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));

	    if (result)
	      {
		/* Strip away useless type conversions.  Both the
		   NON_LVALUE_EXPR that may have been added by fold, and
		   "useless" type conversions that might now be apparent
		   due to propagation.  */
		STRIP_USELESS_TYPE_CONVERSION (result);

		if (result != rhs && valid_gimple_rhs_p (result))
		  return result;
	      }
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (! CONSTANT_CLASS_P (val))
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return get_symbol_constant_value (rhs);
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
				 TREE_TYPE (gimple_assign_lhs (stmt)),
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
427
fef5a0d9
RB
428
/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* A statement needs a VDEF when it stores to memory: a single
	 assignment to a non-register, or a call that may clobber memory.  */
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  /* The last store in the sequence (first seen walking backward)
	     reuses the replaced statement's VDEF; earlier stores get
	     fresh SSA names.  */
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
500
cbdd87d4
RG
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL
   is replaced.  If the call is expected to produces a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  /* Gimplify in SSA form only when the function already is in SSA.  */
  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), false);
	  return;
	}
    }
  else
    {
      /* Evaluate EXPR into a temporary, then assign the temporary to the
	 call's result so the last statement defines LHS as required by
	 gsi_replace_with_seq_vops.  */
      tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  gsi_replace_with_seq_vops (si_p, stmts);
}
cbdd87d4 556
fef5a0d9
RB
557
558/* Replace the call at *GSI with the gimple value VAL. */
559
560static void
561replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
562{
355fe088 563 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9 564 tree lhs = gimple_call_lhs (stmt);
355fe088 565 gimple *repl;
fef5a0d9 566 if (lhs)
e256dfce 567 {
fef5a0d9
RB
568 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
569 val = fold_convert (TREE_TYPE (lhs), val);
570 repl = gimple_build_assign (lhs, val);
571 }
572 else
573 repl = gimple_build_nop ();
574 tree vdef = gimple_vdef (stmt);
575 if (vdef && TREE_CODE (vdef) == SSA_NAME)
576 {
577 unlink_stmt_vdef (stmt);
578 release_ssa_name (vdef);
579 }
f6b4dc28 580 gsi_replace (gsi, repl, false);
fef5a0d9
RB
581}
582
583/* Replace the call at *GSI with the new call REPL and fold that
584 again. */
585
586static void
355fe088 587replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
fef5a0d9 588{
355fe088 589 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9
RB
590 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
591 gimple_set_location (repl, gimple_location (stmt));
592 if (gimple_vdef (stmt)
593 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
594 {
595 gimple_set_vdef (repl, gimple_vdef (stmt));
596 gimple_set_vuse (repl, gimple_vuse (stmt));
597 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
598 }
f6b4dc28 599 gsi_replace (gsi, repl, false);
fef5a0d9
RB
600 fold_stmt (gsi);
601}
602
603/* Return true if VAR is a VAR_DECL or a component thereof. */
604
605static bool
606var_decl_component_p (tree var)
607{
608 tree inner = var;
609 while (handled_component_p (inner))
610 inner = TREE_OPERAND (inner, 0);
611 return SSA_VAR_P (inner);
612}
613
614/* Fold function call to builtin mem{{,p}cpy,move}. Return
86c5a5c3 615 false if no simplification can be made.
fef5a0d9
RB
616 If ENDP is 0, return DEST (like memcpy).
617 If ENDP is 1, return DEST+LEN (like mempcpy).
618 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
619 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
620 (memmove). */
621
622static bool
623gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
624 tree dest, tree src, int endp)
625{
355fe088 626 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9
RB
627 tree lhs = gimple_call_lhs (stmt);
628 tree len = gimple_call_arg (stmt, 2);
629 tree destvar, srcvar;
630 location_t loc = gimple_location (stmt);
631
632 /* If the LEN parameter is zero, return DEST. */
633 if (integer_zerop (len))
634 {
355fe088 635 gimple *repl;
fef5a0d9
RB
636 if (gimple_call_lhs (stmt))
637 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
638 else
639 repl = gimple_build_nop ();
640 tree vdef = gimple_vdef (stmt);
641 if (vdef && TREE_CODE (vdef) == SSA_NAME)
e256dfce 642 {
fef5a0d9
RB
643 unlink_stmt_vdef (stmt);
644 release_ssa_name (vdef);
645 }
f6b4dc28 646 gsi_replace (gsi, repl, false);
fef5a0d9
RB
647 return true;
648 }
649
650 /* If SRC and DEST are the same (and not volatile), return
651 DEST{,+LEN,+LEN-1}. */
652 if (operand_equal_p (src, dest, 0))
653 {
654 unlink_stmt_vdef (stmt);
655 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
656 release_ssa_name (gimple_vdef (stmt));
657 if (!lhs)
658 {
f6b4dc28 659 gsi_replace (gsi, gimple_build_nop (), false);
fef5a0d9
RB
660 return true;
661 }
662 goto done;
663 }
664 else
665 {
666 tree srctype, desttype;
667 unsigned int src_align, dest_align;
668 tree off0;
669
3de2a40e
IE
670 /* Inlining of memcpy/memmove may cause bounds lost (if we copy
671 pointers as wide integer) and also may result in huge function
672 size because of inlined bounds copy. Thus don't inline for
673 functions we want to instrument. */
674 if (flag_check_pointer_bounds
675 && chkp_instrumentable_p (cfun->decl)
676 /* Even if data may contain pointers we can inline if copy
677 less than a pointer size. */
678 && (!tree_fits_uhwi_p (len)
679 || compare_tree_int (len, POINTER_SIZE_UNITS) >= 0))
680 return false;
681
fef5a0d9
RB
682 /* Build accesses at offset zero with a ref-all character type. */
683 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
684 ptr_mode, true), 0);
685
686 /* If we can perform the copy efficiently with first doing all loads
687 and then all stores inline it that way. Currently efficiently
688 means that we can load all the memory into a single integer
689 register which is what MOVE_MAX gives us. */
690 src_align = get_pointer_alignment (src);
691 dest_align = get_pointer_alignment (dest);
692 if (tree_fits_uhwi_p (len)
693 && compare_tree_int (len, MOVE_MAX) <= 0
694 /* ??? Don't transform copies from strings with known length this
695 confuses the tree-ssa-strlen.c. This doesn't handle
696 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
697 reason. */
698 && !c_strlen (src, 2))
699 {
700 unsigned ilen = tree_to_uhwi (len);
701 if (exact_log2 (ilen) != -1)
702 {
703 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
704 if (type
705 && TYPE_MODE (type) != BLKmode
706 && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
707 == ilen * 8)
708 /* If the destination pointer is not aligned we must be able
709 to emit an unaligned store. */
710 && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
f869c12f
RR
711 || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type), dest_align)
712 || (optab_handler (movmisalign_optab, TYPE_MODE (type))
713 != CODE_FOR_nothing)))
fef5a0d9
RB
714 {
715 tree srctype = type;
716 tree desttype = type;
717 if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
718 srctype = build_aligned_type (type, src_align);
719 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
720 tree tem = fold_const_aggregate_ref (srcmem);
721 if (tem)
722 srcmem = tem;
723 else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
724 && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
f869c12f
RR
725 src_align)
726 && (optab_handler (movmisalign_optab,
727 TYPE_MODE (type))
728 == CODE_FOR_nothing))
fef5a0d9
RB
729 srcmem = NULL_TREE;
730 if (srcmem)
731 {
355fe088 732 gimple *new_stmt;
fef5a0d9
RB
733 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
734 {
735 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
736 if (gimple_in_ssa_p (cfun))
737 srcmem = make_ssa_name (TREE_TYPE (srcmem),
738 new_stmt);
739 else
b731b390 740 srcmem = create_tmp_reg (TREE_TYPE (srcmem));
fef5a0d9
RB
741 gimple_assign_set_lhs (new_stmt, srcmem);
742 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
743 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
744 }
745 if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
746 desttype = build_aligned_type (type, dest_align);
747 new_stmt
748 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
749 dest, off0),
750 srcmem);
751 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
752 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
753 if (gimple_vdef (new_stmt)
754 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
755 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
756 if (!lhs)
757 {
f6b4dc28 758 gsi_replace (gsi, new_stmt, false);
fef5a0d9
RB
759 return true;
760 }
761 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
762 goto done;
763 }
764 }
765 }
766 }
767
768 if (endp == 3)
769 {
770 /* Both DEST and SRC must be pointer types.
771 ??? This is what old code did. Is the testing for pointer types
772 really mandatory?
773
774 If either SRC is readonly or length is 1, we can use memcpy. */
775 if (!dest_align || !src_align)
776 return false;
777 if (readonly_data_expr (src)
778 || (tree_fits_uhwi_p (len)
779 && (MIN (src_align, dest_align) / BITS_PER_UNIT
780 >= tree_to_uhwi (len))))
781 {
782 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
783 if (!fn)
784 return false;
785 gimple_call_set_fndecl (stmt, fn);
786 gimple_call_set_arg (stmt, 0, dest);
787 gimple_call_set_arg (stmt, 1, src);
788 fold_stmt (gsi);
789 return true;
790 }
791
792 /* If *src and *dest can't overlap, optimize into memcpy as well. */
793 if (TREE_CODE (src) == ADDR_EXPR
794 && TREE_CODE (dest) == ADDR_EXPR)
795 {
796 tree src_base, dest_base, fn;
797 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
798 HOST_WIDE_INT size = -1;
799 HOST_WIDE_INT maxsize = -1;
ee45a32d 800 bool reverse;
fef5a0d9
RB
801
802 srcvar = TREE_OPERAND (src, 0);
803 src_base = get_ref_base_and_extent (srcvar, &src_offset,
ee45a32d 804 &size, &maxsize, &reverse);
fef5a0d9
RB
805 destvar = TREE_OPERAND (dest, 0);
806 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
ee45a32d 807 &size, &maxsize, &reverse);
fef5a0d9
RB
808 if (tree_fits_uhwi_p (len))
809 maxsize = tree_to_uhwi (len);
810 else
811 maxsize = -1;
812 src_offset /= BITS_PER_UNIT;
813 dest_offset /= BITS_PER_UNIT;
814 if (SSA_VAR_P (src_base)
815 && SSA_VAR_P (dest_base))
816 {
817 if (operand_equal_p (src_base, dest_base, 0)
818 && ranges_overlap_p (src_offset, maxsize,
819 dest_offset, maxsize))
820 return false;
821 }
822 else if (TREE_CODE (src_base) == MEM_REF
823 && TREE_CODE (dest_base) == MEM_REF)
824 {
825 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
826 TREE_OPERAND (dest_base, 0), 0))
827 return false;
828 offset_int off = mem_ref_offset (src_base) + src_offset;
829 if (!wi::fits_shwi_p (off))
830 return false;
831 src_offset = off.to_shwi ();
832
833 off = mem_ref_offset (dest_base) + dest_offset;
834 if (!wi::fits_shwi_p (off))
835 return false;
836 dest_offset = off.to_shwi ();
837 if (ranges_overlap_p (src_offset, maxsize,
838 dest_offset, maxsize))
839 return false;
840 }
841 else
842 return false;
843
844 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
845 if (!fn)
846 return false;
847 gimple_call_set_fndecl (stmt, fn);
848 gimple_call_set_arg (stmt, 0, dest);
849 gimple_call_set_arg (stmt, 1, src);
850 fold_stmt (gsi);
851 return true;
852 }
853
854 /* If the destination and source do not alias optimize into
855 memcpy as well. */
856 if ((is_gimple_min_invariant (dest)
857 || TREE_CODE (dest) == SSA_NAME)
858 && (is_gimple_min_invariant (src)
859 || TREE_CODE (src) == SSA_NAME))
860 {
861 ao_ref destr, srcr;
862 ao_ref_init_from_ptr_and_size (&destr, dest, len);
863 ao_ref_init_from_ptr_and_size (&srcr, src, len);
864 if (!refs_may_alias_p_1 (&destr, &srcr, false))
865 {
866 tree fn;
867 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
868 if (!fn)
869 return false;
870 gimple_call_set_fndecl (stmt, fn);
871 gimple_call_set_arg (stmt, 0, dest);
872 gimple_call_set_arg (stmt, 1, src);
873 fold_stmt (gsi);
874 return true;
875 }
876 }
877
878 return false;
879 }
880
881 if (!tree_fits_shwi_p (len))
882 return false;
883 /* FIXME:
884 This logic lose for arguments like (type *)malloc (sizeof (type)),
885 since we strip the casts of up to VOID return value from malloc.
886 Perhaps we ought to inherit type from non-VOID argument here? */
887 STRIP_NOPS (src);
888 STRIP_NOPS (dest);
889 if (!POINTER_TYPE_P (TREE_TYPE (src))
890 || !POINTER_TYPE_P (TREE_TYPE (dest)))
891 return false;
892 /* In the following try to find a type that is most natural to be
893 used for the memcpy source and destination and that allows
894 the most optimization when memcpy is turned into a plain assignment
895 using that type. In theory we could always use a char[len] type
896 but that only gains us that the destination and source possibly
897 no longer will have their address taken. */
898 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
899 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
900 {
901 tree tem = TREE_OPERAND (src, 0);
902 STRIP_NOPS (tem);
903 if (tem != TREE_OPERAND (src, 0))
904 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
905 }
906 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
907 {
908 tree tem = TREE_OPERAND (dest, 0);
909 STRIP_NOPS (tem);
910 if (tem != TREE_OPERAND (dest, 0))
911 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
912 }
913 srctype = TREE_TYPE (TREE_TYPE (src));
914 if (TREE_CODE (srctype) == ARRAY_TYPE
915 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
916 {
917 srctype = TREE_TYPE (srctype);
918 STRIP_NOPS (src);
919 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
920 }
921 desttype = TREE_TYPE (TREE_TYPE (dest));
922 if (TREE_CODE (desttype) == ARRAY_TYPE
923 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
924 {
925 desttype = TREE_TYPE (desttype);
926 STRIP_NOPS (dest);
927 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
928 }
929 if (TREE_ADDRESSABLE (srctype)
930 || TREE_ADDRESSABLE (desttype))
931 return false;
932
933 /* Make sure we are not copying using a floating-point mode or
934 a type whose size possibly does not match its precision. */
935 if (FLOAT_MODE_P (TYPE_MODE (desttype))
936 || TREE_CODE (desttype) == BOOLEAN_TYPE
937 || TREE_CODE (desttype) == ENUMERAL_TYPE)
938 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
939 if (FLOAT_MODE_P (TYPE_MODE (srctype))
940 || TREE_CODE (srctype) == BOOLEAN_TYPE
941 || TREE_CODE (srctype) == ENUMERAL_TYPE)
942 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
943 if (!srctype)
944 srctype = desttype;
945 if (!desttype)
946 desttype = srctype;
947 if (!srctype)
948 return false;
949
950 src_align = get_pointer_alignment (src);
951 dest_align = get_pointer_alignment (dest);
952 if (dest_align < TYPE_ALIGN (desttype)
953 || src_align < TYPE_ALIGN (srctype))
954 return false;
955
956 destvar = dest;
957 STRIP_NOPS (destvar);
958 if (TREE_CODE (destvar) == ADDR_EXPR
959 && var_decl_component_p (TREE_OPERAND (destvar, 0))
960 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
961 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
962 else
963 destvar = NULL_TREE;
964
965 srcvar = src;
966 STRIP_NOPS (srcvar);
967 if (TREE_CODE (srcvar) == ADDR_EXPR
968 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
969 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
970 {
971 if (!destvar
972 || src_align >= TYPE_ALIGN (desttype))
973 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
974 srcvar, off0);
975 else if (!STRICT_ALIGNMENT)
976 {
977 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
978 src_align);
979 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
980 }
e256dfce 981 else
fef5a0d9
RB
982 srcvar = NULL_TREE;
983 }
984 else
985 srcvar = NULL_TREE;
986
987 if (srcvar == NULL_TREE && destvar == NULL_TREE)
988 return false;
989
990 if (srcvar == NULL_TREE)
991 {
992 STRIP_NOPS (src);
993 if (src_align >= TYPE_ALIGN (desttype))
994 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
995 else
996 {
997 if (STRICT_ALIGNMENT)
998 return false;
999 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1000 src_align);
1001 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1002 }
1003 }
1004 else if (destvar == NULL_TREE)
1005 {
1006 STRIP_NOPS (dest);
1007 if (dest_align >= TYPE_ALIGN (srctype))
1008 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1009 else
1010 {
1011 if (STRICT_ALIGNMENT)
1012 return false;
1013 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1014 dest_align);
1015 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1016 }
1017 }
1018
355fe088 1019 gimple *new_stmt;
fef5a0d9
RB
1020 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1021 {
1022 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1023 if (gimple_in_ssa_p (cfun))
1024 srcvar = make_ssa_name (TREE_TYPE (srcvar), new_stmt);
1025 else
b731b390 1026 srcvar = create_tmp_reg (TREE_TYPE (srcvar));
fef5a0d9
RB
1027 gimple_assign_set_lhs (new_stmt, srcvar);
1028 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1029 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1030 }
1031 new_stmt = gimple_build_assign (destvar, srcvar);
1032 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1033 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1034 if (gimple_vdef (new_stmt)
1035 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1036 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1037 if (!lhs)
1038 {
f6b4dc28 1039 gsi_replace (gsi, new_stmt, false);
fef5a0d9
RB
1040 return true;
1041 }
1042 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1043 }
1044
1045done:
74e3c262 1046 gimple_seq stmts = NULL;
fef5a0d9
RB
1047 if (endp == 0 || endp == 3)
1048 len = NULL_TREE;
1049 else if (endp == 2)
74e3c262
RB
1050 len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
1051 ssize_int (1));
fef5a0d9 1052 if (endp == 2 || endp == 1)
74e3c262
RB
1053 {
1054 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1055 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1056 TREE_TYPE (dest), dest, len);
1057 }
fef5a0d9 1058
74e3c262 1059 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
355fe088 1060 gimple *repl = gimple_build_assign (lhs, dest);
f6b4dc28 1061 gsi_replace (gsi, repl, false);
fef5a0d9
RB
1062 return true;
1063}
1064
1065/* Fold function call to builtin memset or bzero at *GSI setting the
1066 memory of size LEN to VAL. Return whether a simplification was made. */
1067
1068static bool
1069gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1070{
355fe088 1071 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9
RB
1072 tree etype;
1073 unsigned HOST_WIDE_INT length, cval;
1074
1075 /* If the LEN parameter is zero, return DEST. */
1076 if (integer_zerop (len))
1077 {
1078 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1079 return true;
1080 }
1081
1082 if (! tree_fits_uhwi_p (len))
1083 return false;
1084
1085 if (TREE_CODE (c) != INTEGER_CST)
1086 return false;
1087
1088 tree dest = gimple_call_arg (stmt, 0);
1089 tree var = dest;
1090 if (TREE_CODE (var) != ADDR_EXPR)
1091 return false;
1092
1093 var = TREE_OPERAND (var, 0);
1094 if (TREE_THIS_VOLATILE (var))
1095 return false;
1096
1097 etype = TREE_TYPE (var);
1098 if (TREE_CODE (etype) == ARRAY_TYPE)
1099 etype = TREE_TYPE (etype);
1100
1101 if (!INTEGRAL_TYPE_P (etype)
1102 && !POINTER_TYPE_P (etype))
1103 return NULL_TREE;
1104
1105 if (! var_decl_component_p (var))
1106 return NULL_TREE;
1107
1108 length = tree_to_uhwi (len);
1109 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
1110 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1111 return NULL_TREE;
1112
1113 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1114 return NULL_TREE;
1115
1116 if (integer_zerop (c))
1117 cval = 0;
1118 else
1119 {
1120 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1121 return NULL_TREE;
1122
1123 cval = TREE_INT_CST_LOW (c);
1124 cval &= 0xff;
1125 cval |= cval << 8;
1126 cval |= cval << 16;
1127 cval |= (cval << 31) << 1;
1128 }
1129
1130 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
355fe088 1131 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
fef5a0d9
RB
1132 gimple_set_vuse (store, gimple_vuse (stmt));
1133 tree vdef = gimple_vdef (stmt);
1134 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1135 {
1136 gimple_set_vdef (store, gimple_vdef (stmt));
1137 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1138 }
1139 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1140 if (gimple_call_lhs (stmt))
1141 {
355fe088 1142 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
f6b4dc28 1143 gsi_replace (gsi, asgn, false);
fef5a0d9
RB
1144 }
1145 else
1146 {
1147 gimple_stmt_iterator gsi2 = *gsi;
1148 gsi_prev (gsi);
1149 gsi_remove (&gsi2, true);
1150 }
1151
1152 return true;
1153}
1154
1155
1156/* Return the string length, maximum string length or maximum value of
1157 ARG in LENGTH.
1158 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
1159 is not NULL and, for TYPE == 0, its value is not equal to the length
1160 we determine or if we are unable to determine the length or value,
1161 return false. VISITED is a bitmap of visited variables.
1162 TYPE is 0 if string length should be returned, 1 for maximum string
1163 length and 2 for maximum value ARG can have. */
1164
1165static bool
dcb7fae2 1166get_maxval_strlen (tree arg, tree *length, bitmap *visited, int type)
fef5a0d9
RB
1167{
1168 tree var, val;
355fe088 1169 gimple *def_stmt;
fef5a0d9
RB
1170
1171 if (TREE_CODE (arg) != SSA_NAME)
1172 {
1173 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1174 if (TREE_CODE (arg) == ADDR_EXPR
1175 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
1176 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
1177 {
1178 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
1179 if (TREE_CODE (aop0) == INDIRECT_REF
1180 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1181 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
1182 length, visited, type);
1183 }
1184
1185 if (type == 2)
1186 {
1187 val = arg;
1188 if (TREE_CODE (val) != INTEGER_CST
1189 || tree_int_cst_sgn (val) < 0)
1190 return false;
1191 }
1192 else
1193 val = c_strlen (arg, 1);
1194 if (!val)
1195 return false;
1196
1197 if (*length)
1198 {
1199 if (type > 0)
1200 {
1201 if (TREE_CODE (*length) != INTEGER_CST
1202 || TREE_CODE (val) != INTEGER_CST)
1203 return false;
1204
1205 if (tree_int_cst_lt (*length, val))
1206 *length = val;
1207 return true;
1208 }
1209 else if (simple_cst_equal (val, *length) != 1)
1210 return false;
1211 }
1212
1213 *length = val;
1214 return true;
1215 }
1216
1217 /* If ARG is registered for SSA update we cannot look at its defining
1218 statement. */
1219 if (name_registered_for_update_p (arg))
1220 return false;
1221
1222 /* If we were already here, break the infinite cycle. */
dcb7fae2
RB
1223 if (!*visited)
1224 *visited = BITMAP_ALLOC (NULL);
1225 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
fef5a0d9
RB
1226 return true;
1227
1228 var = arg;
1229 def_stmt = SSA_NAME_DEF_STMT (var);
1230
1231 switch (gimple_code (def_stmt))
1232 {
1233 case GIMPLE_ASSIGN:
1234 /* The RHS of the statement defining VAR must either have a
1235 constant length or come from another SSA_NAME with a constant
1236 length. */
1237 if (gimple_assign_single_p (def_stmt)
1238 || gimple_assign_unary_nop_p (def_stmt))
1239 {
1240 tree rhs = gimple_assign_rhs1 (def_stmt);
1241 return get_maxval_strlen (rhs, length, visited, type);
1242 }
1243 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1244 {
1245 tree op2 = gimple_assign_rhs2 (def_stmt);
1246 tree op3 = gimple_assign_rhs3 (def_stmt);
1247 return get_maxval_strlen (op2, length, visited, type)
1248 && get_maxval_strlen (op3, length, visited, type);
1249 }
1250 return false;
1251
1252 case GIMPLE_PHI:
1253 {
1254 /* All the arguments of the PHI node must have the same constant
1255 length. */
1256 unsigned i;
1257
1258 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1259 {
1260 tree arg = gimple_phi_arg (def_stmt, i)->def;
1261
1262 /* If this PHI has itself as an argument, we cannot
1263 determine the string length of this argument. However,
1264 if we can find a constant string length for the other
1265 PHI args then we can still be sure that this is a
1266 constant string length. So be optimistic and just
1267 continue with the next argument. */
1268 if (arg == gimple_phi_result (def_stmt))
1269 continue;
1270
1271 if (!get_maxval_strlen (arg, length, visited, type))
1272 return false;
1273 }
1274 }
1275 return true;
1276
1277 default:
1278 return false;
1279 }
1280}
1281
dcb7fae2
RB
1282tree
1283get_maxval_strlen (tree arg, int type)
1284{
1285 bitmap visited = NULL;
1286 tree len = NULL_TREE;
1287 if (!get_maxval_strlen (arg, &len, &visited, type))
1288 len = NULL_TREE;
1289 if (visited)
1290 BITMAP_FREE (visited);
1291
1292 return len;
1293}
1294
fef5a0d9
RB
1295
1296/* Fold function call to builtin strcpy with arguments DEST and SRC.
1297 If LEN is not NULL, it represents the length of the string to be
1298 copied. Return NULL_TREE if no simplification can be made. */
1299
1300static bool
1301gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
dcb7fae2 1302 tree dest, tree src)
fef5a0d9 1303{
dcb7fae2 1304 location_t loc = gimple_location (gsi_stmt (*gsi));
fef5a0d9
RB
1305 tree fn;
1306
1307 /* If SRC and DEST are the same (and not volatile), return DEST. */
1308 if (operand_equal_p (src, dest, 0))
1309 {
1310 replace_call_with_value (gsi, dest);
1311 return true;
1312 }
1313
1314 if (optimize_function_for_size_p (cfun))
1315 return false;
1316
1317 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1318 if (!fn)
1319 return false;
1320
1579e1f8 1321 tree len = get_maxval_strlen (src, 0);
fef5a0d9 1322 if (!len)
dcb7fae2 1323 return false;
fef5a0d9
RB
1324
1325 len = fold_convert_loc (loc, size_type_node, len);
1326 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1327 len = force_gimple_operand_gsi (gsi, len, true,
1328 NULL_TREE, true, GSI_SAME_STMT);
355fe088 1329 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
fef5a0d9
RB
1330 replace_call_with_call_and_fold (gsi, repl);
1331 return true;
1332}
1333
1334/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1335 If SLEN is not NULL, it represents the length of the source string.
1336 Return NULL_TREE if no simplification can be made. */
1337
1338static bool
dcb7fae2
RB
1339gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1340 tree dest, tree src, tree len)
fef5a0d9 1341{
dcb7fae2 1342 location_t loc = gimple_location (gsi_stmt (*gsi));
fef5a0d9
RB
1343 tree fn;
1344
1345 /* If the LEN parameter is zero, return DEST. */
1346 if (integer_zerop (len))
1347 {
1348 replace_call_with_value (gsi, dest);
1349 return true;
1350 }
1351
1352 /* We can't compare slen with len as constants below if len is not a
1353 constant. */
dcb7fae2 1354 if (TREE_CODE (len) != INTEGER_CST)
fef5a0d9
RB
1355 return false;
1356
fef5a0d9 1357 /* Now, we must be passed a constant src ptr parameter. */
1579e1f8 1358 tree slen = get_maxval_strlen (src, 0);
dcb7fae2 1359 if (!slen || TREE_CODE (slen) != INTEGER_CST)
fef5a0d9
RB
1360 return false;
1361
1362 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1363
1364 /* We do not support simplification of this case, though we do
1365 support it when expanding trees into RTL. */
1366 /* FIXME: generate a call to __builtin_memset. */
1367 if (tree_int_cst_lt (slen, len))
1368 return false;
1369
1370 /* OK transform into builtin memcpy. */
1371 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1372 if (!fn)
1373 return false;
1374
1375 len = fold_convert_loc (loc, size_type_node, len);
1376 len = force_gimple_operand_gsi (gsi, len, true,
1377 NULL_TREE, true, GSI_SAME_STMT);
355fe088 1378 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
fef5a0d9
RB
1379 replace_call_with_call_and_fold (gsi, repl);
1380 return true;
1381}
1382
1383/* Simplify a call to the strcat builtin. DST and SRC are the arguments
1384 to the call.
1385
1386 Return NULL_TREE if no simplification was possible, otherwise return the
1387 simplified form of the call as a tree.
1388
1389 The simplified form may be a constant or other expression which
1390 computes the same value, but in a more efficient manner (including
1391 calls to other builtin functions).
1392
1393 The call may contain arguments which need to be evaluated, but
1394 which are not useful to determine the result of the call. In
1395 this case we return a chain of COMPOUND_EXPRs. The LHS of each
1396 COMPOUND_EXPR will be an argument which must be evaluated.
1397 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
1398 COMPOUND_EXPR in the chain will contain the tree for the simplified
1399 form of the builtin function call. */
1400
1401static bool
dcb7fae2 1402gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
fef5a0d9 1403{
355fe088 1404 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2 1405 location_t loc = gimple_location (stmt);
fef5a0d9
RB
1406
1407 const char *p = c_getstr (src);
1408
1409 /* If the string length is zero, return the dst parameter. */
1410 if (p && *p == '\0')
1411 {
1412 replace_call_with_value (gsi, dst);
1413 return true;
1414 }
1415
1416 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
1417 return false;
1418
1419 /* See if we can store by pieces into (dst + strlen(dst)). */
1420 tree newdst;
1421 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1422 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1423
1424 if (!strlen_fn || !memcpy_fn)
1425 return false;
1426
1427 /* If the length of the source string isn't computable don't
1428 split strcat into strlen and memcpy. */
dcb7fae2 1429 tree len = get_maxval_strlen (src, 0);
fef5a0d9 1430 if (! len)
fef5a0d9
RB
1431 return false;
1432
1433 /* Create strlen (dst). */
1434 gimple_seq stmts = NULL, stmts2;
355fe088 1435 gimple *repl = gimple_build_call (strlen_fn, 1, dst);
fef5a0d9
RB
1436 gimple_set_location (repl, loc);
1437 if (gimple_in_ssa_p (cfun))
b731b390 1438 newdst = make_ssa_name (size_type_node);
fef5a0d9 1439 else
b731b390 1440 newdst = create_tmp_reg (size_type_node);
fef5a0d9
RB
1441 gimple_call_set_lhs (repl, newdst);
1442 gimple_seq_add_stmt_without_update (&stmts, repl);
1443
1444 /* Create (dst p+ strlen (dst)). */
1445 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
1446 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
1447 gimple_seq_add_seq_without_update (&stmts, stmts2);
1448
1449 len = fold_convert_loc (loc, size_type_node, len);
1450 len = size_binop_loc (loc, PLUS_EXPR, len,
1451 build_int_cst (size_type_node, 1));
1452 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
1453 gimple_seq_add_seq_without_update (&stmts, stmts2);
1454
1455 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
1456 gimple_seq_add_stmt_without_update (&stmts, repl);
1457 if (gimple_call_lhs (stmt))
1458 {
1459 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
1460 gimple_seq_add_stmt_without_update (&stmts, repl);
1461 gsi_replace_with_seq_vops (gsi, stmts);
1462 /* gsi now points at the assignment to the lhs, get a
1463 stmt iterator to the memcpy call.
1464 ??? We can't use gsi_for_stmt as that doesn't work when the
1465 CFG isn't built yet. */
1466 gimple_stmt_iterator gsi2 = *gsi;
1467 gsi_prev (&gsi2);
1468 fold_stmt (&gsi2);
1469 }
1470 else
1471 {
1472 gsi_replace_with_seq_vops (gsi, stmts);
1473 fold_stmt (gsi);
1474 }
1475 return true;
1476}
1477
07f1cf56
RB
1478/* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1479 are the arguments to the call. */
1480
1481static bool
1482gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1483{
355fe088 1484 gimple *stmt = gsi_stmt (*gsi);
07f1cf56
RB
1485 tree dest = gimple_call_arg (stmt, 0);
1486 tree src = gimple_call_arg (stmt, 1);
1487 tree size = gimple_call_arg (stmt, 2);
1488 tree fn;
1489 const char *p;
1490
1491
1492 p = c_getstr (src);
1493 /* If the SRC parameter is "", return DEST. */
1494 if (p && *p == '\0')
1495 {
1496 replace_call_with_value (gsi, dest);
1497 return true;
1498 }
1499
1500 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1501 return false;
1502
1503 /* If __builtin_strcat_chk is used, assume strcat is available. */
1504 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1505 if (!fn)
1506 return false;
1507
355fe088 1508 gimple *repl = gimple_build_call (fn, 2, dest, src);
07f1cf56
RB
1509 replace_call_with_call_and_fold (gsi, repl);
1510 return true;
1511}
1512
ad03a744
RB
1513/* Simplify a call to the strncat builtin. */
1514
1515static bool
1516gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
1517{
1518 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
1519 tree dst = gimple_call_arg (stmt, 0);
1520 tree src = gimple_call_arg (stmt, 1);
1521 tree len = gimple_call_arg (stmt, 2);
1522
1523 const char *p = c_getstr (src);
1524
1525 /* If the requested length is zero, or the src parameter string
1526 length is zero, return the dst parameter. */
1527 if (integer_zerop (len) || (p && *p == '\0'))
1528 {
1529 replace_call_with_value (gsi, dst);
1530 return true;
1531 }
1532
1533 /* If the requested len is greater than or equal to the string
1534 length, call strcat. */
1535 if (TREE_CODE (len) == INTEGER_CST && p
1536 && compare_tree_int (len, strlen (p)) >= 0)
1537 {
1538 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
1539
1540 /* If the replacement _DECL isn't initialized, don't do the
1541 transformation. */
1542 if (!fn)
1543 return false;
1544
1545 gcall *repl = gimple_build_call (fn, 2, dst, src);
1546 replace_call_with_call_and_fold (gsi, repl);
1547 return true;
1548 }
1549
1550 return false;
1551}
1552
745583f9
RB
1553/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
1554 LEN, and SIZE. */
1555
1556static bool
1557gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
1558{
355fe088 1559 gimple *stmt = gsi_stmt (*gsi);
745583f9
RB
1560 tree dest = gimple_call_arg (stmt, 0);
1561 tree src = gimple_call_arg (stmt, 1);
1562 tree len = gimple_call_arg (stmt, 2);
1563 tree size = gimple_call_arg (stmt, 3);
1564 tree fn;
1565 const char *p;
1566
1567 p = c_getstr (src);
1568 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
1569 if ((p && *p == '\0')
1570 || integer_zerop (len))
1571 {
1572 replace_call_with_value (gsi, dest);
1573 return true;
1574 }
1575
1576 if (! tree_fits_uhwi_p (size))
1577 return false;
1578
1579 if (! integer_all_onesp (size))
1580 {
1581 tree src_len = c_strlen (src, 1);
1582 if (src_len
1583 && tree_fits_uhwi_p (src_len)
1584 && tree_fits_uhwi_p (len)
1585 && ! tree_int_cst_lt (len, src_len))
1586 {
1587 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
1588 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
1589 if (!fn)
1590 return false;
1591
355fe088 1592 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
745583f9
RB
1593 replace_call_with_call_and_fold (gsi, repl);
1594 return true;
1595 }
1596 return false;
1597 }
1598
1599 /* If __builtin_strncat_chk is used, assume strncat is available. */
1600 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
1601 if (!fn)
1602 return false;
1603
355fe088 1604 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
745583f9
RB
1605 replace_call_with_call_and_fold (gsi, repl);
1606 return true;
1607}
1608
fef5a0d9
RB
1609/* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
1610 to the call. IGNORE is true if the value returned
1611 by the builtin will be ignored. UNLOCKED is true is true if this
1612 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
1613 the known length of the string. Return NULL_TREE if no simplification
1614 was possible. */
1615
1616static bool
1617gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
fef5a0d9 1618 tree arg0, tree arg1,
dcb7fae2 1619 bool unlocked)
fef5a0d9 1620{
355fe088 1621 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2 1622
fef5a0d9
RB
1623 /* If we're using an unlocked function, assume the other unlocked
1624 functions exist explicitly. */
1625 tree const fn_fputc = (unlocked
1626 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
1627 : builtin_decl_implicit (BUILT_IN_FPUTC));
1628 tree const fn_fwrite = (unlocked
1629 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
1630 : builtin_decl_implicit (BUILT_IN_FWRITE));
1631
1632 /* If the return value is used, don't do the transformation. */
dcb7fae2 1633 if (gimple_call_lhs (stmt))
fef5a0d9
RB
1634 return false;
1635
fef5a0d9
RB
1636 /* Get the length of the string passed to fputs. If the length
1637 can't be determined, punt. */
dcb7fae2 1638 tree len = get_maxval_strlen (arg0, 0);
fef5a0d9
RB
1639 if (!len
1640 || TREE_CODE (len) != INTEGER_CST)
1641 return false;
1642
1643 switch (compare_tree_int (len, 1))
1644 {
1645 case -1: /* length is 0, delete the call entirely . */
1646 replace_call_with_value (gsi, integer_zero_node);
1647 return true;
1648
1649 case 0: /* length is 1, call fputc. */
1650 {
1651 const char *p = c_getstr (arg0);
1652 if (p != NULL)
1653 {
1654 if (!fn_fputc)
1655 return false;
1656
355fe088 1657 gimple *repl = gimple_build_call (fn_fputc, 2,
fef5a0d9
RB
1658 build_int_cst
1659 (integer_type_node, p[0]), arg1);
1660 replace_call_with_call_and_fold (gsi, repl);
1661 return true;
1662 }
1663 }
1664 /* FALLTHROUGH */
1665 case 1: /* length is greater than 1, call fwrite. */
1666 {
1667 /* If optimizing for size keep fputs. */
1668 if (optimize_function_for_size_p (cfun))
1669 return false;
1670 /* New argument list transforming fputs(string, stream) to
1671 fwrite(string, 1, len, stream). */
1672 if (!fn_fwrite)
1673 return false;
1674
355fe088 1675 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
fef5a0d9
RB
1676 size_one_node, len, arg1);
1677 replace_call_with_call_and_fold (gsi, repl);
1678 return true;
1679 }
1680 default:
1681 gcc_unreachable ();
1682 }
1683 return false;
1684}
1685
1686/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
1687 DEST, SRC, LEN, and SIZE are the arguments to the call.
1688 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
1689 code of the builtin. If MAXLEN is not NULL, it is maximum length
1690 passed as third argument. */
1691
1692static bool
1693gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
fef5a0d9 1694 tree dest, tree src, tree len, tree size,
fef5a0d9
RB
1695 enum built_in_function fcode)
1696{
355fe088 1697 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2
RB
1698 location_t loc = gimple_location (stmt);
1699 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
fef5a0d9
RB
1700 tree fn;
1701
1702 /* If SRC and DEST are the same (and not volatile), return DEST
1703 (resp. DEST+LEN for __mempcpy_chk). */
1704 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
1705 {
1706 if (fcode != BUILT_IN_MEMPCPY_CHK)
1707 {
1708 replace_call_with_value (gsi, dest);
1709 return true;
1710 }
1711 else
1712 {
74e3c262
RB
1713 gimple_seq stmts = NULL;
1714 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
22518428
JJ
1715 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1716 TREE_TYPE (dest), dest, len);
74e3c262 1717 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
fef5a0d9
RB
1718 replace_call_with_value (gsi, temp);
1719 return true;
1720 }
1721 }
1722
1723 if (! tree_fits_uhwi_p (size))
1724 return false;
1725
dcb7fae2 1726 tree maxlen = get_maxval_strlen (len, 2);
fef5a0d9
RB
1727 if (! integer_all_onesp (size))
1728 {
1729 if (! tree_fits_uhwi_p (len))
1730 {
1731 /* If LEN is not constant, try MAXLEN too.
1732 For MAXLEN only allow optimizing into non-_ocs function
1733 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1734 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1735 {
1736 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
1737 {
1738 /* (void) __mempcpy_chk () can be optimized into
1739 (void) __memcpy_chk (). */
1740 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
1741 if (!fn)
1742 return false;
1743
355fe088 1744 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
fef5a0d9
RB
1745 replace_call_with_call_and_fold (gsi, repl);
1746 return true;
1747 }
1748 return false;
1749 }
1750 }
1751 else
1752 maxlen = len;
1753
1754 if (tree_int_cst_lt (size, maxlen))
1755 return false;
1756 }
1757
1758 fn = NULL_TREE;
1759 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
1760 mem{cpy,pcpy,move,set} is available. */
1761 switch (fcode)
1762 {
1763 case BUILT_IN_MEMCPY_CHK:
1764 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
1765 break;
1766 case BUILT_IN_MEMPCPY_CHK:
1767 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
1768 break;
1769 case BUILT_IN_MEMMOVE_CHK:
1770 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
1771 break;
1772 case BUILT_IN_MEMSET_CHK:
1773 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
1774 break;
1775 default:
1776 break;
1777 }
1778
1779 if (!fn)
1780 return false;
1781
355fe088 1782 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
fef5a0d9
RB
1783 replace_call_with_call_and_fold (gsi, repl);
1784 return true;
1785}
1786
1787/* Fold a call to the __st[rp]cpy_chk builtin.
1788 DEST, SRC, and SIZE are the arguments to the call.
1789 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
1790 code of the builtin. If MAXLEN is not NULL, it is maximum length of
1791 strings passed as second argument. */
1792
1793static bool
1794gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
dcb7fae2 1795 tree dest,
fef5a0d9 1796 tree src, tree size,
fef5a0d9
RB
1797 enum built_in_function fcode)
1798{
355fe088 1799 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2
RB
1800 location_t loc = gimple_location (stmt);
1801 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
fef5a0d9
RB
1802 tree len, fn;
1803
1804 /* If SRC and DEST are the same (and not volatile), return DEST. */
1805 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
1806 {
1807 replace_call_with_value (gsi, dest);
1808 return true;
1809 }
1810
1811 if (! tree_fits_uhwi_p (size))
1812 return false;
1813
dcb7fae2 1814 tree maxlen = get_maxval_strlen (src, 1);
fef5a0d9
RB
1815 if (! integer_all_onesp (size))
1816 {
1817 len = c_strlen (src, 1);
1818 if (! len || ! tree_fits_uhwi_p (len))
1819 {
1820 /* If LEN is not constant, try MAXLEN too.
1821 For MAXLEN only allow optimizing into non-_ocs function
1822 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1823 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1824 {
1825 if (fcode == BUILT_IN_STPCPY_CHK)
1826 {
1827 if (! ignore)
1828 return false;
1829
1830 /* If return value of __stpcpy_chk is ignored,
1831 optimize into __strcpy_chk. */
1832 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
1833 if (!fn)
1834 return false;
1835
355fe088 1836 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
fef5a0d9
RB
1837 replace_call_with_call_and_fold (gsi, repl);
1838 return true;
1839 }
1840
1841 if (! len || TREE_SIDE_EFFECTS (len))
1842 return false;
1843
1844 /* If c_strlen returned something, but not a constant,
1845 transform __strcpy_chk into __memcpy_chk. */
1846 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
1847 if (!fn)
1848 return false;
1849
74e3c262
RB
1850 gimple_seq stmts = NULL;
1851 len = gimple_convert (&stmts, loc, size_type_node, len);
1852 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
1853 build_int_cst (size_type_node, 1));
1854 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
355fe088 1855 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
fef5a0d9
RB
1856 replace_call_with_call_and_fold (gsi, repl);
1857 return true;
1858 }
e256dfce 1859 }
fef5a0d9
RB
1860 else
1861 maxlen = len;
1862
1863 if (! tree_int_cst_lt (maxlen, size))
1864 return false;
e256dfce
RG
1865 }
1866
fef5a0d9
RB
1867 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
1868 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
1869 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
1870 if (!fn)
1871 return false;
1872
355fe088 1873 gimple *repl = gimple_build_call (fn, 2, dest, src);
fef5a0d9
RB
1874 replace_call_with_call_and_fold (gsi, repl);
1875 return true;
1876}
1877
1878/* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
1879 are the arguments to the call. If MAXLEN is not NULL, it is maximum
1880 length passed as third argument. IGNORE is true if return value can be
1881 ignored. FCODE is the BUILT_IN_* code of the builtin. */
1882
1883static bool
1884gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
1885 tree dest, tree src,
dcb7fae2 1886 tree len, tree size,
fef5a0d9
RB
1887 enum built_in_function fcode)
1888{
355fe088 1889 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2 1890 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
fef5a0d9
RB
1891 tree fn;
1892
1893 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
cbdd87d4 1894 {
fef5a0d9
RB
1895 /* If return value of __stpncpy_chk is ignored,
1896 optimize into __strncpy_chk. */
1897 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
1898 if (fn)
1899 {
355fe088 1900 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
fef5a0d9
RB
1901 replace_call_with_call_and_fold (gsi, repl);
1902 return true;
1903 }
cbdd87d4
RG
1904 }
1905
fef5a0d9
RB
1906 if (! tree_fits_uhwi_p (size))
1907 return false;
1908
dcb7fae2 1909 tree maxlen = get_maxval_strlen (len, 2);
fef5a0d9 1910 if (! integer_all_onesp (size))
cbdd87d4 1911 {
fef5a0d9 1912 if (! tree_fits_uhwi_p (len))
fe2ef088 1913 {
fef5a0d9
RB
1914 /* If LEN is not constant, try MAXLEN too.
1915 For MAXLEN only allow optimizing into non-_ocs function
1916 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1917 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1918 return false;
8a1561bc 1919 }
fef5a0d9
RB
1920 else
1921 maxlen = len;
1922
1923 if (tree_int_cst_lt (size, maxlen))
1924 return false;
cbdd87d4
RG
1925 }
1926
fef5a0d9
RB
1927 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
1928 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
1929 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
1930 if (!fn)
1931 return false;
1932
355fe088 1933 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
fef5a0d9
RB
1934 replace_call_with_call_and_fold (gsi, repl);
1935 return true;
cbdd87d4
RG
1936}
1937
2625bb5d
RB
1938/* Fold function call to builtin stpcpy with arguments DEST and SRC.
1939 Return NULL_TREE if no simplification can be made. */
1940
1941static bool
1942gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
1943{
1944 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
1945 location_t loc = gimple_location (stmt);
1946 tree dest = gimple_call_arg (stmt, 0);
1947 tree src = gimple_call_arg (stmt, 1);
1948 tree fn, len, lenp1;
1949
1950 /* If the result is unused, replace stpcpy with strcpy. */
1951 if (gimple_call_lhs (stmt) == NULL_TREE)
1952 {
1953 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
1954 if (!fn)
1955 return false;
1956 gimple_call_set_fndecl (stmt, fn);
1957 fold_stmt (gsi);
1958 return true;
1959 }
1960
1961 len = c_strlen (src, 1);
1962 if (!len
1963 || TREE_CODE (len) != INTEGER_CST)
1964 return false;
1965
1966 if (optimize_function_for_size_p (cfun)
1967 /* If length is zero it's small enough. */
1968 && !integer_zerop (len))
1969 return false;
1970
1971 /* If the source has a known length replace stpcpy with memcpy. */
1972 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1973 if (!fn)
1974 return false;
1975
1976 gimple_seq stmts = NULL;
1977 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
1978 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
1979 tem, build_int_cst (size_type_node, 1));
1980 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1981 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
1982 gimple_set_vuse (repl, gimple_vuse (stmt));
1983 gimple_set_vdef (repl, gimple_vdef (stmt));
1984 if (gimple_vdef (repl)
1985 && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
1986 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
1987 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
1988 /* Replace the result with dest + len. */
1989 stmts = NULL;
1990 tem = gimple_convert (&stmts, loc, sizetype, len);
1991 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1992 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
1993 POINTER_PLUS_EXPR, dest, tem);
f6b4dc28 1994 gsi_replace (gsi, ret, false);
2625bb5d
RB
1995 /* Finally fold the memcpy call. */
1996 gimple_stmt_iterator gsi2 = *gsi;
1997 gsi_prev (&gsi2);
1998 fold_stmt (&gsi2);
1999 return true;
2000}
2001
fef5a0d9
RB
2002/* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
2003 NULL_TREE if a normal call should be emitted rather than expanding
2004 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
2005 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
2006 passed as second argument. */
cbdd87d4
RG
2007
2008static bool
fef5a0d9 2009gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
dcb7fae2 2010 enum built_in_function fcode)
cbdd87d4 2011{
538dd0b7 2012 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
fef5a0d9
RB
2013 tree dest, size, len, fn, fmt, flag;
2014 const char *fmt_str;
cbdd87d4 2015
fef5a0d9
RB
2016 /* Verify the required arguments in the original call. */
2017 if (gimple_call_num_args (stmt) < 5)
2018 return false;
cbdd87d4 2019
fef5a0d9
RB
2020 dest = gimple_call_arg (stmt, 0);
2021 len = gimple_call_arg (stmt, 1);
2022 flag = gimple_call_arg (stmt, 2);
2023 size = gimple_call_arg (stmt, 3);
2024 fmt = gimple_call_arg (stmt, 4);
2025
2026 if (! tree_fits_uhwi_p (size))
2027 return false;
2028
2029 if (! integer_all_onesp (size))
2030 {
dcb7fae2 2031 tree maxlen = get_maxval_strlen (len, 2);
fef5a0d9 2032 if (! tree_fits_uhwi_p (len))
cbdd87d4 2033 {
fef5a0d9
RB
2034 /* If LEN is not constant, try MAXLEN too.
2035 For MAXLEN only allow optimizing into non-_ocs function
2036 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2037 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
cbdd87d4
RG
2038 return false;
2039 }
2040 else
fef5a0d9 2041 maxlen = len;
cbdd87d4 2042
fef5a0d9
RB
2043 if (tree_int_cst_lt (size, maxlen))
2044 return false;
2045 }
cbdd87d4 2046
fef5a0d9
RB
2047 if (!init_target_chars ())
2048 return false;
cbdd87d4 2049
fef5a0d9
RB
2050 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2051 or if format doesn't contain % chars or is "%s". */
2052 if (! integer_zerop (flag))
2053 {
2054 fmt_str = c_getstr (fmt);
2055 if (fmt_str == NULL)
2056 return false;
2057 if (strchr (fmt_str, target_percent) != NULL
2058 && strcmp (fmt_str, target_percent_s))
2059 return false;
cbdd87d4
RG
2060 }
2061
fef5a0d9
RB
2062 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2063 available. */
2064 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2065 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2066 if (!fn)
491e0b9b
RG
2067 return false;
2068
fef5a0d9
RB
2069 /* Replace the called function and the first 5 argument by 3 retaining
2070 trailing varargs. */
2071 gimple_call_set_fndecl (stmt, fn);
2072 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2073 gimple_call_set_arg (stmt, 0, dest);
2074 gimple_call_set_arg (stmt, 1, len);
2075 gimple_call_set_arg (stmt, 2, fmt);
2076 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
2077 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2078 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2079 fold_stmt (gsi);
2080 return true;
2081}
cbdd87d4 2082
fef5a0d9
RB
2083/* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
2084 Return NULL_TREE if a normal call should be emitted rather than
2085 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
2086 or BUILT_IN_VSPRINTF_CHK. */
cbdd87d4 2087
fef5a0d9
RB
2088static bool
2089gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2090 enum built_in_function fcode)
2091{
538dd0b7 2092 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
fef5a0d9
RB
2093 tree dest, size, len, fn, fmt, flag;
2094 const char *fmt_str;
2095 unsigned nargs = gimple_call_num_args (stmt);
cbdd87d4 2096
fef5a0d9
RB
2097 /* Verify the required arguments in the original call. */
2098 if (nargs < 4)
2099 return false;
2100 dest = gimple_call_arg (stmt, 0);
2101 flag = gimple_call_arg (stmt, 1);
2102 size = gimple_call_arg (stmt, 2);
2103 fmt = gimple_call_arg (stmt, 3);
2104
2105 if (! tree_fits_uhwi_p (size))
2106 return false;
2107
2108 len = NULL_TREE;
2109
2110 if (!init_target_chars ())
2111 return false;
2112
2113 /* Check whether the format is a literal string constant. */
2114 fmt_str = c_getstr (fmt);
2115 if (fmt_str != NULL)
2116 {
2117 /* If the format doesn't contain % args or %%, we know the size. */
2118 if (strchr (fmt_str, target_percent) == 0)
cbdd87d4 2119 {
fef5a0d9
RB
2120 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
2121 len = build_int_cstu (size_type_node, strlen (fmt_str));
2122 }
2123 /* If the format is "%s" and first ... argument is a string literal,
2124 we know the size too. */
2125 else if (fcode == BUILT_IN_SPRINTF_CHK
2126 && strcmp (fmt_str, target_percent_s) == 0)
2127 {
2128 tree arg;
cbdd87d4 2129
fef5a0d9
RB
2130 if (nargs == 5)
2131 {
2132 arg = gimple_call_arg (stmt, 4);
2133 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2134 {
2135 len = c_strlen (arg, 1);
2136 if (! len || ! tree_fits_uhwi_p (len))
2137 len = NULL_TREE;
2138 }
2139 }
2140 }
2141 }
cbdd87d4 2142
fef5a0d9
RB
2143 if (! integer_all_onesp (size))
2144 {
2145 if (! len || ! tree_int_cst_lt (len, size))
2146 return false;
2147 }
cbdd87d4 2148
fef5a0d9
RB
2149 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2150 or if format doesn't contain % chars or is "%s". */
2151 if (! integer_zerop (flag))
2152 {
2153 if (fmt_str == NULL)
2154 return false;
2155 if (strchr (fmt_str, target_percent) != NULL
2156 && strcmp (fmt_str, target_percent_s))
2157 return false;
2158 }
cbdd87d4 2159
fef5a0d9
RB
2160 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2161 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
2162 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
2163 if (!fn)
2164 return false;
2165
2166 /* Replace the called function and the first 4 argument by 2 retaining
2167 trailing varargs. */
2168 gimple_call_set_fndecl (stmt, fn);
2169 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2170 gimple_call_set_arg (stmt, 0, dest);
2171 gimple_call_set_arg (stmt, 1, fmt);
2172 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
2173 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2174 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2175 fold_stmt (gsi);
2176 return true;
2177}
2178
35770bb2
RB
2179/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2180 ORIG may be null if this is a 2-argument call. We don't attempt to
2181 simplify calls with more than 3 arguments.
2182
2183 Return NULL_TREE if no simplification was possible, otherwise return the
2184 simplified form of the call as a tree. If IGNORED is true, it means that
2185 the caller does not use the returned value of the function. */
2186
2187static bool
dcb7fae2 2188gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
35770bb2 2189{
355fe088 2190 gimple *stmt = gsi_stmt (*gsi);
35770bb2
RB
2191 tree dest = gimple_call_arg (stmt, 0);
2192 tree fmt = gimple_call_arg (stmt, 1);
2193 tree orig = NULL_TREE;
2194 const char *fmt_str = NULL;
2195
2196 /* Verify the required arguments in the original call. We deal with two
2197 types of sprintf() calls: 'sprintf (str, fmt)' and
2198 'sprintf (dest, "%s", orig)'. */
2199 if (gimple_call_num_args (stmt) > 3)
2200 return false;
2201
2202 if (gimple_call_num_args (stmt) == 3)
2203 orig = gimple_call_arg (stmt, 2);
2204
2205 /* Check whether the format is a literal string constant. */
2206 fmt_str = c_getstr (fmt);
2207 if (fmt_str == NULL)
2208 return false;
2209
2210 if (!init_target_chars ())
2211 return false;
2212
2213 /* If the format doesn't contain % args or %%, use strcpy. */
2214 if (strchr (fmt_str, target_percent) == NULL)
2215 {
2216 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2217
2218 if (!fn)
2219 return false;
2220
2221 /* Don't optimize sprintf (buf, "abc", ptr++). */
2222 if (orig)
2223 return false;
2224
2225 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
2226 'format' is known to contain no % formats. */
2227 gimple_seq stmts = NULL;
355fe088 2228 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
35770bb2
RB
2229 gimple_seq_add_stmt_without_update (&stmts, repl);
2230 if (gimple_call_lhs (stmt))
2231 {
2232 repl = gimple_build_assign (gimple_call_lhs (stmt),
2233 build_int_cst (integer_type_node,
2234 strlen (fmt_str)));
2235 gimple_seq_add_stmt_without_update (&stmts, repl);
2236 gsi_replace_with_seq_vops (gsi, stmts);
2237 /* gsi now points at the assignment to the lhs, get a
2238 stmt iterator to the memcpy call.
2239 ??? We can't use gsi_for_stmt as that doesn't work when the
2240 CFG isn't built yet. */
2241 gimple_stmt_iterator gsi2 = *gsi;
2242 gsi_prev (&gsi2);
2243 fold_stmt (&gsi2);
2244 }
2245 else
2246 {
2247 gsi_replace_with_seq_vops (gsi, stmts);
2248 fold_stmt (gsi);
2249 }
2250 return true;
2251 }
2252
2253 /* If the format is "%s", use strcpy if the result isn't used. */
2254 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2255 {
2256 tree fn;
2257 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2258
2259 if (!fn)
2260 return false;
2261
2262 /* Don't crash on sprintf (str1, "%s"). */
2263 if (!orig)
2264 return false;
2265
dcb7fae2
RB
2266 tree orig_len = NULL_TREE;
2267 if (gimple_call_lhs (stmt))
35770bb2 2268 {
dcb7fae2 2269 orig_len = get_maxval_strlen (orig, 0);
d7e78447 2270 if (!orig_len)
35770bb2
RB
2271 return false;
2272 }
2273
2274 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
2275 gimple_seq stmts = NULL;
355fe088 2276 gimple *repl = gimple_build_call (fn, 2, dest, orig);
35770bb2
RB
2277 gimple_seq_add_stmt_without_update (&stmts, repl);
2278 if (gimple_call_lhs (stmt))
2279 {
d7e78447
RB
2280 if (!useless_type_conversion_p (integer_type_node,
2281 TREE_TYPE (orig_len)))
2282 orig_len = fold_convert (integer_type_node, orig_len);
2283 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
35770bb2
RB
2284 gimple_seq_add_stmt_without_update (&stmts, repl);
2285 gsi_replace_with_seq_vops (gsi, stmts);
2286 /* gsi now points at the assignment to the lhs, get a
2287 stmt iterator to the memcpy call.
2288 ??? We can't use gsi_for_stmt as that doesn't work when the
2289 CFG isn't built yet. */
2290 gimple_stmt_iterator gsi2 = *gsi;
2291 gsi_prev (&gsi2);
2292 fold_stmt (&gsi2);
2293 }
2294 else
2295 {
2296 gsi_replace_with_seq_vops (gsi, stmts);
2297 fold_stmt (gsi);
2298 }
2299 return true;
2300 }
2301 return false;
2302}
2303
d7e78447
RB
2304/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
2305 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
2306 attempt to simplify calls with more than 4 arguments.
35770bb2 2307
d7e78447
RB
2308 Return NULL_TREE if no simplification was possible, otherwise return the
2309 simplified form of the call as a tree. If IGNORED is true, it means that
2310 the caller does not use the returned value of the function. */
2311
2312static bool
dcb7fae2 2313gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
d7e78447 2314{
538dd0b7 2315 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
d7e78447
RB
2316 tree dest = gimple_call_arg (stmt, 0);
2317 tree destsize = gimple_call_arg (stmt, 1);
2318 tree fmt = gimple_call_arg (stmt, 2);
2319 tree orig = NULL_TREE;
2320 const char *fmt_str = NULL;
2321
2322 if (gimple_call_num_args (stmt) > 4)
2323 return false;
2324
2325 if (gimple_call_num_args (stmt) == 4)
2326 orig = gimple_call_arg (stmt, 3);
2327
2328 if (!tree_fits_uhwi_p (destsize))
2329 return false;
2330 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
2331
2332 /* Check whether the format is a literal string constant. */
2333 fmt_str = c_getstr (fmt);
2334 if (fmt_str == NULL)
2335 return false;
2336
2337 if (!init_target_chars ())
2338 return false;
2339
2340 /* If the format doesn't contain % args or %%, use strcpy. */
2341 if (strchr (fmt_str, target_percent) == NULL)
2342 {
2343 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2344 if (!fn)
2345 return false;
2346
2347 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
2348 if (orig)
2349 return false;
2350
2351 /* We could expand this as
2352 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
2353 or to
2354 memcpy (str, fmt_with_nul_at_cstm1, cst);
2355 but in the former case that might increase code size
2356 and in the latter case grow .rodata section too much.
2357 So punt for now. */
2358 size_t len = strlen (fmt_str);
2359 if (len >= destlen)
2360 return false;
2361
2362 gimple_seq stmts = NULL;
355fe088 2363 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
d7e78447
RB
2364 gimple_seq_add_stmt_without_update (&stmts, repl);
2365 if (gimple_call_lhs (stmt))
2366 {
2367 repl = gimple_build_assign (gimple_call_lhs (stmt),
2368 build_int_cst (integer_type_node, len));
2369 gimple_seq_add_stmt_without_update (&stmts, repl);
2370 gsi_replace_with_seq_vops (gsi, stmts);
2371 /* gsi now points at the assignment to the lhs, get a
2372 stmt iterator to the memcpy call.
2373 ??? We can't use gsi_for_stmt as that doesn't work when the
2374 CFG isn't built yet. */
2375 gimple_stmt_iterator gsi2 = *gsi;
2376 gsi_prev (&gsi2);
2377 fold_stmt (&gsi2);
2378 }
2379 else
2380 {
2381 gsi_replace_with_seq_vops (gsi, stmts);
2382 fold_stmt (gsi);
2383 }
2384 return true;
2385 }
2386
2387 /* If the format is "%s", use strcpy if the result isn't used. */
2388 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2389 {
2390 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2391 if (!fn)
2392 return false;
2393
2394 /* Don't crash on snprintf (str1, cst, "%s"). */
2395 if (!orig)
2396 return false;
2397
dcb7fae2 2398 tree orig_len = get_maxval_strlen (orig, 0);
af9db3a7 2399 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
dcb7fae2 2400 return false;
d7e78447
RB
2401
2402 /* We could expand this as
2403 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
2404 or to
2405 memcpy (str1, str2_with_nul_at_cstm1, cst);
2406 but in the former case that might increase code size
2407 and in the latter case grow .rodata section too much.
2408 So punt for now. */
2409 if (compare_tree_int (orig_len, destlen) >= 0)
2410 return false;
2411
2412 /* Convert snprintf (str1, cst, "%s", str2) into
2413 strcpy (str1, str2) if strlen (str2) < cst. */
2414 gimple_seq stmts = NULL;
355fe088 2415 gimple *repl = gimple_build_call (fn, 2, dest, orig);
d7e78447
RB
2416 gimple_seq_add_stmt_without_update (&stmts, repl);
2417 if (gimple_call_lhs (stmt))
2418 {
2419 if (!useless_type_conversion_p (integer_type_node,
2420 TREE_TYPE (orig_len)))
2421 orig_len = fold_convert (integer_type_node, orig_len);
2422 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2423 gimple_seq_add_stmt_without_update (&stmts, repl);
2424 gsi_replace_with_seq_vops (gsi, stmts);
2425 /* gsi now points at the assignment to the lhs, get a
2426 stmt iterator to the memcpy call.
2427 ??? We can't use gsi_for_stmt as that doesn't work when the
2428 CFG isn't built yet. */
2429 gimple_stmt_iterator gsi2 = *gsi;
2430 gsi_prev (&gsi2);
2431 fold_stmt (&gsi2);
2432 }
2433 else
2434 {
2435 gsi_replace_with_seq_vops (gsi, stmts);
2436 fold_stmt (gsi);
2437 }
2438 return true;
2439 }
2440 return false;
2441}
35770bb2 2442
edd7ae68
RB
2443/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
2444 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
2445 more than 3 arguments, and ARG may be null in the 2-argument case.
2446
2447 Return NULL_TREE if no simplification was possible, otherwise return the
2448 simplified form of the call as a tree. FCODE is the BUILT_IN_*
2449 code of the function to be simplified. */
2450
2451static bool
2452gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
2453 tree fp, tree fmt, tree arg,
2454 enum built_in_function fcode)
2455{
2456 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2457 tree fn_fputc, fn_fputs;
2458 const char *fmt_str = NULL;
2459
2460 /* If the return value is used, don't do the transformation. */
2461 if (gimple_call_lhs (stmt) != NULL_TREE)
2462 return false;
2463
2464 /* Check whether the format is a literal string constant. */
2465 fmt_str = c_getstr (fmt);
2466 if (fmt_str == NULL)
2467 return false;
2468
2469 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
2470 {
2471 /* If we're using an unlocked function, assume the other
2472 unlocked functions exist explicitly. */
2473 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
2474 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
2475 }
2476 else
2477 {
2478 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
2479 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
2480 }
2481
2482 if (!init_target_chars ())
2483 return false;
2484
2485 /* If the format doesn't contain % args or %%, use strcpy. */
2486 if (strchr (fmt_str, target_percent) == NULL)
2487 {
2488 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
2489 && arg)
2490 return false;
2491
2492 /* If the format specifier was "", fprintf does nothing. */
2493 if (fmt_str[0] == '\0')
2494 {
2495 replace_call_with_value (gsi, NULL_TREE);
2496 return true;
2497 }
2498
2499 /* When "string" doesn't contain %, replace all cases of
2500 fprintf (fp, string) with fputs (string, fp). The fputs
2501 builtin will take care of special cases like length == 1. */
2502 if (fn_fputs)
2503 {
2504 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
2505 replace_call_with_call_and_fold (gsi, repl);
2506 return true;
2507 }
2508 }
2509
2510 /* The other optimizations can be done only on the non-va_list variants. */
2511 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
2512 return false;
2513
2514 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
2515 else if (strcmp (fmt_str, target_percent_s) == 0)
2516 {
2517 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2518 return false;
2519 if (fn_fputs)
2520 {
2521 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
2522 replace_call_with_call_and_fold (gsi, repl);
2523 return true;
2524 }
2525 }
2526
2527 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
2528 else if (strcmp (fmt_str, target_percent_c) == 0)
2529 {
2530 if (!arg
2531 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
2532 return false;
2533 if (fn_fputc)
2534 {
2535 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
2536 replace_call_with_call_and_fold (gsi, repl);
2537 return true;
2538 }
2539 }
2540
2541 return false;
2542}
2543
ad03a744
RB
2544/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
2545 FMT and ARG are the arguments to the call; we don't fold cases with
2546 more than 2 arguments, and ARG may be null if this is a 1-argument case.
2547
2548 Return NULL_TREE if no simplification was possible, otherwise return the
2549 simplified form of the call as a tree. FCODE is the BUILT_IN_*
2550 code of the function to be simplified. */
2551
2552static bool
2553gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
2554 tree arg, enum built_in_function fcode)
2555{
2556 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2557 tree fn_putchar, fn_puts, newarg;
2558 const char *fmt_str = NULL;
2559
2560 /* If the return value is used, don't do the transformation. */
2561 if (gimple_call_lhs (stmt) != NULL_TREE)
2562 return false;
2563
2564 /* Check whether the format is a literal string constant. */
2565 fmt_str = c_getstr (fmt);
2566 if (fmt_str == NULL)
2567 return false;
2568
2569 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
2570 {
2571 /* If we're using an unlocked function, assume the other
2572 unlocked functions exist explicitly. */
2573 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
2574 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
2575 }
2576 else
2577 {
2578 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
2579 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
2580 }
2581
2582 if (!init_target_chars ())
2583 return false;
2584
2585 if (strcmp (fmt_str, target_percent_s) == 0
2586 || strchr (fmt_str, target_percent) == NULL)
2587 {
2588 const char *str;
2589
2590 if (strcmp (fmt_str, target_percent_s) == 0)
2591 {
2592 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
2593 return false;
2594
2595 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2596 return false;
2597
2598 str = c_getstr (arg);
2599 if (str == NULL)
2600 return false;
2601 }
2602 else
2603 {
2604 /* The format specifier doesn't contain any '%' characters. */
2605 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
2606 && arg)
2607 return false;
2608 str = fmt_str;
2609 }
2610
2611 /* If the string was "", printf does nothing. */
2612 if (str[0] == '\0')
2613 {
2614 replace_call_with_value (gsi, NULL_TREE);
2615 return true;
2616 }
2617
2618 /* If the string has length of 1, call putchar. */
2619 if (str[1] == '\0')
2620 {
2621 /* Given printf("c"), (where c is any one character,)
2622 convert "c"[0] to an int and pass that to the replacement
2623 function. */
2624 newarg = build_int_cst (integer_type_node, str[0]);
2625 if (fn_putchar)
2626 {
2627 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
2628 replace_call_with_call_and_fold (gsi, repl);
2629 return true;
2630 }
2631 }
2632 else
2633 {
2634 /* If the string was "string\n", call puts("string"). */
2635 size_t len = strlen (str);
2636 if ((unsigned char)str[len - 1] == target_newline
2637 && (size_t) (int) len == len
2638 && (int) len > 0)
2639 {
2640 char *newstr;
2641 tree offset_node, string_cst;
2642
2643 /* Create a NUL-terminated string that's one char shorter
2644 than the original, stripping off the trailing '\n'. */
2645 newarg = build_string_literal (len, str);
2646 string_cst = string_constant (newarg, &offset_node);
2647 gcc_checking_assert (string_cst
2648 && (TREE_STRING_LENGTH (string_cst)
2649 == (int) len)
2650 && integer_zerop (offset_node)
2651 && (unsigned char)
2652 TREE_STRING_POINTER (string_cst)[len - 1]
2653 == target_newline);
2654 /* build_string_literal creates a new STRING_CST,
2655 modify it in place to avoid double copying. */
2656 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
2657 newstr[len - 1] = '\0';
2658 if (fn_puts)
2659 {
2660 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
2661 replace_call_with_call_and_fold (gsi, repl);
2662 return true;
2663 }
2664 }
2665 else
2666 /* We'd like to arrange to call fputs(string,stdout) here,
2667 but we need stdout and don't have a way to get it yet. */
2668 return false;
2669 }
2670 }
2671
2672 /* The other optimizations can be done only on the non-va_list variants. */
2673 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
2674 return false;
2675
2676 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
2677 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
2678 {
2679 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2680 return false;
2681 if (fn_puts)
2682 {
2683 gcall *repl = gimple_build_call (fn_puts, 1, arg);
2684 replace_call_with_call_and_fold (gsi, repl);
2685 return true;
2686 }
2687 }
2688
2689 /* If the format specifier was "%c", call __builtin_putchar(arg). */
2690 else if (strcmp (fmt_str, target_percent_c) == 0)
2691 {
2692 if (!arg || ! useless_type_conversion_p (integer_type_node,
2693 TREE_TYPE (arg)))
2694 return false;
2695 if (fn_putchar)
2696 {
2697 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
2698 replace_call_with_call_and_fold (gsi, repl);
2699 return true;
2700 }
2701 }
2702
2703 return false;
2704}
2705
edd7ae68 2706
fef5a0d9
RB
2707
2708/* Fold a call to __builtin_strlen with known length LEN. */
2709
2710static bool
dcb7fae2 2711gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
fef5a0d9 2712{
355fe088 2713 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2 2714 tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
fef5a0d9
RB
2715 if (!len)
2716 return false;
2813904b 2717 len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
fef5a0d9
RB
2718 replace_call_with_value (gsi, len);
2719 return true;
cbdd87d4
RG
2720}
2721
48126138
NS
2722/* Fold a call to __builtin_acc_on_device. */
2723
2724static bool
2725gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
2726{
2727 /* Defer folding until we know which compiler we're in. */
2728 if (symtab->state != EXPANSION)
2729 return false;
2730
2731 unsigned val_host = GOMP_DEVICE_HOST;
2732 unsigned val_dev = GOMP_DEVICE_NONE;
2733
2734#ifdef ACCEL_COMPILER
2735 val_host = GOMP_DEVICE_NOT_HOST;
2736 val_dev = ACCEL_COMPILER_acc_device;
2737#endif
2738
2739 location_t loc = gimple_location (gsi_stmt (*gsi));
2740
2741 tree host_eq = make_ssa_name (boolean_type_node);
2742 gimple *host_ass = gimple_build_assign
2743 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
2744 gimple_set_location (host_ass, loc);
2745 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
2746
2747 tree dev_eq = make_ssa_name (boolean_type_node);
2748 gimple *dev_ass = gimple_build_assign
2749 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
2750 gimple_set_location (dev_ass, loc);
2751 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
2752
2753 tree result = make_ssa_name (boolean_type_node);
2754 gimple *result_ass = gimple_build_assign
2755 (result, BIT_IOR_EXPR, host_eq, dev_eq);
2756 gimple_set_location (result_ass, loc);
2757 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
2758
2759 replace_call_with_value (gsi, result);
2760
2761 return true;
2762}
cbdd87d4 2763
dcb7fae2
RB
2764/* Fold the non-target builtin at *GSI and return whether any simplification
2765 was made. */
cbdd87d4 2766
fef5a0d9 2767static bool
dcb7fae2 2768gimple_fold_builtin (gimple_stmt_iterator *gsi)
cbdd87d4 2769{
538dd0b7 2770 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
fef5a0d9 2771 tree callee = gimple_call_fndecl (stmt);
cbdd87d4 2772
dcb7fae2
RB
2773 /* Give up for always_inline inline builtins until they are
2774 inlined. */
2775 if (avoid_folding_inline_builtin (callee))
2776 return false;
cbdd87d4 2777
edd7ae68
RB
2778 unsigned n = gimple_call_num_args (stmt);
2779 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2780 switch (fcode)
cbdd87d4 2781 {
dcb7fae2
RB
2782 case BUILT_IN_BZERO:
2783 return gimple_fold_builtin_memset (gsi, integer_zero_node,
2784 gimple_call_arg (stmt, 1));
2785 case BUILT_IN_MEMSET:
2786 return gimple_fold_builtin_memset (gsi,
2787 gimple_call_arg (stmt, 1),
2788 gimple_call_arg (stmt, 2));
2789 case BUILT_IN_BCOPY:
2790 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
2791 gimple_call_arg (stmt, 0), 3);
2792 case BUILT_IN_MEMCPY:
2793 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2794 gimple_call_arg (stmt, 1), 0);
2795 case BUILT_IN_MEMPCPY:
2796 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2797 gimple_call_arg (stmt, 1), 1);
2798 case BUILT_IN_MEMMOVE:
2799 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2800 gimple_call_arg (stmt, 1), 3);
2801 case BUILT_IN_SPRINTF_CHK:
2802 case BUILT_IN_VSPRINTF_CHK:
edd7ae68 2803 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
dcb7fae2
RB
2804 case BUILT_IN_STRCAT_CHK:
2805 return gimple_fold_builtin_strcat_chk (gsi);
745583f9
RB
2806 case BUILT_IN_STRNCAT_CHK:
2807 return gimple_fold_builtin_strncat_chk (gsi);
cbdd87d4 2808 case BUILT_IN_STRLEN:
dcb7fae2 2809 return gimple_fold_builtin_strlen (gsi);
cbdd87d4 2810 case BUILT_IN_STRCPY:
dcb7fae2 2811 return gimple_fold_builtin_strcpy (gsi,
fef5a0d9 2812 gimple_call_arg (stmt, 0),
dcb7fae2 2813 gimple_call_arg (stmt, 1));
cbdd87d4 2814 case BUILT_IN_STRNCPY:
dcb7fae2 2815 return gimple_fold_builtin_strncpy (gsi,
fef5a0d9
RB
2816 gimple_call_arg (stmt, 0),
2817 gimple_call_arg (stmt, 1),
dcb7fae2 2818 gimple_call_arg (stmt, 2));
9a7eefec 2819 case BUILT_IN_STRCAT:
dcb7fae2
RB
2820 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
2821 gimple_call_arg (stmt, 1));
ad03a744
RB
2822 case BUILT_IN_STRNCAT:
2823 return gimple_fold_builtin_strncat (gsi);
cbdd87d4 2824 case BUILT_IN_FPUTS:
dcb7fae2
RB
2825 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2826 gimple_call_arg (stmt, 1), false);
cbdd87d4 2827 case BUILT_IN_FPUTS_UNLOCKED:
dcb7fae2
RB
2828 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2829 gimple_call_arg (stmt, 1), true);
cbdd87d4
RG
2830 case BUILT_IN_MEMCPY_CHK:
2831 case BUILT_IN_MEMPCPY_CHK:
2832 case BUILT_IN_MEMMOVE_CHK:
2833 case BUILT_IN_MEMSET_CHK:
dcb7fae2 2834 return gimple_fold_builtin_memory_chk (gsi,
fef5a0d9
RB
2835 gimple_call_arg (stmt, 0),
2836 gimple_call_arg (stmt, 1),
2837 gimple_call_arg (stmt, 2),
2838 gimple_call_arg (stmt, 3),
edd7ae68 2839 fcode);
2625bb5d
RB
2840 case BUILT_IN_STPCPY:
2841 return gimple_fold_builtin_stpcpy (gsi);
cbdd87d4
RG
2842 case BUILT_IN_STRCPY_CHK:
2843 case BUILT_IN_STPCPY_CHK:
dcb7fae2 2844 return gimple_fold_builtin_stxcpy_chk (gsi,
fef5a0d9
RB
2845 gimple_call_arg (stmt, 0),
2846 gimple_call_arg (stmt, 1),
2847 gimple_call_arg (stmt, 2),
edd7ae68 2848 fcode);
cbdd87d4 2849 case BUILT_IN_STRNCPY_CHK:
f3fc9b80 2850 case BUILT_IN_STPNCPY_CHK:
fef5a0d9
RB
2851 return gimple_fold_builtin_stxncpy_chk (gsi,
2852 gimple_call_arg (stmt, 0),
2853 gimple_call_arg (stmt, 1),
2854 gimple_call_arg (stmt, 2),
2855 gimple_call_arg (stmt, 3),
edd7ae68 2856 fcode);
cbdd87d4
RG
2857 case BUILT_IN_SNPRINTF_CHK:
2858 case BUILT_IN_VSNPRINTF_CHK:
edd7ae68 2859 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
d7e78447 2860 case BUILT_IN_SNPRINTF:
dcb7fae2 2861 return gimple_fold_builtin_snprintf (gsi);
d7e78447 2862 case BUILT_IN_SPRINTF:
dcb7fae2 2863 return gimple_fold_builtin_sprintf (gsi);
edd7ae68
RB
2864 case BUILT_IN_FPRINTF:
2865 case BUILT_IN_FPRINTF_UNLOCKED:
2866 case BUILT_IN_VFPRINTF:
2867 if (n == 2 || n == 3)
2868 return gimple_fold_builtin_fprintf (gsi,
2869 gimple_call_arg (stmt, 0),
2870 gimple_call_arg (stmt, 1),
2871 n == 3
2872 ? gimple_call_arg (stmt, 2)
2873 : NULL_TREE,
2874 fcode);
2875 break;
2876 case BUILT_IN_FPRINTF_CHK:
2877 case BUILT_IN_VFPRINTF_CHK:
2878 if (n == 3 || n == 4)
2879 return gimple_fold_builtin_fprintf (gsi,
2880 gimple_call_arg (stmt, 0),
2881 gimple_call_arg (stmt, 2),
2882 n == 4
2883 ? gimple_call_arg (stmt, 3)
2884 : NULL_TREE,
2885 fcode);
2886 break;
ad03a744
RB
2887 case BUILT_IN_PRINTF:
2888 case BUILT_IN_PRINTF_UNLOCKED:
2889 case BUILT_IN_VPRINTF:
2890 if (n == 1 || n == 2)
2891 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
2892 n == 2
2893 ? gimple_call_arg (stmt, 1)
2894 : NULL_TREE, fcode);
2895 break;
2896 case BUILT_IN_PRINTF_CHK:
2897 case BUILT_IN_VPRINTF_CHK:
2898 if (n == 2 || n == 3)
2899 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
2900 n == 3
2901 ? gimple_call_arg (stmt, 2)
2902 : NULL_TREE, fcode);
242a37f1 2903 break;
48126138
NS
2904 case BUILT_IN_ACC_ON_DEVICE:
2905 return gimple_fold_builtin_acc_on_device (gsi,
2906 gimple_call_arg (stmt, 0));
fef5a0d9
RB
2907 default:;
2908 }
2909
2910 /* Try the generic builtin folder. */
2911 bool ignore = (gimple_call_lhs (stmt) == NULL);
2912 tree result = fold_call_stmt (stmt, ignore);
2913 if (result)
2914 {
2915 if (ignore)
2916 STRIP_NOPS (result);
2917 else
2918 result = fold_convert (gimple_call_return_type (stmt), result);
2919 if (!update_call_from_tree (gsi, result))
2920 gimplify_and_update_call_from_tree (gsi, result);
2921 return true;
2922 }
2923
2924 return false;
2925}
2926
451e8dae
NS
2927/* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
2928 function calls to constants, where possible. */
2929
2930static tree
2931fold_internal_goacc_dim (const gimple *call)
2932{
2933 int axis = get_oacc_ifn_dim_arg (call);
2934 int size = get_oacc_fn_dim_size (current_function_decl, axis);
2935 bool is_pos = gimple_call_internal_fn (call) == IFN_GOACC_DIM_POS;
2936 tree result = NULL_TREE;
2937
2938 /* If the size is 1, or we only want the size and it is not dynamic,
2939 we know the answer. */
2940 if (size == 1 || (!is_pos && size))
2941 {
2942 tree type = TREE_TYPE (gimple_call_lhs (call));
2943 result = build_int_cst (type, size - is_pos);
2944 }
2945
2946 return result;
2947}
2948
1304953e
JJ
2949/* Return true if ARG0 CODE ARG1 in infinite signed precision operation
2950 doesn't fit into TYPE. The test for overflow should be regardless of
2951 -fwrapv, and even for unsigned types. */
2952
2953bool
2954arith_overflowed_p (enum tree_code code, const_tree type,
2955 const_tree arg0, const_tree arg1)
2956{
2957 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
2958 typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
2959 widest2_int_cst;
2960 widest2_int warg0 = widest2_int_cst (arg0);
2961 widest2_int warg1 = widest2_int_cst (arg1);
2962 widest2_int wres;
2963 switch (code)
2964 {
2965 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
2966 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
2967 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
2968 default: gcc_unreachable ();
2969 }
2970 signop sign = TYPE_SIGN (type);
2971 if (sign == UNSIGNED && wi::neg_p (wres))
2972 return true;
2973 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
2974}
2975
cbdd87d4
RG
2976/* Attempt to fold a call statement referenced by the statement iterator GSI.
2977 The statement may be replaced by another statement, e.g., if the call
2978 simplifies to a constant value. Return true if any changes were made.
2979 It is assumed that the operands have been previously folded. */
2980
e021c122 2981static bool
ceeffab0 2982gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
cbdd87d4 2983{
538dd0b7 2984 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3b45a007 2985 tree callee;
e021c122
RG
2986 bool changed = false;
2987 unsigned i;
cbdd87d4 2988
e021c122
RG
2989 /* Fold *& in call arguments. */
2990 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2991 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
2992 {
2993 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
2994 if (tmp)
2995 {
2996 gimple_call_set_arg (stmt, i, tmp);
2997 changed = true;
2998 }
2999 }
3b45a007
RG
3000
3001 /* Check for virtual calls that became direct calls. */
3002 callee = gimple_call_fn (stmt);
25583c4f 3003 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
3b45a007 3004 {
49c471e3
MJ
3005 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
3006 {
450ad0cd
JH
3007 if (dump_file && virtual_method_call_p (callee)
3008 && !possible_polymorphic_call_target_p
6f8091fc
JH
3009 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
3010 (OBJ_TYPE_REF_EXPR (callee)))))
450ad0cd
JH
3011 {
3012 fprintf (dump_file,
a70e9985 3013 "Type inheritance inconsistent devirtualization of ");
450ad0cd
JH
3014 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
3015 fprintf (dump_file, " to ");
3016 print_generic_expr (dump_file, callee, TDF_SLIM);
3017 fprintf (dump_file, "\n");
3018 }
3019
49c471e3 3020 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
e021c122
RG
3021 changed = true;
3022 }
a70e9985 3023 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
e021c122 3024 {
61dd6a2e
JH
3025 bool final;
3026 vec <cgraph_node *>targets
058d0a90 3027 = possible_polymorphic_call_targets (callee, stmt, &final);
2b5f0895 3028 if (final && targets.length () <= 1 && dbg_cnt (devirt))
e021c122 3029 {
a70e9985 3030 tree lhs = gimple_call_lhs (stmt);
2b5f0895
XDL
3031 if (dump_enabled_p ())
3032 {
807b7d62 3033 location_t loc = gimple_location_safe (stmt);
2b5f0895
XDL
3034 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
3035 "folding virtual function call to %s\n",
3036 targets.length () == 1
3037 ? targets[0]->name ()
3038 : "__builtin_unreachable");
3039 }
61dd6a2e 3040 if (targets.length () == 1)
cf3e5a89
JJ
3041 {
3042 gimple_call_set_fndecl (stmt, targets[0]->decl);
3043 changed = true;
a70e9985
JJ
3044 /* If the call becomes noreturn, remove the lhs. */
3045 if (lhs && (gimple_call_flags (stmt) & ECF_NORETURN))
3046 {
3047 if (TREE_CODE (lhs) == SSA_NAME)
3048 {
b731b390 3049 tree var = create_tmp_var (TREE_TYPE (lhs));
a70e9985 3050 tree def = get_or_create_ssa_default_def (cfun, var);
355fe088 3051 gimple *new_stmt = gimple_build_assign (lhs, def);
a70e9985
JJ
3052 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
3053 }
3054 gimple_call_set_lhs (stmt, NULL_TREE);
3055 }
0b986c6a 3056 maybe_remove_unused_call_args (cfun, stmt);
cf3e5a89 3057 }
a70e9985 3058 else
cf3e5a89
JJ
3059 {
3060 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
355fe088 3061 gimple *new_stmt = gimple_build_call (fndecl, 0);
cf3e5a89 3062 gimple_set_location (new_stmt, gimple_location (stmt));
a70e9985
JJ
3063 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3064 {
b731b390 3065 tree var = create_tmp_var (TREE_TYPE (lhs));
a70e9985 3066 tree def = get_or_create_ssa_default_def (cfun, var);
eb14a79f
ML
3067
3068 /* To satisfy condition for
3069 cgraph_update_edges_for_call_stmt_node,
3070 we need to preserve GIMPLE_CALL statement
3071 at position of GSI iterator. */
a70e9985 3072 update_call_from_tree (gsi, def);
eb14a79f 3073 gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
a70e9985
JJ
3074 }
3075 else
42e52a51
RB
3076 {
3077 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3078 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3079 gsi_replace (gsi, new_stmt, false);
3080 }
cf3e5a89
JJ
3081 return true;
3082 }
e021c122 3083 }
49c471e3 3084 }
e021c122 3085 }
49c471e3 3086
f2d3d07e
RH
3087 /* Check for indirect calls that became direct calls, and then
3088 no longer require a static chain. */
3089 if (gimple_call_chain (stmt))
3090 {
3091 tree fn = gimple_call_fndecl (stmt);
3092 if (fn && !DECL_STATIC_CHAIN (fn))
3093 {
3094 gimple_call_set_chain (stmt, NULL);
3095 changed = true;
3096 }
3097 else
3098 {
3099 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
3100 if (tmp)
3101 {
3102 gimple_call_set_chain (stmt, tmp);
3103 changed = true;
3104 }
3105 }
3106 }
3107
e021c122
RG
3108 if (inplace)
3109 return changed;
3110
3111 /* Check for builtins that CCP can handle using information not
3112 available in the generic fold routines. */
fef5a0d9
RB
3113 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
3114 {
3115 if (gimple_fold_builtin (gsi))
3116 changed = true;
3117 }
3118 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
e021c122 3119 {
ea679d55 3120 changed |= targetm.gimple_fold_builtin (gsi);
3b45a007 3121 }
368b454d 3122 else if (gimple_call_internal_p (stmt))
ed9c79e1 3123 {
368b454d
JJ
3124 enum tree_code subcode = ERROR_MARK;
3125 tree result = NULL_TREE;
1304953e
JJ
3126 bool cplx_result = false;
3127 tree overflow = NULL_TREE;
368b454d
JJ
3128 switch (gimple_call_internal_fn (stmt))
3129 {
3130 case IFN_BUILTIN_EXPECT:
3131 result = fold_builtin_expect (gimple_location (stmt),
3132 gimple_call_arg (stmt, 0),
3133 gimple_call_arg (stmt, 1),
3134 gimple_call_arg (stmt, 2));
3135 break;
0e82f089
MP
3136 case IFN_UBSAN_OBJECT_SIZE:
3137 if (integer_all_onesp (gimple_call_arg (stmt, 2))
3138 || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
3139 && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
3140 && tree_int_cst_le (gimple_call_arg (stmt, 1),
3141 gimple_call_arg (stmt, 2))))
3142 {
f6b4dc28 3143 gsi_replace (gsi, gimple_build_nop (), false);
0e82f089
MP
3144 unlink_stmt_vdef (stmt);
3145 release_defs (stmt);
3146 return true;
3147 }
3148 break;
451e8dae
NS
3149 case IFN_GOACC_DIM_SIZE:
3150 case IFN_GOACC_DIM_POS:
3151 result = fold_internal_goacc_dim (stmt);
3152 break;
368b454d
JJ
3153 case IFN_UBSAN_CHECK_ADD:
3154 subcode = PLUS_EXPR;
3155 break;
3156 case IFN_UBSAN_CHECK_SUB:
3157 subcode = MINUS_EXPR;
3158 break;
3159 case IFN_UBSAN_CHECK_MUL:
3160 subcode = MULT_EXPR;
3161 break;
1304953e
JJ
3162 case IFN_ADD_OVERFLOW:
3163 subcode = PLUS_EXPR;
3164 cplx_result = true;
3165 break;
3166 case IFN_SUB_OVERFLOW:
3167 subcode = MINUS_EXPR;
3168 cplx_result = true;
3169 break;
3170 case IFN_MUL_OVERFLOW:
3171 subcode = MULT_EXPR;
3172 cplx_result = true;
3173 break;
368b454d
JJ
3174 default:
3175 break;
3176 }
3177 if (subcode != ERROR_MARK)
3178 {
3179 tree arg0 = gimple_call_arg (stmt, 0);
3180 tree arg1 = gimple_call_arg (stmt, 1);
1304953e
JJ
3181 tree type = TREE_TYPE (arg0);
3182 if (cplx_result)
3183 {
3184 tree lhs = gimple_call_lhs (stmt);
3185 if (lhs == NULL_TREE)
3186 type = NULL_TREE;
3187 else
3188 type = TREE_TYPE (TREE_TYPE (lhs));
3189 }
3190 if (type == NULL_TREE)
3191 ;
368b454d 3192 /* x = y + 0; x = y - 0; x = y * 0; */
1304953e
JJ
3193 else if (integer_zerop (arg1))
3194 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
368b454d
JJ
3195 /* x = 0 + y; x = 0 * y; */
3196 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
1304953e 3197 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
368b454d
JJ
3198 /* x = y - y; */
3199 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
1304953e 3200 result = integer_zero_node;
368b454d 3201 /* x = y * 1; x = 1 * y; */
1304953e
JJ
3202 else if (subcode == MULT_EXPR && integer_onep (arg1))
3203 result = arg0;
3204 else if (subcode == MULT_EXPR && integer_onep (arg0))
3205 result = arg1;
3206 else if (TREE_CODE (arg0) == INTEGER_CST
3207 && TREE_CODE (arg1) == INTEGER_CST)
368b454d 3208 {
1304953e
JJ
3209 if (cplx_result)
3210 result = int_const_binop (subcode, fold_convert (type, arg0),
3211 fold_convert (type, arg1));
3212 else
3213 result = int_const_binop (subcode, arg0, arg1);
3214 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
3215 {
3216 if (cplx_result)
3217 overflow = build_one_cst (type);
3218 else
3219 result = NULL_TREE;
3220 }
3221 }
3222 if (result)
3223 {
3224 if (result == integer_zero_node)
3225 result = build_zero_cst (type);
3226 else if (cplx_result && TREE_TYPE (result) != type)
3227 {
3228 if (TREE_CODE (result) == INTEGER_CST)
3229 {
3230 if (arith_overflowed_p (PLUS_EXPR, type, result,
3231 integer_zero_node))
3232 overflow = build_one_cst (type);
3233 }
3234 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
3235 && TYPE_UNSIGNED (type))
3236 || (TYPE_PRECISION (type)
3237 < (TYPE_PRECISION (TREE_TYPE (result))
3238 + (TYPE_UNSIGNED (TREE_TYPE (result))
3239 && !TYPE_UNSIGNED (type)))))
3240 result = NULL_TREE;
3241 if (result)
3242 result = fold_convert (type, result);
3243 }
368b454d
JJ
3244 }
3245 }
1304953e 3246
ed9c79e1
JJ
3247 if (result)
3248 {
1304953e
JJ
3249 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
3250 result = drop_tree_overflow (result);
3251 if (cplx_result)
3252 {
3253 if (overflow == NULL_TREE)
3254 overflow = build_zero_cst (TREE_TYPE (result));
3255 tree ctype = build_complex_type (TREE_TYPE (result));
3256 if (TREE_CODE (result) == INTEGER_CST
3257 && TREE_CODE (overflow) == INTEGER_CST)
3258 result = build_complex (ctype, result, overflow);
3259 else
3260 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
3261 ctype, result, overflow);
3262 }
ed9c79e1
JJ
3263 if (!update_call_from_tree (gsi, result))
3264 gimplify_and_update_call_from_tree (gsi, result);
3265 changed = true;
3266 }
3267 }
3b45a007 3268
e021c122 3269 return changed;
cbdd87d4
RG
3270}
3271
e0ee10ed 3272
89a79e96
RB
3273/* Return true whether NAME has a use on STMT. */
3274
3275static bool
355fe088 3276has_use_on_stmt (tree name, gimple *stmt)
89a79e96
RB
3277{
3278 imm_use_iterator iter;
3279 use_operand_p use_p;
3280 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
3281 if (USE_STMT (use_p) == stmt)
3282 return true;
3283 return false;
3284}
3285
e0ee10ed
RB
3286/* Worker for fold_stmt_1 dispatch to pattern based folding with
3287 gimple_simplify.
3288
3289 Replaces *GSI with the simplification result in RCODE and OPS
3290 and the associated statements in *SEQ. Does the replacement
3291 according to INPLACE and returns true if the operation succeeded. */
3292
3293static bool
3294replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
3295 code_helper rcode, tree *ops,
3296 gimple_seq *seq, bool inplace)
3297{
355fe088 3298 gimple *stmt = gsi_stmt (*gsi);
e0ee10ed
RB
3299
3300 /* Play safe and do not allow abnormals to be mentioned in
89a79e96
RB
3301 newly created statements. See also maybe_push_res_to_seq.
3302 As an exception allow such uses if there was a use of the
3303 same SSA name on the old stmt. */
e0ee10ed 3304 if ((TREE_CODE (ops[0]) == SSA_NAME
89a79e96
RB
3305 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0])
3306 && !has_use_on_stmt (ops[0], stmt))
e0ee10ed
RB
3307 || (ops[1]
3308 && TREE_CODE (ops[1]) == SSA_NAME
89a79e96
RB
3309 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1])
3310 && !has_use_on_stmt (ops[1], stmt))
e0ee10ed
RB
3311 || (ops[2]
3312 && TREE_CODE (ops[2]) == SSA_NAME
89a79e96 3313 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])
e0237993
JJ
3314 && !has_use_on_stmt (ops[2], stmt))
3315 || (COMPARISON_CLASS_P (ops[0])
3316 && ((TREE_CODE (TREE_OPERAND (ops[0], 0)) == SSA_NAME
3317 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 0))
3318 && !has_use_on_stmt (TREE_OPERAND (ops[0], 0), stmt))
3319 || (TREE_CODE (TREE_OPERAND (ops[0], 1)) == SSA_NAME
3320 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 1))
3321 && !has_use_on_stmt (TREE_OPERAND (ops[0], 1), stmt)))))
e0ee10ed
RB
3322 return false;
3323
fec40d06
RS
3324 /* Don't insert new statements when INPLACE is true, even if we could
3325 reuse STMT for the final statement. */
3326 if (inplace && !gimple_seq_empty_p (*seq))
3327 return false;
3328
538dd0b7 3329 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
e0ee10ed
RB
3330 {
3331 gcc_assert (rcode.is_tree_code ());
3332 if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
3333 /* GIMPLE_CONDs condition may not throw. */
3334 && (!flag_exceptions
3335 || !cfun->can_throw_non_call_exceptions
3336 || !operation_could_trap_p (rcode,
3337 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
3338 false, NULL_TREE)))
538dd0b7 3339 gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
e0ee10ed 3340 else if (rcode == SSA_NAME)
538dd0b7 3341 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
e0ee10ed
RB
3342 build_zero_cst (TREE_TYPE (ops[0])));
3343 else if (rcode == INTEGER_CST)
3344 {
3345 if (integer_zerop (ops[0]))
538dd0b7 3346 gimple_cond_make_false (cond_stmt);
e0ee10ed 3347 else
538dd0b7 3348 gimple_cond_make_true (cond_stmt);
e0ee10ed
RB
3349 }
3350 else if (!inplace)
3351 {
3352 tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
3353 ops, seq);
3354 if (!res)
3355 return false;
538dd0b7 3356 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
e0ee10ed
RB
3357 build_zero_cst (TREE_TYPE (res)));
3358 }
3359 else
3360 return false;
3361 if (dump_file && (dump_flags & TDF_DETAILS))
3362 {
3363 fprintf (dump_file, "gimple_simplified to ");
3364 if (!gimple_seq_empty_p (*seq))
3365 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3366 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
3367 0, TDF_SLIM);
3368 }
3369 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
3370 return true;
3371 }
3372 else if (is_gimple_assign (stmt)
3373 && rcode.is_tree_code ())
3374 {
3375 if (!inplace
f3582e54 3376 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
e0ee10ed
RB
3377 {
3378 maybe_build_generic_op (rcode,
545cd7ec 3379 TREE_TYPE (gimple_assign_lhs (stmt)), ops);
00d66391 3380 gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
e0ee10ed
RB
3381 if (dump_file && (dump_flags & TDF_DETAILS))
3382 {
3383 fprintf (dump_file, "gimple_simplified to ");
3384 if (!gimple_seq_empty_p (*seq))
3385 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3386 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
3387 0, TDF_SLIM);
3388 }
3389 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
3390 return true;
3391 }
3392 }
37d486ab 3393 else if (rcode.is_fn_code ()
c9e926ce 3394 && gimple_call_combined_fn (stmt) == rcode)
37d486ab
RB
3395 {
3396 unsigned i;
3397 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3398 {
3399 gcc_assert (ops[i] != NULL_TREE);
3400 gimple_call_set_arg (stmt, i, ops[i]);
3401 }
3402 if (i < 3)
3403 gcc_assert (ops[i] == NULL_TREE);
fec40d06
RS
3404 if (dump_file && (dump_flags & TDF_DETAILS))
3405 {
3406 fprintf (dump_file, "gimple_simplified to ");
3407 if (!gimple_seq_empty_p (*seq))
3408 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3409 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
3410 }
3411 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
37d486ab
RB
3412 return true;
3413 }
e0ee10ed
RB
3414 else if (!inplace)
3415 {
3416 if (gimple_has_lhs (stmt))
3417 {
3418 tree lhs = gimple_get_lhs (stmt);
de665bbd
RB
3419 if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
3420 ops, seq, lhs))
3421 return false;
e0ee10ed
RB
3422 if (dump_file && (dump_flags & TDF_DETAILS))
3423 {
3424 fprintf (dump_file, "gimple_simplified to ");
3425 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3426 }
3427 gsi_replace_with_seq_vops (gsi, *seq);
3428 return true;
3429 }
3430 else
3431 gcc_unreachable ();
3432 }
3433
3434 return false;
3435}
3436
040292e7
RB
3437/* Canonicalize MEM_REFs invariant address operand after propagation. */
3438
3439static bool
3440maybe_canonicalize_mem_ref_addr (tree *t)
3441{
3442 bool res = false;
3443
3444 if (TREE_CODE (*t) == ADDR_EXPR)
3445 t = &TREE_OPERAND (*t, 0);
3446
3447 while (handled_component_p (*t))
3448 t = &TREE_OPERAND (*t, 0);
3449
3450 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
3451 of invariant addresses into a SSA name MEM_REF address. */
3452 if (TREE_CODE (*t) == MEM_REF
3453 || TREE_CODE (*t) == TARGET_MEM_REF)
3454 {
3455 tree addr = TREE_OPERAND (*t, 0);
3456 if (TREE_CODE (addr) == ADDR_EXPR
3457 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
3458 || handled_component_p (TREE_OPERAND (addr, 0))))
3459 {
3460 tree base;
3461 HOST_WIDE_INT coffset;
3462 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
3463 &coffset);
3464 if (!base)
3465 gcc_unreachable ();
3466
3467 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
3468 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
3469 TREE_OPERAND (*t, 1),
3470 size_int (coffset));
3471 res = true;
3472 }
3473 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
3474 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
3475 }
3476
3477 /* Canonicalize back MEM_REFs to plain reference trees if the object
3478 accessed is a decl that has the same access semantics as the MEM_REF. */
3479 if (TREE_CODE (*t) == MEM_REF
3480 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
f3dccf50
RB
3481 && integer_zerop (TREE_OPERAND (*t, 1))
3482 && MR_DEPENDENCE_CLIQUE (*t) == 0)
040292e7
RB
3483 {
3484 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
3485 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
3486 if (/* Same volatile qualification. */
3487 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
3488 /* Same TBAA behavior with -fstrict-aliasing. */
3489 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
3490 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
3491 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
3492 /* Same alignment. */
3493 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
3494 /* We have to look out here to not drop a required conversion
3495 from the rhs to the lhs if *t appears on the lhs or vice-versa
3496 if it appears on the rhs. Thus require strict type
3497 compatibility. */
3498 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
3499 {
3500 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
3501 res = true;
3502 }
3503 }
3504
3505 /* Canonicalize TARGET_MEM_REF in particular with respect to
3506 the indexes becoming constant. */
3507 else if (TREE_CODE (*t) == TARGET_MEM_REF)
3508 {
3509 tree tem = maybe_fold_tmr (*t);
3510 if (tem)
3511 {
3512 *t = tem;
3513 res = true;
3514 }
3515 }
3516
3517 return res;
3518}
3519
cbdd87d4
RG
3520/* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
3521 distinguishes both cases. */
3522
3523static bool
e0ee10ed 3524fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
cbdd87d4
RG
3525{
3526 bool changed = false;
355fe088 3527 gimple *stmt = gsi_stmt (*gsi);
cbdd87d4
RG
3528 unsigned i;
3529
040292e7
RB
3530 /* First do required canonicalization of [TARGET_]MEM_REF addresses
3531 after propagation.
3532 ??? This shouldn't be done in generic folding but in the
3533 propagation helpers which also know whether an address was
89a79e96
RB
3534 propagated.
3535 Also canonicalize operand order. */
040292e7
RB
3536 switch (gimple_code (stmt))
3537 {
3538 case GIMPLE_ASSIGN:
3539 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
3540 {
3541 tree *rhs = gimple_assign_rhs1_ptr (stmt);
3542 if ((REFERENCE_CLASS_P (*rhs)
3543 || TREE_CODE (*rhs) == ADDR_EXPR)
3544 && maybe_canonicalize_mem_ref_addr (rhs))
3545 changed = true;
3546 tree *lhs = gimple_assign_lhs_ptr (stmt);
3547 if (REFERENCE_CLASS_P (*lhs)
3548 && maybe_canonicalize_mem_ref_addr (lhs))
3549 changed = true;
3550 }
89a79e96
RB
3551 else
3552 {
3553 /* Canonicalize operand order. */
3554 enum tree_code code = gimple_assign_rhs_code (stmt);
3555 if (TREE_CODE_CLASS (code) == tcc_comparison
3556 || commutative_tree_code (code)
3557 || commutative_ternary_tree_code (code))
3558 {
3559 tree rhs1 = gimple_assign_rhs1 (stmt);
3560 tree rhs2 = gimple_assign_rhs2 (stmt);
3561 if (tree_swap_operands_p (rhs1, rhs2, false))
3562 {
3563 gimple_assign_set_rhs1 (stmt, rhs2);
3564 gimple_assign_set_rhs2 (stmt, rhs1);
3565 if (TREE_CODE_CLASS (code) == tcc_comparison)
3566 gimple_assign_set_rhs_code (stmt,
3567 swap_tree_comparison (code));
3568 changed = true;
3569 }
3570 }
3571 }
040292e7
RB
3572 break;
3573 case GIMPLE_CALL:
3574 {
3575 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3576 {
3577 tree *arg = gimple_call_arg_ptr (stmt, i);
3578 if (REFERENCE_CLASS_P (*arg)
3579 && maybe_canonicalize_mem_ref_addr (arg))
3580 changed = true;
3581 }
3582 tree *lhs = gimple_call_lhs_ptr (stmt);
3583 if (*lhs
3584 && REFERENCE_CLASS_P (*lhs)
3585 && maybe_canonicalize_mem_ref_addr (lhs))
3586 changed = true;
3587 break;
3588 }
3589 case GIMPLE_ASM:
3590 {
538dd0b7
DM
3591 gasm *asm_stmt = as_a <gasm *> (stmt);
3592 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
040292e7 3593 {
538dd0b7 3594 tree link = gimple_asm_output_op (asm_stmt, i);
040292e7
RB
3595 tree op = TREE_VALUE (link);
3596 if (REFERENCE_CLASS_P (op)
3597 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
3598 changed = true;
3599 }
538dd0b7 3600 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
040292e7 3601 {
538dd0b7 3602 tree link = gimple_asm_input_op (asm_stmt, i);
040292e7
RB
3603 tree op = TREE_VALUE (link);
3604 if ((REFERENCE_CLASS_P (op)
3605 || TREE_CODE (op) == ADDR_EXPR)
3606 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
3607 changed = true;
3608 }
3609 }
3610 break;
3611 case GIMPLE_DEBUG:
3612 if (gimple_debug_bind_p (stmt))
3613 {
3614 tree *val = gimple_debug_bind_get_value_ptr (stmt);
3615 if (*val
3616 && (REFERENCE_CLASS_P (*val)
3617 || TREE_CODE (*val) == ADDR_EXPR)
3618 && maybe_canonicalize_mem_ref_addr (val))
3619 changed = true;
3620 }
3621 break;
89a79e96
RB
3622 case GIMPLE_COND:
3623 {
3624 /* Canonicalize operand order. */
3625 tree lhs = gimple_cond_lhs (stmt);
3626 tree rhs = gimple_cond_rhs (stmt);
3627 if (tree_swap_operands_p (lhs, rhs, false))
3628 {
3629 gcond *gc = as_a <gcond *> (stmt);
3630 gimple_cond_set_lhs (gc, rhs);
3631 gimple_cond_set_rhs (gc, lhs);
3632 gimple_cond_set_code (gc,
3633 swap_tree_comparison (gimple_cond_code (gc)));
3634 changed = true;
3635 }
3636 }
040292e7
RB
3637 default:;
3638 }
3639
e0ee10ed
RB
3640 /* Dispatch to pattern-based folding. */
3641 if (!inplace
3642 || is_gimple_assign (stmt)
3643 || gimple_code (stmt) == GIMPLE_COND)
3644 {
3645 gimple_seq seq = NULL;
3646 code_helper rcode;
3647 tree ops[3] = {};
0ff093d8
RB
3648 if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq,
3649 valueize, valueize))
e0ee10ed
RB
3650 {
3651 if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
3652 changed = true;
3653 else
3654 gimple_seq_discard (seq);
3655 }
3656 }
3657
3658 stmt = gsi_stmt (*gsi);
3659
cbdd87d4
RG
3660 /* Fold the main computation performed by the statement. */
3661 switch (gimple_code (stmt))
3662 {
3663 case GIMPLE_ASSIGN:
3664 {
819ec64c
RB
3665 /* Try to canonicalize for boolean-typed X the comparisons
3666 X == 0, X == 1, X != 0, and X != 1. */
3667 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
3668 || gimple_assign_rhs_code (stmt) == NE_EXPR)
5fbcc0ed 3669 {
819ec64c
RB
3670 tree lhs = gimple_assign_lhs (stmt);
3671 tree op1 = gimple_assign_rhs1 (stmt);
3672 tree op2 = gimple_assign_rhs2 (stmt);
3673 tree type = TREE_TYPE (op1);
3674
3675 /* Check whether the comparison operands are of the same boolean
3676 type as the result type is.
3677 Check that second operand is an integer-constant with value
3678 one or zero. */
3679 if (TREE_CODE (op2) == INTEGER_CST
3680 && (integer_zerop (op2) || integer_onep (op2))
3681 && useless_type_conversion_p (TREE_TYPE (lhs), type))
3682 {
3683 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
3684 bool is_logical_not = false;
3685
3686 /* X == 0 and X != 1 is a logical-not.of X
3687 X == 1 and X != 0 is X */
3688 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
3689 || (cmp_code == NE_EXPR && integer_onep (op2)))
3690 is_logical_not = true;
3691
3692 if (is_logical_not == false)
3693 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
3694 /* Only for one-bit precision typed X the transformation
3695 !X -> ~X is valied. */
3696 else if (TYPE_PRECISION (type) == 1)
3697 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
3698 /* Otherwise we use !X -> X ^ 1. */
3699 else
3700 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
3701 build_int_cst (type, 1));
3702 changed = true;
3703 break;
3704 }
5fbcc0ed 3705 }
819ec64c
RB
3706
3707 unsigned old_num_ops = gimple_num_ops (stmt);
3708 tree lhs = gimple_assign_lhs (stmt);
3709 tree new_rhs = fold_gimple_assign (gsi);
cbdd87d4
RG
3710 if (new_rhs
3711 && !useless_type_conversion_p (TREE_TYPE (lhs),
3712 TREE_TYPE (new_rhs)))
3713 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
3714 if (new_rhs
3715 && (!inplace
3716 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
3717 {
3718 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
3719 changed = true;
3720 }
3721 break;
3722 }
3723
cbdd87d4 3724 case GIMPLE_CALL:
ceeffab0 3725 changed |= gimple_fold_call (gsi, inplace);
cbdd87d4
RG
3726 break;
3727
3728 case GIMPLE_ASM:
3729 /* Fold *& in asm operands. */
38384150 3730 {
538dd0b7 3731 gasm *asm_stmt = as_a <gasm *> (stmt);
38384150
JJ
3732 size_t noutputs;
3733 const char **oconstraints;
3734 const char *constraint;
3735 bool allows_mem, allows_reg;
3736
538dd0b7 3737 noutputs = gimple_asm_noutputs (asm_stmt);
38384150
JJ
3738 oconstraints = XALLOCAVEC (const char *, noutputs);
3739
538dd0b7 3740 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
38384150 3741 {
538dd0b7 3742 tree link = gimple_asm_output_op (asm_stmt, i);
38384150
JJ
3743 tree op = TREE_VALUE (link);
3744 oconstraints[i]
3745 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3746 if (REFERENCE_CLASS_P (op)
3747 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
3748 {
3749 TREE_VALUE (link) = op;
3750 changed = true;
3751 }
3752 }
538dd0b7 3753 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
38384150 3754 {
538dd0b7 3755 tree link = gimple_asm_input_op (asm_stmt, i);
38384150
JJ
3756 tree op = TREE_VALUE (link);
3757 constraint
3758 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3759 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
3760 oconstraints, &allows_mem, &allows_reg);
3761 if (REFERENCE_CLASS_P (op)
3762 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
3763 != NULL_TREE)
3764 {
3765 TREE_VALUE (link) = op;
3766 changed = true;
3767 }
3768 }
3769 }
cbdd87d4
RG
3770 break;
3771
bd422c4a
RG
3772 case GIMPLE_DEBUG:
3773 if (gimple_debug_bind_p (stmt))
3774 {
3775 tree val = gimple_debug_bind_get_value (stmt);
3776 if (val
3777 && REFERENCE_CLASS_P (val))
3778 {
3779 tree tem = maybe_fold_reference (val, false);
3780 if (tem)
3781 {
3782 gimple_debug_bind_set_value (stmt, tem);
3783 changed = true;
3784 }
3785 }
3e888a5e
RG
3786 else if (val
3787 && TREE_CODE (val) == ADDR_EXPR)
3788 {
3789 tree ref = TREE_OPERAND (val, 0);
3790 tree tem = maybe_fold_reference (ref, false);
3791 if (tem)
3792 {
3793 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
3794 gimple_debug_bind_set_value (stmt, tem);
3795 changed = true;
3796 }
3797 }
bd422c4a
RG
3798 }
3799 break;
3800
cbdd87d4
RG
3801 default:;
3802 }
3803
3804 stmt = gsi_stmt (*gsi);
3805
37376165
RB
3806 /* Fold *& on the lhs. */
3807 if (gimple_has_lhs (stmt))
cbdd87d4
RG
3808 {
3809 tree lhs = gimple_get_lhs (stmt);
3810 if (lhs && REFERENCE_CLASS_P (lhs))
3811 {
3812 tree new_lhs = maybe_fold_reference (lhs, true);
3813 if (new_lhs)
3814 {
3815 gimple_set_lhs (stmt, new_lhs);
3816 changed = true;
3817 }
3818 }
3819 }
3820
3821 return changed;
3822}
3823
e0ee10ed
RB
/* Valueization callback that ends up not following SSA edges.  */

tree
no_follow_ssa_edges (tree)
{
  /* Returning NULL_TREE tells the gimple match-and-simplify machinery
     not to look through the definition of the SSA name it was given.  */
  return NULL_TREE;
}
3831
45cc9f96
RB
3832/* Valueization callback that ends up following single-use SSA edges only. */
3833
3834tree
3835follow_single_use_edges (tree val)
3836{
3837 if (TREE_CODE (val) == SSA_NAME
3838 && !has_single_use (val))
3839 return NULL_TREE;
3840 return val;
3841}
3842
cbdd87d4
RG
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  /* Do not follow SSA use-def edges, keeping the folding local to
     the statement itself.  */
  return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
}
3855
/* Like the fold_stmt overload above, but let the caller supply a
   VALUEIZE callback mapping SSA names to values during folding.  */

bool
fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
{
  return fold_stmt_1 (gsi, false, valueize);
}
3861
/* Perform the minimal folding on statement *GSI.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement *GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
  /* In-place folding must not have replaced the statement with a new one.  */
  gcc_assert (gsi_stmt (*gsi) == stmt);
  return changed;
}
3878
e89065a1
SL
3879/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
3880 if EXPR is null or we don't know how.
3881 If non-null, the result always has boolean type. */
3882
3883static tree
3884canonicalize_bool (tree expr, bool invert)
3885{
3886 if (!expr)
3887 return NULL_TREE;
3888 else if (invert)
3889 {
3890 if (integer_nonzerop (expr))
3891 return boolean_false_node;
3892 else if (integer_zerop (expr))
3893 return boolean_true_node;
3894 else if (TREE_CODE (expr) == SSA_NAME)
3895 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
3896 build_int_cst (TREE_TYPE (expr), 0));
98209db3 3897 else if (COMPARISON_CLASS_P (expr))
e89065a1
SL
3898 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
3899 boolean_type_node,
3900 TREE_OPERAND (expr, 0),
3901 TREE_OPERAND (expr, 1));
3902 else
3903 return NULL_TREE;
3904 }
3905 else
3906 {
3907 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
3908 return expr;
3909 if (integer_nonzerop (expr))
3910 return boolean_true_node;
3911 else if (integer_zerop (expr))
3912 return boolean_false_node;
3913 else if (TREE_CODE (expr) == SSA_NAME)
3914 return fold_build2 (NE_EXPR, boolean_type_node, expr,
3915 build_int_cst (TREE_TYPE (expr), 0));
98209db3 3916 else if (COMPARISON_CLASS_P (expr))
e89065a1
SL
3917 return fold_build2 (TREE_CODE (expr),
3918 boolean_type_node,
3919 TREE_OPERAND (expr, 0),
3920 TREE_OPERAND (expr, 1));
3921 else
3922 return NULL_TREE;
3923 }
3924}
3925
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple *s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      /* A boolean name equals (name != 0) and (name == nonzero).  */
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  /* (name != 0) / (name == 1): compare EXPR against the
	     defining comparison directly.  */
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  /* (name == 0) / (name != 1): compare against the inverted
	     defining comparison.  */
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}
3983
3984/* Check to see if two boolean expressions OP1 and OP2 are logically
3985 equivalent. */
3986
3987static bool
3988same_bool_result_p (const_tree op1, const_tree op2)
3989{
3990 /* Simple cases first. */
3991 if (operand_equal_p (op1, op2, 0))
3992 return true;
3993
3994 /* Check the cases where at least one of the operands is a comparison.
3995 These are a bit smarter than operand_equal_p in that they apply some
3996 identifies on SSA_NAMEs. */
98209db3 3997 if (COMPARISON_CLASS_P (op2)
e89065a1
SL
3998 && same_bool_comparison_p (op1, TREE_CODE (op2),
3999 TREE_OPERAND (op2, 0),
4000 TREE_OPERAND (op2, 1)))
4001 return true;
98209db3 4002 if (COMPARISON_CLASS_P (op1)
e89065a1
SL
4003 && same_bool_comparison_p (op2, TREE_CODE (op1),
4004 TREE_OPERAND (op1, 0),
4005 TREE_OPERAND (op1, 1)))
4006 return true;
4007
4008 /* Default case. */
4009 return false;
4010}
4011
4012/* Forward declarations for some mutually recursive functions. */
4013
4014static tree
4015and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4016 enum tree_code code2, tree op2a, tree op2b);
4017static tree
4018and_var_with_comparison (tree var, bool invert,
4019 enum tree_code code2, tree op2a, tree op2b);
4020static tree
355fe088 4021and_var_with_comparison_1 (gimple *stmt,
e89065a1
SL
4022 enum tree_code code2, tree op2a, tree op2b);
4023static tree
4024or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4025 enum tree_code code2, tree op2a, tree op2b);
4026static tree
4027or_var_with_comparison (tree var, bool invert,
4028 enum tree_code code2, tree op2a, tree op2b);
4029static tree
355fe088 4030or_var_with_comparison_1 (gimple *stmt,
e89065a1
SL
4031 enum tree_code code2, tree op2a, tree op2b);
4032
4033/* Helper function for and_comparisons_1: try to simplify the AND of the
4034 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4035 If INVERT is true, invert the value of the VAR before doing the AND.
4036 Return NULL_EXPR if we can't simplify this to a single expression. */
4037
4038static tree
4039and_var_with_comparison (tree var, bool invert,
4040 enum tree_code code2, tree op2a, tree op2b)
4041{
4042 tree t;
355fe088 4043 gimple *stmt = SSA_NAME_DEF_STMT (var);
e89065a1
SL
4044
4045 /* We can only deal with variables whose definitions are assignments. */
4046 if (!is_gimple_assign (stmt))
4047 return NULL_TREE;
4048
4049 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4050 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
4051 Then we only have to consider the simpler non-inverted cases. */
4052 if (invert)
4053 t = or_var_with_comparison_1 (stmt,
4054 invert_tree_comparison (code2, false),
4055 op2a, op2b);
4056 else
4057 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
4058 return canonicalize_bool (t, invert);
4059}
4060
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      /* (op2a != 0) / (op2a == 1) tests op2a for truth...  */
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      /* ...while (op2a == 0) / (op2a != 1) tests it for falsehood.  */
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2,
				  op2a,
				  op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b)) */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b))) */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t) */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial) */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
4223
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      /* cmp orders the two constants: <0, 0 or >0 for op1b <, == or
	 > op2b.  The switch tables below read off whether the constant
	 op1b (or op2b) satisfies the other comparison.  */
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
	 return that or FALSE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	      else
		return boolean_false_node;
	    }
	}
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	      else
		return boolean_false_node;
	    }
	}

      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp != 0); break;
	    case NE_EXPR: val = (cmp == 0); break;
	    case LT_EXPR: val = (cmp >= 0); break;
	    case GT_EXPR: val = (cmp <= 0); break;
	    case LE_EXPR: val = (cmp > 0); break;
	    case GE_EXPR: val = (cmp < 0); break;
	    default:
	      val = false;
	    }
	  /* The second comparison implies the first; it subsumes the AND.  */
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == NE_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp <= 0); break;
	    case GT_EXPR: val = (cmp >= 0); break;
	    case LE_EXPR: val = (cmp < 0); break;
	    case GE_EXPR: val = (cmp > 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Likewise choose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Check for singleton ranges:  (x >= c AND x <= c) => x == c.  */
      else if (cmp == 0
	       && ((code1 == LE_EXPR && code2 == GE_EXPR)
		   || (code1 == GE_EXPR && code2 == LE_EXPR)))
	return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges.  */
      else if (cmp <= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_false_node;
      else if (cmp >= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* (NAME == 0) and (NAME != 1) test NAME inverted.  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (arg, invert, code2,
						      op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
4477
4478/* Try to simplify the AND of two comparisons, specified by
4479 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4480 If this can be simplified to a single expression (without requiring
4481 introducing more SSA variables to hold intermediate values),
4482 return the resulting tree. Otherwise return NULL_TREE.
4483 If the result expression is non-null, it has boolean type. */
4484
4485tree
4486maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
4487 enum tree_code code2, tree op2a, tree op2b)
4488{
4489 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4490 if (t)
4491 return t;
4492 else
4493 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4494}
4495
4496/* Helper function for or_comparisons_1: try to simplify the OR of the
4497 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4498 If INVERT is true, invert the value of VAR before doing the OR.
4499 Return NULL_EXPR if we can't simplify this to a single expression. */
4500
4501static tree
4502or_var_with_comparison (tree var, bool invert,
4503 enum tree_code code2, tree op2a, tree op2b)
4504{
4505 tree t;
355fe088 4506 gimple *stmt = SSA_NAME_DEF_STMT (var);
e89065a1
SL
4507
4508 /* We can only deal with variables whose definitions are assignments. */
4509 if (!is_gimple_assign (stmt))
4510 return NULL_TREE;
4511
4512 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4513 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
4514 Then we only have to consider the simpler non-inverted cases. */
4515 if (invert)
4516 t = and_var_with_comparison_1 (stmt,
4517 invert_tree_comparison (code2, false),
4518 op2a, op2b);
4519 else
4520 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
4521 return canonicalize_bool (t, invert);
4522}
4523
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (gimple *stmt,
			  enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true .  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      /* (op2a != 0) / (op2a == 1) tests op2a for truth...  */
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      /* ...while (op2a == 0) / (op2a != 1) tests it for falsehood.  */
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_true_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (innercode,
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 code2,
				 op2a,
				 op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
	 inner1 OR (inner1 AND inner2) => inner1
	 !inner1 OR (inner1 OR inner2) => true
	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
      */
      if (inner1 == true_test_var)
	return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the OR across the inner tests.
	 Compute the first partial result, (inner1 OR (op2a code op2b)) */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (t OR inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_or)
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (integer_zerop (t))
		return inner2;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner2 OR (op2a code op2b))) */
	  else if (integer_zerop (t))
	    return boolean_false_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (inner1 OR t)
	     => (t OR partial) */
	  if (is_or)
	    {
	      if (integer_zerop (t))
		return inner1;
	      else if (integer_onep (t))
		return boolean_true_node;
	      /* If both are the same, we can apply the identity
		 (x OR x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner1 OR (op2a code2 op2b)))
	     => (t AND partial) */
	  else
	    {
	      if (integer_zerop (t))
		return boolean_false_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed AND expression.  The
		     interesting case is when at least one is true.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_onep (partial))
		    return t;
		  else if (integer_onep (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
4687
4688/* Try to simplify the OR of two comparisons defined by
4689 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4690 If this can be done without constructing an intermediate value,
4691 return the resulting tree; otherwise NULL_TREE is returned.
4692 This function is deliberately asymmetric as it recurses on SSA_DEFs
4693 in the first comparison but not the second. */
4694
4695static tree
4696or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4697 enum tree_code code2, tree op2a, tree op2b)
4698{
ae22ac3c 4699 tree truth_type = truth_type_for (TREE_TYPE (op1a));
31ed6226 4700
e89065a1
SL
4701 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
4702 if (operand_equal_p (op1a, op2a, 0)
4703 && operand_equal_p (op1b, op2b, 0))
4704 {
eb9820c0 4705 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
4706 tree t = combine_comparisons (UNKNOWN_LOCATION,
4707 TRUTH_ORIF_EXPR, code1, code2,
31ed6226 4708 truth_type, op1a, op1b);
e89065a1
SL
4709 if (t)
4710 return t;
4711 }
4712
4713 /* Likewise the swapped case of the above. */
4714 if (operand_equal_p (op1a, op2b, 0)
4715 && operand_equal_p (op1b, op2a, 0))
4716 {
eb9820c0 4717 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
4718 tree t = combine_comparisons (UNKNOWN_LOCATION,
4719 TRUTH_ORIF_EXPR, code1,
4720 swap_tree_comparison (code2),
31ed6226 4721 truth_type, op1a, op1b);
e89065a1
SL
4722 if (t)
4723 return t;
4724 }
4725
4726 /* If both comparisons are of the same value against constants, we might
4727 be able to merge them. */
4728 if (operand_equal_p (op1a, op2a, 0)
4729 && TREE_CODE (op1b) == INTEGER_CST
4730 && TREE_CODE (op2b) == INTEGER_CST)
4731 {
4732 int cmp = tree_int_cst_compare (op1b, op2b);
4733
4734 /* If we have (op1a != op1b), we should either be able to
4735 return that or TRUE, depending on whether the constant op1b
4736 also satisfies the other comparison against op2b. */
4737 if (code1 == NE_EXPR)
4738 {
4739 bool done = true;
4740 bool val;
4741 switch (code2)
4742 {
4743 case EQ_EXPR: val = (cmp == 0); break;
4744 case NE_EXPR: val = (cmp != 0); break;
4745 case LT_EXPR: val = (cmp < 0); break;
4746 case GT_EXPR: val = (cmp > 0); break;
4747 case LE_EXPR: val = (cmp <= 0); break;
4748 case GE_EXPR: val = (cmp >= 0); break;
4749 default: done = false;
4750 }
4751 if (done)
4752 {
4753 if (val)
4754 return boolean_true_node;
4755 else
4756 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4757 }
4758 }
4759 /* Likewise if the second comparison is a != comparison. */
4760 else if (code2 == NE_EXPR)
4761 {
4762 bool done = true;
4763 bool val;
4764 switch (code1)
4765 {
4766 case EQ_EXPR: val = (cmp == 0); break;
4767 case NE_EXPR: val = (cmp != 0); break;
4768 case LT_EXPR: val = (cmp > 0); break;
4769 case GT_EXPR: val = (cmp < 0); break;
4770 case LE_EXPR: val = (cmp >= 0); break;
4771 case GE_EXPR: val = (cmp <= 0); break;
4772 default: done = false;
4773 }
4774 if (done)
4775 {
4776 if (val)
4777 return boolean_true_node;
4778 else
4779 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4780 }
4781 }
4782
4783 /* See if an equality test is redundant with the other comparison. */
4784 else if (code1 == EQ_EXPR)
4785 {
4786 bool val;
4787 switch (code2)
4788 {
4789 case EQ_EXPR: val = (cmp == 0); break;
4790 case NE_EXPR: val = (cmp != 0); break;
4791 case LT_EXPR: val = (cmp < 0); break;
4792 case GT_EXPR: val = (cmp > 0); break;
4793 case LE_EXPR: val = (cmp <= 0); break;
4794 case GE_EXPR: val = (cmp >= 0); break;
4795 default:
4796 val = false;
4797 }
4798 if (val)
4799 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4800 }
4801 else if (code2 == EQ_EXPR)
4802 {
4803 bool val;
4804 switch (code1)
4805 {
4806 case EQ_EXPR: val = (cmp == 0); break;
4807 case NE_EXPR: val = (cmp != 0); break;
4808 case LT_EXPR: val = (cmp > 0); break;
4809 case GT_EXPR: val = (cmp < 0); break;
4810 case LE_EXPR: val = (cmp >= 0); break;
4811 case GE_EXPR: val = (cmp <= 0); break;
4812 default:
4813 val = false;
4814 }
4815 if (val)
4816 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4817 }
4818
4819 /* Chose the less restrictive of two < or <= comparisons. */
4820 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
4821 && (code2 == LT_EXPR || code2 == LE_EXPR))
4822 {
4823 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
4824 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4825 else
4826 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4827 }
4828
4829 /* Likewise chose the less restrictive of two > or >= comparisons. */
4830 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
4831 && (code2 == GT_EXPR || code2 == GE_EXPR))
4832 {
4833 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
4834 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4835 else
4836 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4837 }
4838
4839 /* Check for singleton ranges. */
4840 else if (cmp == 0
4841 && ((code1 == LT_EXPR && code2 == GT_EXPR)
4842 || (code1 == GT_EXPR && code2 == LT_EXPR)))
4843 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
4844
4845 /* Check for less/greater pairs that don't restrict the range at all. */
4846 else if (cmp >= 0
4847 && (code1 == LT_EXPR || code1 == LE_EXPR)
4848 && (code2 == GT_EXPR || code2 == GE_EXPR))
4849 return boolean_true_node;
4850 else if (cmp <= 0
4851 && (code1 == GT_EXPR || code1 == GE_EXPR)
4852 && (code2 == LT_EXPR || code2 == LE_EXPR))
4853 return boolean_true_node;
4854 }
4855
4856 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4857 NAME's definition is a truth value. See if there are any simplifications
4858 that can be done against the NAME's definition. */
4859 if (TREE_CODE (op1a) == SSA_NAME
4860 && (code1 == NE_EXPR || code1 == EQ_EXPR)
4861 && (integer_zerop (op1b) || integer_onep (op1b)))
4862 {
4863 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
4864 || (code1 == NE_EXPR && integer_onep (op1b)));
355fe088 4865 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
e89065a1
SL
4866 switch (gimple_code (stmt))
4867 {
4868 case GIMPLE_ASSIGN:
4869 /* Try to simplify by copy-propagating the definition. */
4870 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
4871
4872 case GIMPLE_PHI:
4873 /* If every argument to the PHI produces the same result when
4874 ORed with the second comparison, we win.
4875 Do not do this unless the type is bool since we need a bool
4876 result here anyway. */
4877 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
4878 {
4879 tree result = NULL_TREE;
4880 unsigned i;
4881 for (i = 0; i < gimple_phi_num_args (stmt); i++)
4882 {
4883 tree arg = gimple_phi_arg_def (stmt, i);
4884
4885 /* If this PHI has itself as an argument, ignore it.
4886 If all the other args produce the same result,
4887 we're still OK. */
4888 if (arg == gimple_phi_result (stmt))
4889 continue;
4890 else if (TREE_CODE (arg) == INTEGER_CST)
4891 {
4892 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
4893 {
4894 if (!result)
4895 result = boolean_true_node;
4896 else if (!integer_onep (result))
4897 return NULL_TREE;
4898 }
4899 else if (!result)
4900 result = fold_build2 (code2, boolean_type_node,
4901 op2a, op2b);
4902 else if (!same_bool_comparison_p (result,
4903 code2, op2a, op2b))
4904 return NULL_TREE;
4905 }
0e8b84ec
JJ
4906 else if (TREE_CODE (arg) == SSA_NAME
4907 && !SSA_NAME_IS_DEFAULT_DEF (arg))
e89065a1 4908 {
6c66f733 4909 tree temp;
355fe088 4910 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6c66f733
JJ
4911 /* In simple cases we can look through PHI nodes,
4912 but we have to be careful with loops.
4913 See PR49073. */
4914 if (! dom_info_available_p (CDI_DOMINATORS)
4915 || gimple_bb (def_stmt) == gimple_bb (stmt)
4916 || dominated_by_p (CDI_DOMINATORS,
4917 gimple_bb (def_stmt),
4918 gimple_bb (stmt)))
4919 return NULL_TREE;
4920 temp = or_var_with_comparison (arg, invert, code2,
4921 op2a, op2b);
e89065a1
SL
4922 if (!temp)
4923 return NULL_TREE;
4924 else if (!result)
4925 result = temp;
4926 else if (!same_bool_result_p (result, temp))
4927 return NULL_TREE;
4928 }
4929 else
4930 return NULL_TREE;
4931 }
4932 return result;
4933 }
4934
4935 default:
4936 break;
4937 }
4938 }
4939 return NULL_TREE;
4940}
4941
4942/* Try to simplify the OR of two comparisons, specified by
4943 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4944 If this can be simplified to a single expression (without requiring
4945 introducing more SSA variables to hold intermediate values),
4946 return the resulting tree. Otherwise return NULL_TREE.
4947 If the result expression is non-null, it has boolean type. */
4948
4949tree
4950maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
4951 enum tree_code code2, tree op2a, tree op2b)
4952{
4953 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4954 if (t)
4955 return t;
4956 else
4957 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4958}
cfef45c8
RG
4959
4960
4961/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
4962
4963 Either NULL_TREE, a simplified but non-constant or a constant
4964 is returned.
4965
4966 ??? This should go into a gimple-fold-inline.h file to be eventually
4967 privatized with the single valueize function used in the various TUs
4968 to avoid the indirect function call overhead. */
4969
4970tree
355fe088 4971gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
d2a85801 4972 tree (*gvalueize) (tree))
cfef45c8 4973{
45cc9f96
RB
4974 code_helper rcode;
4975 tree ops[3] = {};
4976 /* ??? The SSA propagators do not correctly deal with following SSA use-def
4977 edges if there are intermediate VARYING defs. For this reason
4978 do not follow SSA edges here even though SCCVN can technically
4979 just deal fine with that. */
34050b6b 4980 if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize))
45cc9f96 4981 {
34050b6b 4982 tree res = NULL_TREE;
c0f62740 4983 if (gimple_simplified_result_is_gimple_val (rcode, ops))
34050b6b
RB
4984 res = ops[0];
4985 else if (mprts_hook)
4986 res = mprts_hook (rcode, gimple_expr_type (stmt), ops);
4987 if (res)
45cc9f96 4988 {
34050b6b
RB
4989 if (dump_file && dump_flags & TDF_DETAILS)
4990 {
4991 fprintf (dump_file, "Match-and-simplified ");
4992 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
4993 fprintf (dump_file, " to ");
4994 print_generic_expr (dump_file, res, 0);
4995 fprintf (dump_file, "\n");
4996 }
4997 return res;
45cc9f96 4998 }
45cc9f96
RB
4999 }
5000
cfef45c8
RG
5001 location_t loc = gimple_location (stmt);
5002 switch (gimple_code (stmt))
5003 {
5004 case GIMPLE_ASSIGN:
5005 {
5006 enum tree_code subcode = gimple_assign_rhs_code (stmt);
5007
5008 switch (get_gimple_rhs_class (subcode))
5009 {
5010 case GIMPLE_SINGLE_RHS:
5011 {
5012 tree rhs = gimple_assign_rhs1 (stmt);
5013 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
5014
5015 if (TREE_CODE (rhs) == SSA_NAME)
5016 {
5017 /* If the RHS is an SSA_NAME, return its known constant value,
5018 if any. */
5019 return (*valueize) (rhs);
5020 }
5021 /* Handle propagating invariant addresses into address
5022 operations. */
5023 else if (TREE_CODE (rhs) == ADDR_EXPR
5024 && !is_gimple_min_invariant (rhs))
5025 {
d25c4172 5026 HOST_WIDE_INT offset = 0;
cfef45c8
RG
5027 tree base;
5028 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
5029 &offset,
5030 valueize);
5031 if (base
5032 && (CONSTANT_CLASS_P (base)
5033 || decl_address_invariant_p (base)))
5034 return build_invariant_address (TREE_TYPE (rhs),
5035 base, offset);
5036 }
5037 else if (TREE_CODE (rhs) == CONSTRUCTOR
5038 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
5039 && (CONSTRUCTOR_NELTS (rhs)
5040 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
5041 {
5042 unsigned i;
d2a12ae7 5043 tree val, *vec;
cfef45c8 5044
d2a12ae7
RG
5045 vec = XALLOCAVEC (tree,
5046 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
cfef45c8
RG
5047 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
5048 {
5049 val = (*valueize) (val);
5050 if (TREE_CODE (val) == INTEGER_CST
5051 || TREE_CODE (val) == REAL_CST
5052 || TREE_CODE (val) == FIXED_CST)
d2a12ae7 5053 vec[i] = val;
cfef45c8
RG
5054 else
5055 return NULL_TREE;
5056 }
5057
d2a12ae7 5058 return build_vector (TREE_TYPE (rhs), vec);
cfef45c8 5059 }
bdf37f7a
JH
5060 if (subcode == OBJ_TYPE_REF)
5061 {
5062 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
5063 /* If callee is constant, we can fold away the wrapper. */
5064 if (is_gimple_min_invariant (val))
5065 return val;
5066 }
cfef45c8
RG
5067
5068 if (kind == tcc_reference)
5069 {
5070 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
5071 || TREE_CODE (rhs) == REALPART_EXPR
5072 || TREE_CODE (rhs) == IMAGPART_EXPR)
5073 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5074 {
5075 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5076 return fold_unary_loc (EXPR_LOCATION (rhs),
5077 TREE_CODE (rhs),
5078 TREE_TYPE (rhs), val);
5079 }
5080 else if (TREE_CODE (rhs) == BIT_FIELD_REF
5081 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5082 {
5083 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5084 return fold_ternary_loc (EXPR_LOCATION (rhs),
5085 TREE_CODE (rhs),
5086 TREE_TYPE (rhs), val,
5087 TREE_OPERAND (rhs, 1),
5088 TREE_OPERAND (rhs, 2));
5089 }
5090 else if (TREE_CODE (rhs) == MEM_REF
5091 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5092 {
5093 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5094 if (TREE_CODE (val) == ADDR_EXPR
5095 && is_gimple_min_invariant (val))
5096 {
5097 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
5098 unshare_expr (val),
5099 TREE_OPERAND (rhs, 1));
5100 if (tem)
5101 rhs = tem;
5102 }
5103 }
5104 return fold_const_aggregate_ref_1 (rhs, valueize);
5105 }
5106 else if (kind == tcc_declaration)
5107 return get_symbol_constant_value (rhs);
5108 return rhs;
5109 }
5110
5111 case GIMPLE_UNARY_RHS:
f3582e54 5112 return NULL_TREE;
cfef45c8
RG
5113
5114 case GIMPLE_BINARY_RHS:
4b1b9e64
RB
5115 /* Translate &x + CST into an invariant form suitable for
5116 further propagation. */
5117 if (subcode == POINTER_PLUS_EXPR)
5118 {
4b1b9e64
RB
5119 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
5120 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
4b1b9e64
RB
5121 if (TREE_CODE (op0) == ADDR_EXPR
5122 && TREE_CODE (op1) == INTEGER_CST)
5123 {
5124 tree off = fold_convert (ptr_type_node, op1);
5125 return build_fold_addr_expr_loc
5126 (loc,
5127 fold_build2 (MEM_REF,
5128 TREE_TYPE (TREE_TYPE (op0)),
5129 unshare_expr (op0), off));
5130 }
5131 }
59c20dc7
RB
5132 /* Canonicalize bool != 0 and bool == 0 appearing after
5133 valueization. While gimple_simplify handles this
5134 it can get confused by the ~X == 1 -> X == 0 transform
5135 which we cant reduce to a SSA name or a constant
5136 (and we have no way to tell gimple_simplify to not
5137 consider those transforms in the first place). */
5138 else if (subcode == EQ_EXPR
5139 || subcode == NE_EXPR)
5140 {
5141 tree lhs = gimple_assign_lhs (stmt);
5142 tree op0 = gimple_assign_rhs1 (stmt);
5143 if (useless_type_conversion_p (TREE_TYPE (lhs),
5144 TREE_TYPE (op0)))
5145 {
5146 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5147 op0 = (*valueize) (op0);
8861704d
RB
5148 if (TREE_CODE (op0) == INTEGER_CST)
5149 std::swap (op0, op1);
5150 if (TREE_CODE (op1) == INTEGER_CST
5151 && ((subcode == NE_EXPR && integer_zerop (op1))
5152 || (subcode == EQ_EXPR && integer_onep (op1))))
5153 return op0;
59c20dc7
RB
5154 }
5155 }
4b1b9e64 5156 return NULL_TREE;
cfef45c8
RG
5157
5158 case GIMPLE_TERNARY_RHS:
5159 {
5160 /* Handle ternary operators that can appear in GIMPLE form. */
5161 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
5162 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5163 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
cfef45c8
RG
5164 return fold_ternary_loc (loc, subcode,
5165 gimple_expr_type (stmt), op0, op1, op2);
5166 }
5167
5168 default:
5169 gcc_unreachable ();
5170 }
5171 }
5172
5173 case GIMPLE_CALL:
5174 {
25583c4f 5175 tree fn;
538dd0b7 5176 gcall *call_stmt = as_a <gcall *> (stmt);
25583c4f
RS
5177
5178 if (gimple_call_internal_p (stmt))
31e071ae
MP
5179 {
5180 enum tree_code subcode = ERROR_MARK;
5181 switch (gimple_call_internal_fn (stmt))
5182 {
5183 case IFN_UBSAN_CHECK_ADD:
5184 subcode = PLUS_EXPR;
5185 break;
5186 case IFN_UBSAN_CHECK_SUB:
5187 subcode = MINUS_EXPR;
5188 break;
5189 case IFN_UBSAN_CHECK_MUL:
5190 subcode = MULT_EXPR;
5191 break;
5192 default:
5193 return NULL_TREE;
5194 }
368b454d
JJ
5195 tree arg0 = gimple_call_arg (stmt, 0);
5196 tree arg1 = gimple_call_arg (stmt, 1);
5197 tree op0 = (*valueize) (arg0);
5198 tree op1 = (*valueize) (arg1);
31e071ae
MP
5199
5200 if (TREE_CODE (op0) != INTEGER_CST
5201 || TREE_CODE (op1) != INTEGER_CST)
368b454d
JJ
5202 {
5203 switch (subcode)
5204 {
5205 case MULT_EXPR:
5206 /* x * 0 = 0 * x = 0 without overflow. */
5207 if (integer_zerop (op0) || integer_zerop (op1))
5208 return build_zero_cst (TREE_TYPE (arg0));
5209 break;
5210 case MINUS_EXPR:
5211 /* y - y = 0 without overflow. */
5212 if (operand_equal_p (op0, op1, 0))
5213 return build_zero_cst (TREE_TYPE (arg0));
5214 break;
5215 default:
5216 break;
5217 }
5218 }
5219 tree res
5220 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
31e071ae
MP
5221 if (res
5222 && TREE_CODE (res) == INTEGER_CST
5223 && !TREE_OVERFLOW (res))
5224 return res;
5225 return NULL_TREE;
5226 }
25583c4f
RS
5227
5228 fn = (*valueize) (gimple_call_fn (stmt));
cfef45c8
RG
5229 if (TREE_CODE (fn) == ADDR_EXPR
5230 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
5c944c6c
RB
5231 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
5232 && gimple_builtin_call_types_compatible_p (stmt,
5233 TREE_OPERAND (fn, 0)))
cfef45c8
RG
5234 {
5235 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
a6a0570f 5236 tree retval;
cfef45c8
RG
5237 unsigned i;
5238 for (i = 0; i < gimple_call_num_args (stmt); ++i)
5239 args[i] = (*valueize) (gimple_call_arg (stmt, i));
a6a0570f 5240 retval = fold_builtin_call_array (loc,
538dd0b7 5241 gimple_call_return_type (call_stmt),
cfef45c8 5242 fn, gimple_call_num_args (stmt), args);
cfef45c8 5243 if (retval)
5c944c6c
RB
5244 {
5245 /* fold_call_expr wraps the result inside a NOP_EXPR. */
5246 STRIP_NOPS (retval);
538dd0b7
DM
5247 retval = fold_convert (gimple_call_return_type (call_stmt),
5248 retval);
5c944c6c 5249 }
cfef45c8
RG
5250 return retval;
5251 }
5252 return NULL_TREE;
5253 }
5254
5255 default:
5256 return NULL_TREE;
5257 }
5258}
5259
5260/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5261 Returns NULL_TREE if folding to a constant is not possible, otherwise
5262 returns a constant according to is_gimple_min_invariant. */
5263
5264tree
355fe088 5265gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
cfef45c8
RG
5266{
5267 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
5268 if (res && is_gimple_min_invariant (res))
5269 return res;
5270 return NULL_TREE;
5271}
5272
5273
5274/* The following set of functions are supposed to fold references using
5275 their constant initializers. */
5276
cfef45c8
RG
5277/* See if we can find constructor defining value of BASE.
5278 When we know the consructor with constant offset (such as
5279 base is array[40] and we do know constructor of array), then
5280 BIT_OFFSET is adjusted accordingly.
5281
5282 As a special case, return error_mark_node when constructor
5283 is not explicitly available, but it is known to be zero
5284 such as 'static const int a;'. */
5285static tree
5286get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
5287 tree (*valueize)(tree))
5288{
5289 HOST_WIDE_INT bit_offset2, size, max_size;
ee45a32d
EB
5290 bool reverse;
5291
cfef45c8
RG
5292 if (TREE_CODE (base) == MEM_REF)
5293 {
5294 if (!integer_zerop (TREE_OPERAND (base, 1)))
5295 {
9541ffee 5296 if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
cfef45c8 5297 return NULL_TREE;
807e902e 5298 *bit_offset += (mem_ref_offset (base).to_short_addr ()
cfef45c8
RG
5299 * BITS_PER_UNIT);
5300 }
5301
5302 if (valueize
5303 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
5304 base = valueize (TREE_OPERAND (base, 0));
5305 if (!base || TREE_CODE (base) != ADDR_EXPR)
5306 return NULL_TREE;
5307 base = TREE_OPERAND (base, 0);
5308 }
5309
5310 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
5311 DECL_INITIAL. If BASE is a nested reference into another
5312 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
5313 the inner reference. */
5314 switch (TREE_CODE (base))
5315 {
5316 case VAR_DECL:
cfef45c8 5317 case CONST_DECL:
6a6dac52
JH
5318 {
5319 tree init = ctor_for_folding (base);
5320
688010ba 5321 /* Our semantic is exact opposite of ctor_for_folding;
6a6dac52
JH
5322 NULL means unknown, while error_mark_node is 0. */
5323 if (init == error_mark_node)
5324 return NULL_TREE;
5325 if (!init)
5326 return error_mark_node;
5327 return init;
5328 }
cfef45c8
RG
5329
5330 case ARRAY_REF:
5331 case COMPONENT_REF:
ee45a32d
EB
5332 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
5333 &reverse);
cfef45c8
RG
5334 if (max_size == -1 || size != max_size)
5335 return NULL_TREE;
5336 *bit_offset += bit_offset2;
5337 return get_base_constructor (base, bit_offset, valueize);
5338
5339 case STRING_CST:
5340 case CONSTRUCTOR:
5341 return base;
5342
5343 default:
5344 return NULL_TREE;
5345 }
5346}
5347
cfef45c8
RG
5348/* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
5349 SIZE to the memory at bit OFFSET. */
5350
5351static tree
5352fold_array_ctor_reference (tree type, tree ctor,
5353 unsigned HOST_WIDE_INT offset,
c44c2088
JH
5354 unsigned HOST_WIDE_INT size,
5355 tree from_decl)
cfef45c8 5356{
807e902e
KZ
5357 offset_int low_bound;
5358 offset_int elt_size;
807e902e 5359 offset_int access_index;
6a636014 5360 tree domain_type = NULL_TREE;
cfef45c8
RG
5361 HOST_WIDE_INT inner_offset;
5362
5363 /* Compute low bound and elt size. */
eb8f1123
RG
5364 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
5365 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
cfef45c8
RG
5366 if (domain_type && TYPE_MIN_VALUE (domain_type))
5367 {
5368 /* Static constructors for variably sized objects makes no sense. */
5369 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
807e902e 5370 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
cfef45c8
RG
5371 }
5372 else
807e902e 5373 low_bound = 0;
cfef45c8 5374 /* Static constructors for variably sized objects makes no sense. */
c3284718 5375 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
cfef45c8 5376 == INTEGER_CST);
807e902e 5377 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
cfef45c8
RG
5378
5379 /* We can handle only constantly sized accesses that are known to not
5380 be larger than size of array element. */
5381 if (!TYPE_SIZE_UNIT (type)
5382 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
807e902e
KZ
5383 || wi::lts_p (elt_size, wi::to_offset (TYPE_SIZE_UNIT (type)))
5384 || elt_size == 0)
cfef45c8
RG
5385 return NULL_TREE;
5386
5387 /* Compute the array index we look for. */
807e902e
KZ
5388 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
5389 elt_size);
27bcd47c 5390 access_index += low_bound;
cfef45c8
RG
5391
5392 /* And offset within the access. */
27bcd47c 5393 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
cfef45c8
RG
5394
5395 /* See if the array field is large enough to span whole access. We do not
5396 care to fold accesses spanning multiple array indexes. */
27bcd47c 5397 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
cfef45c8 5398 return NULL_TREE;
6a636014
AL
5399 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
5400 return fold_ctor_reference (type, val, inner_offset, size, from_decl);
cfef45c8 5401
cfef45c8
RG
5402 /* When memory is not explicitely mentioned in constructor,
5403 it is 0 (or out of range). */
5404 return build_zero_cst (type);
5405}
5406
5407/* CTOR is CONSTRUCTOR of an aggregate or vector.
5408 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
5409
5410static tree
5411fold_nonarray_ctor_reference (tree type, tree ctor,
5412 unsigned HOST_WIDE_INT offset,
c44c2088
JH
5413 unsigned HOST_WIDE_INT size,
5414 tree from_decl)
cfef45c8
RG
5415{
5416 unsigned HOST_WIDE_INT cnt;
5417 tree cfield, cval;
5418
5419 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
5420 cval)
5421 {
5422 tree byte_offset = DECL_FIELD_OFFSET (cfield);
5423 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
5424 tree field_size = DECL_SIZE (cfield);
807e902e
KZ
5425 offset_int bitoffset;
5426 offset_int bitoffset_end, access_end;
cfef45c8
RG
5427
5428 /* Variable sized objects in static constructors makes no sense,
5429 but field_size can be NULL for flexible array members. */
5430 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
5431 && TREE_CODE (byte_offset) == INTEGER_CST
5432 && (field_size != NULL_TREE
5433 ? TREE_CODE (field_size) == INTEGER_CST
5434 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
5435
5436 /* Compute bit offset of the field. */
807e902e
KZ
5437 bitoffset = (wi::to_offset (field_offset)
5438 + wi::lshift (wi::to_offset (byte_offset),
5439 LOG2_BITS_PER_UNIT));
cfef45c8
RG
5440 /* Compute bit offset where the field ends. */
5441 if (field_size != NULL_TREE)
807e902e 5442 bitoffset_end = bitoffset + wi::to_offset (field_size);
cfef45c8 5443 else
807e902e 5444 bitoffset_end = 0;
cfef45c8 5445
807e902e 5446 access_end = offset_int (offset) + size;
b8b2b009
JJ
5447
5448 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
5449 [BITOFFSET, BITOFFSET_END)? */
807e902e 5450 if (wi::cmps (access_end, bitoffset) > 0
cfef45c8 5451 && (field_size == NULL_TREE
807e902e 5452 || wi::lts_p (offset, bitoffset_end)))
cfef45c8 5453 {
807e902e 5454 offset_int inner_offset = offset_int (offset) - bitoffset;
cfef45c8
RG
5455 /* We do have overlap. Now see if field is large enough to
5456 cover the access. Give up for accesses spanning multiple
5457 fields. */
807e902e 5458 if (wi::cmps (access_end, bitoffset_end) > 0)
cfef45c8 5459 return NULL_TREE;
807e902e 5460 if (wi::lts_p (offset, bitoffset))
b8b2b009 5461 return NULL_TREE;
cfef45c8 5462 return fold_ctor_reference (type, cval,
27bcd47c 5463 inner_offset.to_uhwi (), size,
c44c2088 5464 from_decl);
cfef45c8
RG
5465 }
5466 }
5467 /* When memory is not explicitely mentioned in constructor, it is 0. */
5468 return build_zero_cst (type);
5469}
5470
5471/* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
5472 to the memory at bit OFFSET. */
5473
8403c2cf 5474tree
cfef45c8 5475fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
c44c2088 5476 unsigned HOST_WIDE_INT size, tree from_decl)
cfef45c8
RG
5477{
5478 tree ret;
5479
5480 /* We found the field with exact match. */
5481 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
5482 && !offset)
9d60be38 5483 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
cfef45c8
RG
5484
5485 /* We are at the end of walk, see if we can view convert the
5486 result. */
5487 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
5488 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
3d8208ce
TP
5489 && !compare_tree_int (TYPE_SIZE (type), size)
5490 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
cfef45c8 5491 {
9d60be38 5492 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
cfef45c8
RG
5493 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
5494 if (ret)
86c5a5c3 5495 STRIP_USELESS_TYPE_CONVERSION (ret);
cfef45c8
RG
5496 return ret;
5497 }
b2505143
RB
5498 /* For constants and byte-aligned/sized reads try to go through
5499 native_encode/interpret. */
5500 if (CONSTANT_CLASS_P (ctor)
5501 && BITS_PER_UNIT == 8
5502 && offset % BITS_PER_UNIT == 0
5503 && size % BITS_PER_UNIT == 0
5504 && size <= MAX_BITSIZE_MODE_ANY_MODE)
5505 {
5506 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1ff0a84c
JJ
5507 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
5508 offset / BITS_PER_UNIT);
5509 if (len > 0)
5510 return native_interpret_expr (type, buf, len);
b2505143 5511 }
cfef45c8
RG
5512 if (TREE_CODE (ctor) == CONSTRUCTOR)
5513 {
5514
eb8f1123
RG
5515 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
5516 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
c44c2088
JH
5517 return fold_array_ctor_reference (type, ctor, offset, size,
5518 from_decl);
cfef45c8 5519 else
c44c2088
JH
5520 return fold_nonarray_ctor_reference (type, ctor, offset, size,
5521 from_decl);
cfef45c8
RG
5522 }
5523
5524 return NULL_TREE;
5525}
5526
5527/* Return the tree representing the element referenced by T if T is an
5528 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
5529 names using VALUEIZE. Return NULL_TREE otherwise. */
5530
5531tree
5532fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
5533{
5534 tree ctor, idx, base;
5535 HOST_WIDE_INT offset, size, max_size;
5536 tree tem;
ee45a32d 5537 bool reverse;
cfef45c8 5538
f8a7df45
RG
5539 if (TREE_THIS_VOLATILE (t))
5540 return NULL_TREE;
5541
3a65ee74 5542 if (DECL_P (t))
cfef45c8
RG
5543 return get_symbol_constant_value (t);
5544
5545 tem = fold_read_from_constant_string (t);
5546 if (tem)
5547 return tem;
5548
5549 switch (TREE_CODE (t))
5550 {
5551 case ARRAY_REF:
5552 case ARRAY_RANGE_REF:
5553 /* Constant indexes are handled well by get_base_constructor.
5554 Only special case variable offsets.
5555 FIXME: This code can't handle nested references with variable indexes
5556 (they will be handled only by iteration of ccp). Perhaps we can bring
5557 get_ref_base_and_extent here and make it use a valueize callback. */
5558 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
5559 && valueize
5560 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
b48e22b2 5561 && TREE_CODE (idx) == INTEGER_CST)
cfef45c8
RG
5562 {
5563 tree low_bound, unit_size;
5564
5565 /* If the resulting bit-offset is constant, track it. */
5566 if ((low_bound = array_ref_low_bound (t),
b48e22b2 5567 TREE_CODE (low_bound) == INTEGER_CST)
cfef45c8 5568 && (unit_size = array_ref_element_size (t),
807e902e 5569 tree_fits_uhwi_p (unit_size)))
cfef45c8 5570 {
807e902e
KZ
5571 offset_int woffset
5572 = wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
5573 TYPE_PRECISION (TREE_TYPE (idx)));
5574
5575 if (wi::fits_shwi_p (woffset))
5576 {
5577 offset = woffset.to_shwi ();
5578 /* TODO: This code seems wrong, multiply then check
5579 to see if it fits. */
5580 offset *= tree_to_uhwi (unit_size);
5581 offset *= BITS_PER_UNIT;
5582
5583 base = TREE_OPERAND (t, 0);
5584 ctor = get_base_constructor (base, &offset, valueize);
5585 /* Empty constructor. Always fold to 0. */
5586 if (ctor == error_mark_node)
5587 return build_zero_cst (TREE_TYPE (t));
5588 /* Out of bound array access. Value is undefined,
5589 but don't fold. */
5590 if (offset < 0)
5591 return NULL_TREE;
5592 /* We can not determine ctor. */
5593 if (!ctor)
5594 return NULL_TREE;
5595 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
5596 tree_to_uhwi (unit_size)
5597 * BITS_PER_UNIT,
5598 base);
5599 }
cfef45c8
RG
5600 }
5601 }
5602 /* Fallthru. */
5603
5604 case COMPONENT_REF:
5605 case BIT_FIELD_REF:
5606 case TARGET_MEM_REF:
5607 case MEM_REF:
ee45a32d 5608 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
cfef45c8
RG
5609 ctor = get_base_constructor (base, &offset, valueize);
5610
5611 /* Empty constructor. Always fold to 0. */
5612 if (ctor == error_mark_node)
5613 return build_zero_cst (TREE_TYPE (t));
5614 /* We do not know precise address. */
5615 if (max_size == -1 || max_size != size)
5616 return NULL_TREE;
5617 /* We can not determine ctor. */
5618 if (!ctor)
5619 return NULL_TREE;
5620
5621 /* Out of bound array access. Value is undefined, but don't fold. */
5622 if (offset < 0)
5623 return NULL_TREE;
5624
c44c2088
JH
5625 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
5626 base);
cfef45c8
RG
5627
5628 case REALPART_EXPR:
5629 case IMAGPART_EXPR:
5630 {
5631 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
5632 if (c && TREE_CODE (c) == COMPLEX_CST)
5633 return fold_build1_loc (EXPR_LOCATION (t),
5634 TREE_CODE (t), TREE_TYPE (t), c);
5635 break;
5636 }
5637
5638 default:
5639 break;
5640 }
5641
5642 return NULL_TREE;
5643}
5644
5645tree
5646fold_const_aggregate_ref (tree t)
5647{
5648 return fold_const_aggregate_ref_1 (t, NULL);
5649}
06bc3ec7 5650
85942f45 5651/* Lookup virtual method with index TOKEN in a virtual table V
ec77d61f
JH
5652 at OFFSET.
5653 Set CAN_REFER if non-NULL to false if method
5654 is not referable or if the virtual table is ill-formed (such as rewriten
5655 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
81fa35bd
MJ
5656
5657tree
85942f45
JH
5658gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
5659 tree v,
ec77d61f
JH
5660 unsigned HOST_WIDE_INT offset,
5661 bool *can_refer)
81fa35bd 5662{
85942f45
JH
5663 tree vtable = v, init, fn;
5664 unsigned HOST_WIDE_INT size;
8c311b50
JH
5665 unsigned HOST_WIDE_INT elt_size, access_index;
5666 tree domain_type;
81fa35bd 5667
ec77d61f
JH
5668 if (can_refer)
5669 *can_refer = true;
5670
9de2f554 5671 /* First of all double check we have virtual table. */
81fa35bd 5672 if (TREE_CODE (v) != VAR_DECL
2aa3da06 5673 || !DECL_VIRTUAL_P (v))
ec77d61f 5674 {
ec77d61f
JH
5675 /* Pass down that we lost track of the target. */
5676 if (can_refer)
5677 *can_refer = false;
5678 return NULL_TREE;
5679 }
9de2f554 5680
2aa3da06
JH
5681 init = ctor_for_folding (v);
5682
9de2f554 5683 /* The virtual tables should always be born with constructors
2aa3da06
JH
5684 and we always should assume that they are avaialble for
5685 folding. At the moment we do not stream them in all cases,
5686 but it should never happen that ctor seem unreachable. */
5687 gcc_assert (init);
5688 if (init == error_mark_node)
5689 {
5690 gcc_assert (in_lto_p);
ec77d61f
JH
5691 /* Pass down that we lost track of the target. */
5692 if (can_refer)
5693 *can_refer = false;
2aa3da06
JH
5694 return NULL_TREE;
5695 }
81fa35bd 5696 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
ae7e9ddd 5697 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
85942f45 5698 offset *= BITS_PER_UNIT;
81fa35bd 5699 offset += token * size;
9de2f554 5700
8c311b50
JH
5701 /* Lookup the value in the constructor that is assumed to be array.
5702 This is equivalent to
5703 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
5704 offset, size, NULL);
5705 but in a constant time. We expect that frontend produced a simple
5706 array without indexed initializers. */
5707
5708 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
5709 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
5710 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
5711 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
5712
5713 access_index = offset / BITS_PER_UNIT / elt_size;
5714 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
5715
5716 /* This code makes an assumption that there are no
5717 indexed fileds produced by C++ FE, so we can directly index the array. */
5718 if (access_index < CONSTRUCTOR_NELTS (init))
5719 {
5720 fn = CONSTRUCTOR_ELT (init, access_index)->value;
5721 gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
5722 STRIP_NOPS (fn);
5723 }
5724 else
5725 fn = NULL;
9de2f554
JH
5726
5727 /* For type inconsistent program we may end up looking up virtual method
5728 in virtual table that does not contain TOKEN entries. We may overrun
5729 the virtual table and pick up a constant or RTTI info pointer.
5730 In any case the call is undefined. */
5731 if (!fn
5732 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
5733 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
5734 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
5735 else
5736 {
5737 fn = TREE_OPERAND (fn, 0);
5738
5739 /* When cgraph node is missing and function is not public, we cannot
5740 devirtualize. This can happen in WHOPR when the actual method
5741 ends up in other partition, because we found devirtualization
5742 possibility too late. */
5743 if (!can_refer_decl_in_current_unit_p (fn, vtable))
ec77d61f
JH
5744 {
5745 if (can_refer)
5746 {
5747 *can_refer = false;
5748 return fn;
5749 }
5750 return NULL_TREE;
5751 }
9de2f554 5752 }
81fa35bd 5753
7501ca28
RG
5754 /* Make sure we create a cgraph node for functions we'll reference.
5755 They can be non-existent if the reference comes from an entry
5756 of an external vtable for example. */
d52f5295 5757 cgraph_node::get_create (fn);
7501ca28 5758
81fa35bd
MJ
5759 return fn;
5760}
5761
85942f45
JH
5762/* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
5763 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
5764 KNOWN_BINFO carries the binfo describing the true type of
ec77d61f
JH
5765 OBJ_TYPE_REF_OBJECT(REF).
5766 Set CAN_REFER if non-NULL to false if method
5767 is not referable or if the virtual table is ill-formed (such as rewriten
5768 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
85942f45
JH
5769
5770tree
ec77d61f
JH
5771gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
5772 bool *can_refer)
85942f45
JH
5773{
5774 unsigned HOST_WIDE_INT offset;
5775 tree v;
5776
5777 v = BINFO_VTABLE (known_binfo);
5778 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
5779 if (!v)
5780 return NULL_TREE;
5781
5782 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
ec77d61f
JH
5783 {
5784 if (can_refer)
5785 *can_refer = false;
5786 return NULL_TREE;
5787 }
5788 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
85942f45
JH
5789}
5790
b184c8f1
AM
5791/* Given a pointer value OP0, return a simplified version of an
5792 indirection through OP0, or NULL_TREE if no simplification is
5793 possible. Note that the resulting type may be different from
5794 the type pointed to in the sense that it is still compatible
5795 from the langhooks point of view. */
5796
5797tree
5798gimple_fold_indirect_ref (tree t)
5799{
5800 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
5801 tree sub = t;
5802 tree subtype;
5803
5804 STRIP_NOPS (sub);
5805 subtype = TREE_TYPE (sub);
5806 if (!POINTER_TYPE_P (subtype))
5807 return NULL_TREE;
5808
5809 if (TREE_CODE (sub) == ADDR_EXPR)
5810 {
5811 tree op = TREE_OPERAND (sub, 0);
5812 tree optype = TREE_TYPE (op);
5813 /* *&p => p */
5814 if (useless_type_conversion_p (type, optype))
5815 return op;
5816
5817 /* *(foo *)&fooarray => fooarray[0] */
5818 if (TREE_CODE (optype) == ARRAY_TYPE
5819 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
5820 && useless_type_conversion_p (type, TREE_TYPE (optype)))
5821 {
5822 tree type_domain = TYPE_DOMAIN (optype);
5823 tree min_val = size_zero_node;
5824 if (type_domain && TYPE_MIN_VALUE (type_domain))
5825 min_val = TYPE_MIN_VALUE (type_domain);
5826 if (TREE_CODE (min_val) == INTEGER_CST)
5827 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
5828 }
5829 /* *(foo *)&complexfoo => __real__ complexfoo */
5830 else if (TREE_CODE (optype) == COMPLEX_TYPE
5831 && useless_type_conversion_p (type, TREE_TYPE (optype)))
5832 return fold_build1 (REALPART_EXPR, type, op);
5833 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
5834 else if (TREE_CODE (optype) == VECTOR_TYPE
5835 && useless_type_conversion_p (type, TREE_TYPE (optype)))
5836 {
5837 tree part_width = TYPE_SIZE (type);
5838 tree index = bitsize_int (0);
5839 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
5840 }
5841 }
5842
5843 /* *(p + CST) -> ... */
5844 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
5845 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
5846 {
5847 tree addr = TREE_OPERAND (sub, 0);
5848 tree off = TREE_OPERAND (sub, 1);
5849 tree addrtype;
5850
5851 STRIP_NOPS (addr);
5852 addrtype = TREE_TYPE (addr);
5853
5854 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
5855 if (TREE_CODE (addr) == ADDR_EXPR
5856 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
5857 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
cc269bb6 5858 && tree_fits_uhwi_p (off))
b184c8f1 5859 {
ae7e9ddd 5860 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
b184c8f1
AM
5861 tree part_width = TYPE_SIZE (type);
5862 unsigned HOST_WIDE_INT part_widthi
9439e9a1 5863 = tree_to_shwi (part_width) / BITS_PER_UNIT;
b184c8f1
AM
5864 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
5865 tree index = bitsize_int (indexi);
5866 if (offset / part_widthi
e934916c 5867 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
b184c8f1
AM
5868 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
5869 part_width, index);
5870 }
5871
5872 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
5873 if (TREE_CODE (addr) == ADDR_EXPR
5874 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
5875 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
5876 {
5877 tree size = TYPE_SIZE_UNIT (type);
5878 if (tree_int_cst_equal (size, off))
5879 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
5880 }
5881
5882 /* *(p + CST) -> MEM_REF <p, CST>. */
5883 if (TREE_CODE (addr) != ADDR_EXPR
5884 || DECL_P (TREE_OPERAND (addr, 0)))
5885 return fold_build2 (MEM_REF, type,
5886 addr,
807e902e 5887 wide_int_to_tree (ptype, off));
b184c8f1
AM
5888 }
5889
5890 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
5891 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
5892 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
5893 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
5894 {
5895 tree type_domain;
5896 tree min_val = size_zero_node;
5897 tree osub = sub;
5898 sub = gimple_fold_indirect_ref (sub);
5899 if (! sub)
5900 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
5901 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
5902 if (type_domain && TYPE_MIN_VALUE (type_domain))
5903 min_val = TYPE_MIN_VALUE (type_domain);
5904 if (TREE_CODE (min_val) == INTEGER_CST)
5905 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
5906 }
5907
5908 return NULL_TREE;
5909}
19e51b40
JJ
5910
5911/* Return true if CODE is an operation that when operating on signed
5912 integer types involves undefined behavior on overflow and the
5913 operation can be expressed with unsigned arithmetic. */
5914
5915bool
5916arith_code_with_undefined_signed_overflow (tree_code code)
5917{
5918 switch (code)
5919 {
5920 case PLUS_EXPR:
5921 case MINUS_EXPR:
5922 case MULT_EXPR:
5923 case NEGATE_EXPR:
5924 case POINTER_PLUS_EXPR:
5925 return true;
5926 default:
5927 return false;
5928 }
5929}
5930
5931/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
5932 operation that can be transformed to unsigned arithmetic by converting
5933 its operand, carrying out the operation in the corresponding unsigned
5934 type and converting the result back to the original type.
5935
5936 Returns a sequence of statements that replace STMT and also contain
5937 a modified form of STMT itself. */
5938
5939gimple_seq
355fe088 5940rewrite_to_defined_overflow (gimple *stmt)
19e51b40
JJ
5941{
5942 if (dump_file && (dump_flags & TDF_DETAILS))
5943 {
5944 fprintf (dump_file, "rewriting stmt with undefined signed "
5945 "overflow ");
5946 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5947 }
5948
5949 tree lhs = gimple_assign_lhs (stmt);
5950 tree type = unsigned_type_for (TREE_TYPE (lhs));
5951 gimple_seq stmts = NULL;
5952 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
5953 {
74e3c262
RB
5954 tree op = gimple_op (stmt, i);
5955 op = gimple_convert (&stmts, type, op);
5956 gimple_set_op (stmt, i, op);
19e51b40
JJ
5957 }
5958 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
5959 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
5960 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
5961 gimple_seq_add_stmt (&stmts, stmt);
355fe088 5962 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
19e51b40
JJ
5963 gimple_seq_add_stmt (&stmts, cvt);
5964
5965 return stmts;
5966}
d4f5cd5e 5967
3d2cf79f 5968
c26de36d
RB
5969/* The valueization hook we use for the gimple_build API simplification.
5970 This makes us match fold_buildN behavior by only combining with
5971 statements in the sequence(s) we are currently building. */
5972
5973static tree
5974gimple_build_valueize (tree op)
5975{
5976 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
5977 return op;
5978 return NULL_TREE;
5979}
5980
3d2cf79f 5981/* Build the expression CODE OP0 of type TYPE with location LOC,
c26de36d 5982 simplifying it first if possible. Returns the built
3d2cf79f
RB
5983 expression value and appends statements possibly defining it
5984 to SEQ. */
5985
5986tree
5987gimple_build (gimple_seq *seq, location_t loc,
c26de36d 5988 enum tree_code code, tree type, tree op0)
3d2cf79f 5989{
c26de36d 5990 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
3d2cf79f
RB
5991 if (!res)
5992 {
5993 if (gimple_in_ssa_p (cfun))
b731b390 5994 res = make_ssa_name (type);
3d2cf79f 5995 else
b731b390 5996 res = create_tmp_reg (type);
355fe088 5997 gimple *stmt;
3d2cf79f
RB
5998 if (code == REALPART_EXPR
5999 || code == IMAGPART_EXPR
6000 || code == VIEW_CONVERT_EXPR)
0d0e4a03 6001 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
3d2cf79f 6002 else
0d0e4a03 6003 stmt = gimple_build_assign (res, code, op0);
3d2cf79f
RB
6004 gimple_set_location (stmt, loc);
6005 gimple_seq_add_stmt_without_update (seq, stmt);
6006 }
6007 return res;
6008}
6009
6010/* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
c26de36d 6011 simplifying it first if possible. Returns the built
3d2cf79f
RB
6012 expression value and appends statements possibly defining it
6013 to SEQ. */
6014
6015tree
6016gimple_build (gimple_seq *seq, location_t loc,
c26de36d 6017 enum tree_code code, tree type, tree op0, tree op1)
3d2cf79f 6018{
c26de36d 6019 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
3d2cf79f
RB
6020 if (!res)
6021 {
6022 if (gimple_in_ssa_p (cfun))
b731b390 6023 res = make_ssa_name (type);
3d2cf79f 6024 else
b731b390 6025 res = create_tmp_reg (type);
355fe088 6026 gimple *stmt = gimple_build_assign (res, code, op0, op1);
3d2cf79f
RB
6027 gimple_set_location (stmt, loc);
6028 gimple_seq_add_stmt_without_update (seq, stmt);
6029 }
6030 return res;
6031}
6032
6033/* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
c26de36d 6034 simplifying it first if possible. Returns the built
3d2cf79f
RB
6035 expression value and appends statements possibly defining it
6036 to SEQ. */
6037
6038tree
6039gimple_build (gimple_seq *seq, location_t loc,
c26de36d 6040 enum tree_code code, tree type, tree op0, tree op1, tree op2)
3d2cf79f
RB
6041{
6042 tree res = gimple_simplify (code, type, op0, op1, op2,
c26de36d 6043 seq, gimple_build_valueize);
3d2cf79f
RB
6044 if (!res)
6045 {
6046 if (gimple_in_ssa_p (cfun))
b731b390 6047 res = make_ssa_name (type);
3d2cf79f 6048 else
b731b390 6049 res = create_tmp_reg (type);
355fe088 6050 gimple *stmt;
3d2cf79f 6051 if (code == BIT_FIELD_REF)
0d0e4a03
JJ
6052 stmt = gimple_build_assign (res, code,
6053 build3 (code, type, op0, op1, op2));
3d2cf79f 6054 else
0d0e4a03 6055 stmt = gimple_build_assign (res, code, op0, op1, op2);
3d2cf79f
RB
6056 gimple_set_location (stmt, loc);
6057 gimple_seq_add_stmt_without_update (seq, stmt);
6058 }
6059 return res;
6060}
6061
6062/* Build the call FN (ARG0) with a result of type TYPE
6063 (or no result if TYPE is void) with location LOC,
c26de36d 6064 simplifying it first if possible. Returns the built
3d2cf79f
RB
6065 expression value (or NULL_TREE if TYPE is void) and appends
6066 statements possibly defining it to SEQ. */
6067
6068tree
6069gimple_build (gimple_seq *seq, location_t loc,
c26de36d 6070 enum built_in_function fn, tree type, tree arg0)
3d2cf79f 6071{
c26de36d 6072 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
3d2cf79f
RB
6073 if (!res)
6074 {
6075 tree decl = builtin_decl_implicit (fn);
355fe088 6076 gimple *stmt = gimple_build_call (decl, 1, arg0);
3d2cf79f
RB
6077 if (!VOID_TYPE_P (type))
6078 {
6079 if (gimple_in_ssa_p (cfun))
b731b390 6080 res = make_ssa_name (type);
3d2cf79f 6081 else
b731b390 6082 res = create_tmp_reg (type);
3d2cf79f
RB
6083 gimple_call_set_lhs (stmt, res);
6084 }
6085 gimple_set_location (stmt, loc);
6086 gimple_seq_add_stmt_without_update (seq, stmt);
6087 }
6088 return res;
6089}
6090
6091/* Build the call FN (ARG0, ARG1) with a result of type TYPE
6092 (or no result if TYPE is void) with location LOC,
c26de36d 6093 simplifying it first if possible. Returns the built
3d2cf79f
RB
6094 expression value (or NULL_TREE if TYPE is void) and appends
6095 statements possibly defining it to SEQ. */
6096
6097tree
6098gimple_build (gimple_seq *seq, location_t loc,
c26de36d 6099 enum built_in_function fn, tree type, tree arg0, tree arg1)
3d2cf79f 6100{
c26de36d 6101 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
3d2cf79f
RB
6102 if (!res)
6103 {
6104 tree decl = builtin_decl_implicit (fn);
355fe088 6105 gimple *stmt = gimple_build_call (decl, 2, arg0, arg1);
3d2cf79f
RB
6106 if (!VOID_TYPE_P (type))
6107 {
6108 if (gimple_in_ssa_p (cfun))
b731b390 6109 res = make_ssa_name (type);
3d2cf79f 6110 else
b731b390 6111 res = create_tmp_reg (type);
3d2cf79f
RB
6112 gimple_call_set_lhs (stmt, res);
6113 }
6114 gimple_set_location (stmt, loc);
6115 gimple_seq_add_stmt_without_update (seq, stmt);
6116 }
6117 return res;
6118}
6119
6120/* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
6121 (or no result if TYPE is void) with location LOC,
c26de36d 6122 simplifying it first if possible. Returns the built
3d2cf79f
RB
6123 expression value (or NULL_TREE if TYPE is void) and appends
6124 statements possibly defining it to SEQ. */
6125
6126tree
6127gimple_build (gimple_seq *seq, location_t loc,
6128 enum built_in_function fn, tree type,
c26de36d 6129 tree arg0, tree arg1, tree arg2)
3d2cf79f 6130{
c26de36d
RB
6131 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
6132 seq, gimple_build_valueize);
3d2cf79f
RB
6133 if (!res)
6134 {
6135 tree decl = builtin_decl_implicit (fn);
355fe088 6136 gimple *stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
3d2cf79f
RB
6137 if (!VOID_TYPE_P (type))
6138 {
6139 if (gimple_in_ssa_p (cfun))
b731b390 6140 res = make_ssa_name (type);
3d2cf79f 6141 else
b731b390 6142 res = create_tmp_reg (type);
3d2cf79f
RB
6143 gimple_call_set_lhs (stmt, res);
6144 }
6145 gimple_set_location (stmt, loc);
6146 gimple_seq_add_stmt_without_update (seq, stmt);
6147 }
6148 return res;
6149}
6150
6151/* Build the conversion (TYPE) OP with a result of type TYPE
6152 with location LOC if such conversion is neccesary in GIMPLE,
6153 simplifying it first.
6154 Returns the built expression value and appends
6155 statements possibly defining it to SEQ. */
d4f5cd5e
RB
6156
6157tree
6158gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
6159{
6160 if (useless_type_conversion_p (type, TREE_TYPE (op)))
6161 return op;
3d2cf79f 6162 return gimple_build (seq, loc, NOP_EXPR, type, op);
d4f5cd5e 6163}
68e57f04 6164
74e3c262
RB
6165/* Build the conversion (ptrofftype) OP with a result of a type
6166 compatible with ptrofftype with location LOC if such conversion
6167 is neccesary in GIMPLE, simplifying it first.
6168 Returns the built expression value and appends
6169 statements possibly defining it to SEQ. */
6170
6171tree
6172gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
6173{
6174 if (ptrofftype_p (TREE_TYPE (op)))
6175 return op;
6176 return gimple_convert (seq, loc, sizetype, op);
6177}
6178
68e57f04
RS
6179/* Return true if the result of assignment STMT is known to be non-negative.
6180 If the return value is based on the assumption that signed overflow is
6181 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6182 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6183
6184static bool
6185gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6186 int depth)
6187{
6188 enum tree_code code = gimple_assign_rhs_code (stmt);
6189 switch (get_gimple_rhs_class (code))
6190 {
6191 case GIMPLE_UNARY_RHS:
6192 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
6193 gimple_expr_type (stmt),
6194 gimple_assign_rhs1 (stmt),
6195 strict_overflow_p, depth);
6196 case GIMPLE_BINARY_RHS:
6197 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
6198 gimple_expr_type (stmt),
6199 gimple_assign_rhs1 (stmt),
6200 gimple_assign_rhs2 (stmt),
6201 strict_overflow_p, depth);
6202 case GIMPLE_TERNARY_RHS:
6203 return false;
6204 case GIMPLE_SINGLE_RHS:
6205 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
6206 strict_overflow_p, depth);
6207 case GIMPLE_INVALID_RHS:
6208 break;
6209 }
6210 gcc_unreachable ();
6211}
6212
6213/* Return true if return value of call STMT is known to be non-negative.
6214 If the return value is based on the assumption that signed overflow is
6215 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6216 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6217
6218static bool
6219gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6220 int depth)
6221{
6222 tree arg0 = gimple_call_num_args (stmt) > 0 ?
6223 gimple_call_arg (stmt, 0) : NULL_TREE;
6224 tree arg1 = gimple_call_num_args (stmt) > 1 ?
6225 gimple_call_arg (stmt, 1) : NULL_TREE;
6226
6227 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
1d9da71f 6228 gimple_call_combined_fn (stmt),
68e57f04
RS
6229 arg0,
6230 arg1,
6231 strict_overflow_p, depth);
6232}
6233
4534c203
RB
6234/* Return true if return value of call STMT is known to be non-negative.
6235 If the return value is based on the assumption that signed overflow is
6236 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6237 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6238
6239static bool
6240gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6241 int depth)
6242{
6243 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
6244 {
6245 tree arg = gimple_phi_arg_def (stmt, i);
6246 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
6247 return false;
6248 }
6249 return true;
6250}
6251
68e57f04
RS
6252/* Return true if STMT is known to compute a non-negative value.
6253 If the return value is based on the assumption that signed overflow is
6254 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6255 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6256
6257bool
6258gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
6259 int depth)
6260{
6261 switch (gimple_code (stmt))
6262 {
6263 case GIMPLE_ASSIGN:
6264 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
6265 depth);
6266 case GIMPLE_CALL:
6267 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
6268 depth);
4534c203
RB
6269 case GIMPLE_PHI:
6270 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
6271 depth);
68e57f04
RS
6272 default:
6273 return false;
6274 }
6275}
67dbe582
RS
6276
6277/* Return true if the floating-point value computed by assignment STMT
6278 is known to have an integer value. We also allow +Inf, -Inf and NaN
5a00b0aa 6279 to be considered integer values. Return false for signaling NaN.
67dbe582
RS
6280
6281 DEPTH is the current nesting depth of the query. */
6282
6283static bool
6284gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
6285{
6286 enum tree_code code = gimple_assign_rhs_code (stmt);
6287 switch (get_gimple_rhs_class (code))
6288 {
6289 case GIMPLE_UNARY_RHS:
6290 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
6291 gimple_assign_rhs1 (stmt), depth);
6292 case GIMPLE_BINARY_RHS:
6293 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
6294 gimple_assign_rhs1 (stmt),
6295 gimple_assign_rhs2 (stmt), depth);
6296 case GIMPLE_TERNARY_RHS:
6297 return false;
6298 case GIMPLE_SINGLE_RHS:
6299 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
6300 case GIMPLE_INVALID_RHS:
6301 break;
6302 }
6303 gcc_unreachable ();
6304}
6305
6306/* Return true if the floating-point value computed by call STMT is known
6307 to have an integer value. We also allow +Inf, -Inf and NaN to be
5a00b0aa 6308 considered integer values. Return false for signaling NaN.
67dbe582
RS
6309
6310 DEPTH is the current nesting depth of the query. */
6311
6312static bool
6313gimple_call_integer_valued_real_p (gimple *stmt, int depth)
6314{
6315 tree arg0 = (gimple_call_num_args (stmt) > 0
6316 ? gimple_call_arg (stmt, 0)
6317 : NULL_TREE);
6318 tree arg1 = (gimple_call_num_args (stmt) > 1
6319 ? gimple_call_arg (stmt, 1)
6320 : NULL_TREE);
1d9da71f 6321 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
67dbe582
RS
6322 arg0, arg1, depth);
6323}
6324
6325/* Return true if the floating-point result of phi STMT is known to have
6326 an integer value. We also allow +Inf, -Inf and NaN to be considered
5a00b0aa 6327 integer values. Return false for signaling NaN.
67dbe582
RS
6328
6329 DEPTH is the current nesting depth of the query. */
6330
6331static bool
6332gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
6333{
6334 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
6335 {
6336 tree arg = gimple_phi_arg_def (stmt, i);
6337 if (!integer_valued_real_single_p (arg, depth + 1))
6338 return false;
6339 }
6340 return true;
6341}
6342
6343/* Return true if the floating-point value computed by STMT is known
6344 to have an integer value. We also allow +Inf, -Inf and NaN to be
5a00b0aa 6345 considered integer values. Return false for signaling NaN.
67dbe582
RS
6346
6347 DEPTH is the current nesting depth of the query. */
6348
6349bool
6350gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
6351{
6352 switch (gimple_code (stmt))
6353 {
6354 case GIMPLE_ASSIGN:
6355 return gimple_assign_integer_valued_real_p (stmt, depth);
6356 case GIMPLE_CALL:
6357 return gimple_call_integer_valued_real_p (stmt, depth);
6358 case GIMPLE_PHI:
6359 return gimple_phi_integer_valued_real_p (stmt, depth);
6360 default:
6361 return false;
6362 }
6363}