]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/gimple-fold.c
Rename scop->ctx to isl_context
[thirdparty/gcc.git] / gcc / gimple-fold.c
CommitLineData
/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2015 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
c7131fb2 24#include "backend.h"
9fdcd34e 25#include "predict.h"
cbdd87d4 26#include "tree.h"
c7131fb2 27#include "gimple.h"
36566b39 28#include "rtl.h"
c7131fb2
AM
29#include "ssa.h"
30#include "alias.h"
31#include "fold-const.h"
36566b39 32#include "flags.h"
36566b39
PK
33#include "insn-config.h"
34#include "expmed.h"
35#include "dojump.h"
36#include "explow.h"
37#include "calls.h"
38#include "emit-rtl.h"
39#include "varasm.h"
40#include "stmt.h"
41#include "expr.h"
42#include "stor-layout.h"
7ee2468b 43#include "dumpfile.h"
60393bbc 44#include "dominance.h"
2fb9a547
AM
45#include "internal-fn.h"
46#include "gimple-fold.h"
45b0be94 47#include "gimplify.h"
5be5c238 48#include "gimple-iterator.h"
442b4905
AM
49#include "tree-into-ssa.h"
50#include "tree-dfa.h"
7a300452 51#include "tree-ssa.h"
cbdd87d4 52#include "tree-ssa-propagate.h"
cbdd87d4 53#include "target.h"
c582198b 54#include "cgraph.h"
450ad0cd
JH
55#include "ipa-utils.h"
56#include "gimple-pretty-print.h"
4484a35a 57#include "tree-ssa-address.h"
862d0b35 58#include "langhooks.h"
19e51b40 59#include "gimplify-me.h"
2b5f0895 60#include "dbgcnt.h"
9b2b7279 61#include "builtins.h"
fef5a0d9 62#include "output.h"
e0ee10ed
RB
63#include "tree-eh.h"
64#include "gimple-match.h"
48126138 65#include "gomp-constants.h"
cbdd87d4 66
b3b9f3d0 67/* Return true when DECL can be referenced from current unit.
c44c2088
JH
68 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
69 We can get declarations that are not possible to reference for various
70 reasons:
1389294c 71
1389294c
JH
72 1) When analyzing C++ virtual tables.
73 C++ virtual tables do have known constructors even
74 when they are keyed to other compilation unit.
75 Those tables can contain pointers to methods and vars
76 in other units. Those methods have both STATIC and EXTERNAL
77 set.
78 2) In WHOPR mode devirtualization might lead to reference
79 to method that was partitioned elsehwere.
80 In this case we have static VAR_DECL or FUNCTION_DECL
81 that has no corresponding callgraph/varpool node
b3b9f3d0
JH
82 declaring the body.
83 3) COMDAT functions referred by external vtables that
3e89949e 84 we devirtualize only during final compilation stage.
b3b9f3d0
JH
85 At this time we already decided that we will not output
86 the function body and thus we can't reference the symbol
87 directly. */
88
1389294c 89static bool
c44c2088 90can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
1389294c 91{
2c8326a5 92 varpool_node *vnode;
1389294c 93 struct cgraph_node *node;
5e20cdc9 94 symtab_node *snode;
c44c2088 95
00de328a 96 if (DECL_ABSTRACT_P (decl))
1632a686
JH
97 return false;
98
99 /* We are concerned only about static/external vars and functions. */
100 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
101 || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
102 return true;
103
104 /* Static objects can be referred only if they was not optimized out yet. */
105 if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
106 {
3aaf0529
JH
107 /* Before we start optimizing unreachable code we can be sure all
108 static objects are defined. */
3dafb85c 109 if (symtab->function_flags_ready)
3aaf0529 110 return true;
d52f5295 111 snode = symtab_node::get (decl);
3aaf0529 112 if (!snode || !snode->definition)
1632a686 113 return false;
7de90a6c 114 node = dyn_cast <cgraph_node *> (snode);
1632a686
JH
115 return !node || !node->global.inlined_to;
116 }
117
6da8be89 118 /* We will later output the initializer, so we can refer to it.
c44c2088 119 So we are concerned only when DECL comes from initializer of
3aaf0529 120 external var or var that has been optimized out. */
c44c2088
JH
121 if (!from_decl
122 || TREE_CODE (from_decl) != VAR_DECL
3aaf0529 123 || (!DECL_EXTERNAL (from_decl)
9041d2e6 124 && (vnode = varpool_node::get (from_decl)) != NULL
3aaf0529 125 && vnode->definition)
6da8be89 126 || (flag_ltrans
9041d2e6 127 && (vnode = varpool_node::get (from_decl)) != NULL
6adda80b 128 && vnode->in_other_partition))
c44c2088 129 return true;
c44c2088
JH
130 /* We are folding reference from external vtable. The vtable may reffer
131 to a symbol keyed to other compilation unit. The other compilation
132 unit may be in separate DSO and the symbol may be hidden. */
133 if (DECL_VISIBILITY_SPECIFIED (decl)
134 && DECL_EXTERNAL (decl)
a33a931b 135 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
d52f5295 136 && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
c44c2088 137 return false;
b3b9f3d0
JH
138 /* When function is public, we always can introduce new reference.
139 Exception are the COMDAT functions where introducing a direct
140 reference imply need to include function body in the curren tunit. */
141 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
142 return true;
3aaf0529
JH
143 /* We have COMDAT. We are going to check if we still have definition
144 or if the definition is going to be output in other partition.
145 Bypass this when gimplifying; all needed functions will be produced.
c44c2088
JH
146
147 As observed in PR20991 for already optimized out comdat virtual functions
073a8998 148 it may be tempting to not necessarily give up because the copy will be
c44c2088
JH
149 output elsewhere when corresponding vtable is output.
150 This is however not possible - ABI specify that COMDATs are output in
151 units where they are used and when the other unit was compiled with LTO
152 it is possible that vtable was kept public while the function itself
153 was privatized. */
3dafb85c 154 if (!symtab->function_flags_ready)
b3b9f3d0 155 return true;
c44c2088 156
d52f5295 157 snode = symtab_node::get (decl);
3aaf0529
JH
158 if (!snode
159 || ((!snode->definition || DECL_EXTERNAL (decl))
160 && (!snode->in_other_partition
161 || (!snode->forced_by_abi && !snode->force_output))))
162 return false;
163 node = dyn_cast <cgraph_node *> (snode);
164 return !node || !node->global.inlined_to;
1389294c
JH
165}
166
0038d4e0 167/* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
c44c2088
JH
168 acceptable form for is_gimple_min_invariant.
169 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
17f39a39
JH
170
171tree
c44c2088 172canonicalize_constructor_val (tree cval, tree from_decl)
17f39a39 173{
50619002
EB
174 tree orig_cval = cval;
175 STRIP_NOPS (cval);
315f5f1b
RG
176 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
177 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
17f39a39 178 {
315f5f1b
RG
179 tree ptr = TREE_OPERAND (cval, 0);
180 if (is_gimple_min_invariant (ptr))
181 cval = build1_loc (EXPR_LOCATION (cval),
182 ADDR_EXPR, TREE_TYPE (ptr),
183 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
184 ptr,
185 fold_convert (ptr_type_node,
186 TREE_OPERAND (cval, 1))));
17f39a39
JH
187 }
188 if (TREE_CODE (cval) == ADDR_EXPR)
189 {
5a27a197
RG
190 tree base = NULL_TREE;
191 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
ca5f4331
MM
192 {
193 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
194 if (base)
195 TREE_OPERAND (cval, 0) = base;
196 }
5a27a197
RG
197 else
198 base = get_base_address (TREE_OPERAND (cval, 0));
7501ca28
RG
199 if (!base)
200 return NULL_TREE;
b3b9f3d0 201
7501ca28
RG
202 if ((TREE_CODE (base) == VAR_DECL
203 || TREE_CODE (base) == FUNCTION_DECL)
c44c2088 204 && !can_refer_decl_in_current_unit_p (base, from_decl))
1389294c 205 return NULL_TREE;
7501ca28 206 if (TREE_CODE (base) == VAR_DECL)
46eb666a 207 TREE_ADDRESSABLE (base) = 1;
7501ca28
RG
208 else if (TREE_CODE (base) == FUNCTION_DECL)
209 {
210 /* Make sure we create a cgraph node for functions we'll reference.
211 They can be non-existent if the reference comes from an entry
212 of an external vtable for example. */
d52f5295 213 cgraph_node::get_create (base);
7501ca28 214 }
0038d4e0 215 /* Fixup types in global initializers. */
73aef89e
RG
216 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
217 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
50619002
EB
218
219 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
220 cval = fold_convert (TREE_TYPE (orig_cval), cval);
221 return cval;
17f39a39 222 }
846abd0d
RB
223 if (TREE_OVERFLOW_P (cval))
224 return drop_tree_overflow (cval);
50619002 225 return orig_cval;
17f39a39 226}
cbdd87d4
RG
227
228/* If SYM is a constant variable with known value, return the value.
229 NULL_TREE is returned otherwise. */
230
231tree
232get_symbol_constant_value (tree sym)
233{
6a6dac52
JH
234 tree val = ctor_for_folding (sym);
235 if (val != error_mark_node)
cbdd87d4 236 {
cbdd87d4
RG
237 if (val)
238 {
9d60be38 239 val = canonicalize_constructor_val (unshare_expr (val), sym);
1389294c 240 if (val && is_gimple_min_invariant (val))
17f39a39 241 return val;
1389294c
JH
242 else
243 return NULL_TREE;
cbdd87d4
RG
244 }
245 /* Variables declared 'const' without an initializer
246 have zero as the initializer if they may not be
247 overridden at link or run time. */
248 if (!val
b8a8c472 249 && is_gimple_reg_type (TREE_TYPE (sym)))
e8160c9a 250 return build_zero_cst (TREE_TYPE (sym));
cbdd87d4
RG
251 }
252
253 return NULL_TREE;
254}
255
256
cbdd87d4
RG
257
258/* Subroutine of fold_stmt. We perform several simplifications of the
259 memory reference tree EXPR and make sure to re-gimplify them properly
260 after propagation of constant addresses. IS_LHS is true if the
261 reference is supposed to be an lvalue. */
262
263static tree
264maybe_fold_reference (tree expr, bool is_lhs)
265{
17f39a39 266 tree result;
cbdd87d4 267
f0eddb90
RG
268 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
269 || TREE_CODE (expr) == REALPART_EXPR
270 || TREE_CODE (expr) == IMAGPART_EXPR)
271 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
272 return fold_unary_loc (EXPR_LOCATION (expr),
273 TREE_CODE (expr),
274 TREE_TYPE (expr),
275 TREE_OPERAND (expr, 0));
276 else if (TREE_CODE (expr) == BIT_FIELD_REF
277 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
278 return fold_ternary_loc (EXPR_LOCATION (expr),
279 TREE_CODE (expr),
280 TREE_TYPE (expr),
281 TREE_OPERAND (expr, 0),
282 TREE_OPERAND (expr, 1),
283 TREE_OPERAND (expr, 2));
284
f0eddb90
RG
285 if (!is_lhs
286 && (result = fold_const_aggregate_ref (expr))
287 && is_gimple_min_invariant (result))
288 return result;
cbdd87d4 289
cbdd87d4
RG
290 return NULL_TREE;
291}
292
293
294/* Attempt to fold an assignment statement pointed-to by SI. Returns a
295 replacement rhs for the statement or NULL_TREE if no simplification
296 could be made. It is assumed that the operands have been previously
297 folded. */
298
299static tree
300fold_gimple_assign (gimple_stmt_iterator *si)
301{
355fe088 302 gimple *stmt = gsi_stmt (*si);
cbdd87d4
RG
303 enum tree_code subcode = gimple_assign_rhs_code (stmt);
304 location_t loc = gimple_location (stmt);
305
306 tree result = NULL_TREE;
307
308 switch (get_gimple_rhs_class (subcode))
309 {
310 case GIMPLE_SINGLE_RHS:
311 {
312 tree rhs = gimple_assign_rhs1 (stmt);
313
8c00ba08
JW
314 if (TREE_CLOBBER_P (rhs))
315 return NULL_TREE;
316
4e71066d 317 if (REFERENCE_CLASS_P (rhs))
cbdd87d4
RG
318 return maybe_fold_reference (rhs, false);
319
bdf37f7a
JH
320 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
321 {
322 tree val = OBJ_TYPE_REF_EXPR (rhs);
323 if (is_gimple_min_invariant (val))
324 return val;
f8a39967 325 else if (flag_devirtualize && virtual_method_call_p (rhs))
bdf37f7a
JH
326 {
327 bool final;
328 vec <cgraph_node *>targets
f8a39967 329 = possible_polymorphic_call_targets (rhs, stmt, &final);
2b5f0895 330 if (final && targets.length () <= 1 && dbg_cnt (devirt))
bdf37f7a 331 {
2b5f0895
XDL
332 if (dump_enabled_p ())
333 {
807b7d62 334 location_t loc = gimple_location_safe (stmt);
2b5f0895
XDL
335 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
336 "resolving virtual function address "
337 "reference to function %s\n",
338 targets.length () == 1
339 ? targets[0]->name ()
3ef276e4 340 : "NULL");
2b5f0895 341 }
3ef276e4
RB
342 if (targets.length () == 1)
343 {
344 val = fold_convert (TREE_TYPE (val),
345 build_fold_addr_expr_loc
346 (loc, targets[0]->decl));
347 STRIP_USELESS_TYPE_CONVERSION (val);
348 }
349 else
350 /* We can not use __builtin_unreachable here because it
351 can not have address taken. */
352 val = build_int_cst (TREE_TYPE (val), 0);
bdf37f7a
JH
353 return val;
354 }
355 }
356
357 }
cbdd87d4
RG
358 else if (TREE_CODE (rhs) == ADDR_EXPR)
359 {
70f34814
RG
360 tree ref = TREE_OPERAND (rhs, 0);
361 tree tem = maybe_fold_reference (ref, true);
362 if (tem
363 && TREE_CODE (tem) == MEM_REF
364 && integer_zerop (TREE_OPERAND (tem, 1)))
365 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
366 else if (tem)
cbdd87d4
RG
367 result = fold_convert (TREE_TYPE (rhs),
368 build_fold_addr_expr_loc (loc, tem));
70f34814
RG
369 else if (TREE_CODE (ref) == MEM_REF
370 && integer_zerop (TREE_OPERAND (ref, 1)))
371 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
cbdd87d4
RG
372 }
373
374 else if (TREE_CODE (rhs) == CONSTRUCTOR
375 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
376 && (CONSTRUCTOR_NELTS (rhs)
377 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
378 {
379 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
380 unsigned i;
381 tree val;
382
383 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
384 if (TREE_CODE (val) != INTEGER_CST
385 && TREE_CODE (val) != REAL_CST
386 && TREE_CODE (val) != FIXED_CST)
387 return NULL_TREE;
388
389 return build_vector_from_ctor (TREE_TYPE (rhs),
390 CONSTRUCTOR_ELTS (rhs));
391 }
392
393 else if (DECL_P (rhs))
9d60be38 394 return get_symbol_constant_value (rhs);
cbdd87d4
RG
395
396 /* If we couldn't fold the RHS, hand over to the generic
397 fold routines. */
398 if (result == NULL_TREE)
399 result = fold (rhs);
400
401 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
402 that may have been added by fold, and "useless" type
403 conversions that might now be apparent due to propagation. */
404 STRIP_USELESS_TYPE_CONVERSION (result);
405
406 if (result != rhs && valid_gimple_rhs_p (result))
407 return result;
408
409 return NULL_TREE;
410 }
411 break;
412
413 case GIMPLE_UNARY_RHS:
cbdd87d4
RG
414 break;
415
416 case GIMPLE_BINARY_RHS:
cbdd87d4
RG
417 break;
418
0354c0c7 419 case GIMPLE_TERNARY_RHS:
5c099d40
RB
420 result = fold_ternary_loc (loc, subcode,
421 TREE_TYPE (gimple_assign_lhs (stmt)),
422 gimple_assign_rhs1 (stmt),
423 gimple_assign_rhs2 (stmt),
424 gimple_assign_rhs3 (stmt));
0354c0c7
BS
425
426 if (result)
427 {
428 STRIP_USELESS_TYPE_CONVERSION (result);
429 if (valid_gimple_rhs_p (result))
430 return result;
0354c0c7
BS
431 }
432 break;
433
cbdd87d4
RG
434 case GIMPLE_INVALID_RHS:
435 gcc_unreachable ();
436 }
437
438 return NULL_TREE;
439}
440
fef5a0d9
RB
441
442/* Replace a statement at *SI_P with a sequence of statements in STMTS,
443 adjusting the replacement stmts location and virtual operands.
444 If the statement has a lhs the last stmt in the sequence is expected
445 to assign to that lhs. */
446
447static void
448gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
449{
355fe088 450 gimple *stmt = gsi_stmt (*si_p);
fef5a0d9
RB
451
452 if (gimple_has_location (stmt))
453 annotate_all_with_location (stmts, gimple_location (stmt));
454
455 /* First iterate over the replacement statements backward, assigning
456 virtual operands to their defining statements. */
355fe088 457 gimple *laststore = NULL;
fef5a0d9
RB
458 for (gimple_stmt_iterator i = gsi_last (stmts);
459 !gsi_end_p (i); gsi_prev (&i))
460 {
355fe088 461 gimple *new_stmt = gsi_stmt (i);
fef5a0d9
RB
462 if ((gimple_assign_single_p (new_stmt)
463 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
464 || (is_gimple_call (new_stmt)
465 && (gimple_call_flags (new_stmt)
466 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
467 {
468 tree vdef;
469 if (!laststore)
470 vdef = gimple_vdef (stmt);
471 else
472 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
473 gimple_set_vdef (new_stmt, vdef);
474 if (vdef && TREE_CODE (vdef) == SSA_NAME)
475 SSA_NAME_DEF_STMT (vdef) = new_stmt;
476 laststore = new_stmt;
477 }
478 }
479
480 /* Second iterate over the statements forward, assigning virtual
481 operands to their uses. */
482 tree reaching_vuse = gimple_vuse (stmt);
483 for (gimple_stmt_iterator i = gsi_start (stmts);
484 !gsi_end_p (i); gsi_next (&i))
485 {
355fe088 486 gimple *new_stmt = gsi_stmt (i);
fef5a0d9
RB
487 /* If the new statement possibly has a VUSE, update it with exact SSA
488 name we know will reach this one. */
489 if (gimple_has_mem_ops (new_stmt))
490 gimple_set_vuse (new_stmt, reaching_vuse);
491 gimple_set_modified (new_stmt, true);
492 if (gimple_vdef (new_stmt))
493 reaching_vuse = gimple_vdef (new_stmt);
494 }
495
496 /* If the new sequence does not do a store release the virtual
497 definition of the original statement. */
498 if (reaching_vuse
499 && reaching_vuse == gimple_vuse (stmt))
500 {
501 tree vdef = gimple_vdef (stmt);
502 if (vdef
503 && TREE_CODE (vdef) == SSA_NAME)
504 {
505 unlink_stmt_vdef (stmt);
506 release_ssa_name (vdef);
507 }
508 }
509
510 /* Finally replace the original statement with the sequence. */
511 gsi_replace_with_seq (si_p, stmts, false);
512}
513
cbdd87d4
RG
514/* Convert EXPR into a GIMPLE value suitable for substitution on the
515 RHS of an assignment. Insert the necessary statements before
516 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
517 is replaced. If the call is expected to produces a result, then it
518 is replaced by an assignment of the new RHS to the result variable.
519 If the result is to be ignored, then the call is replaced by a
fe2ef088
MM
520 GIMPLE_NOP. A proper VDEF chain is retained by making the first
521 VUSE and the last VDEF of the whole sequence be the same as the replaced
522 statement and using new SSA names for stores in between. */
cbdd87d4
RG
523
524void
525gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
526{
527 tree lhs;
355fe088 528 gimple *stmt, *new_stmt;
cbdd87d4 529 gimple_stmt_iterator i;
355a7673 530 gimple_seq stmts = NULL;
cbdd87d4
RG
531
532 stmt = gsi_stmt (*si_p);
533
534 gcc_assert (is_gimple_call (stmt));
535
45852dcc 536 push_gimplify_context (gimple_in_ssa_p (cfun));
cbdd87d4 537
e256dfce 538 lhs = gimple_call_lhs (stmt);
cbdd87d4 539 if (lhs == NULL_TREE)
6e572326
RG
540 {
541 gimplify_and_add (expr, &stmts);
542 /* We can end up with folding a memcpy of an empty class assignment
543 which gets optimized away by C++ gimplification. */
544 if (gimple_seq_empty_p (stmts))
545 {
9fdc58de 546 pop_gimplify_context (NULL);
6e572326
RG
547 if (gimple_in_ssa_p (cfun))
548 {
549 unlink_stmt_vdef (stmt);
550 release_defs (stmt);
551 }
f6b4dc28 552 gsi_replace (si_p, gimple_build_nop (), false);
6e572326
RG
553 return;
554 }
555 }
cbdd87d4 556 else
e256dfce
RG
557 {
558 tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
559 new_stmt = gimple_build_assign (lhs, tmp);
560 i = gsi_last (stmts);
561 gsi_insert_after_without_update (&i, new_stmt,
562 GSI_CONTINUE_LINKING);
563 }
cbdd87d4
RG
564
565 pop_gimplify_context (NULL);
566
fef5a0d9
RB
567 gsi_replace_with_seq_vops (si_p, stmts);
568}
cbdd87d4 569
fef5a0d9
RB
570
571/* Replace the call at *GSI with the gimple value VAL. */
572
573static void
574replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
575{
355fe088 576 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9 577 tree lhs = gimple_call_lhs (stmt);
355fe088 578 gimple *repl;
fef5a0d9 579 if (lhs)
e256dfce 580 {
fef5a0d9
RB
581 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
582 val = fold_convert (TREE_TYPE (lhs), val);
583 repl = gimple_build_assign (lhs, val);
584 }
585 else
586 repl = gimple_build_nop ();
587 tree vdef = gimple_vdef (stmt);
588 if (vdef && TREE_CODE (vdef) == SSA_NAME)
589 {
590 unlink_stmt_vdef (stmt);
591 release_ssa_name (vdef);
592 }
f6b4dc28 593 gsi_replace (gsi, repl, false);
fef5a0d9
RB
594}
595
596/* Replace the call at *GSI with the new call REPL and fold that
597 again. */
598
599static void
355fe088 600replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
fef5a0d9 601{
355fe088 602 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9
RB
603 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
604 gimple_set_location (repl, gimple_location (stmt));
605 if (gimple_vdef (stmt)
606 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
607 {
608 gimple_set_vdef (repl, gimple_vdef (stmt));
609 gimple_set_vuse (repl, gimple_vuse (stmt));
610 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
611 }
f6b4dc28 612 gsi_replace (gsi, repl, false);
fef5a0d9
RB
613 fold_stmt (gsi);
614}
615
616/* Return true if VAR is a VAR_DECL or a component thereof. */
617
618static bool
619var_decl_component_p (tree var)
620{
621 tree inner = var;
622 while (handled_component_p (inner))
623 inner = TREE_OPERAND (inner, 0);
624 return SSA_VAR_P (inner);
625}
626
627/* Fold function call to builtin mem{{,p}cpy,move}. Return
86c5a5c3 628 false if no simplification can be made.
fef5a0d9
RB
629 If ENDP is 0, return DEST (like memcpy).
630 If ENDP is 1, return DEST+LEN (like mempcpy).
631 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
632 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
633 (memmove). */
634
635static bool
636gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
637 tree dest, tree src, int endp)
638{
355fe088 639 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9
RB
640 tree lhs = gimple_call_lhs (stmt);
641 tree len = gimple_call_arg (stmt, 2);
642 tree destvar, srcvar;
643 location_t loc = gimple_location (stmt);
644
645 /* If the LEN parameter is zero, return DEST. */
646 if (integer_zerop (len))
647 {
355fe088 648 gimple *repl;
fef5a0d9
RB
649 if (gimple_call_lhs (stmt))
650 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
651 else
652 repl = gimple_build_nop ();
653 tree vdef = gimple_vdef (stmt);
654 if (vdef && TREE_CODE (vdef) == SSA_NAME)
e256dfce 655 {
fef5a0d9
RB
656 unlink_stmt_vdef (stmt);
657 release_ssa_name (vdef);
658 }
f6b4dc28 659 gsi_replace (gsi, repl, false);
fef5a0d9
RB
660 return true;
661 }
662
663 /* If SRC and DEST are the same (and not volatile), return
664 DEST{,+LEN,+LEN-1}. */
665 if (operand_equal_p (src, dest, 0))
666 {
667 unlink_stmt_vdef (stmt);
668 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
669 release_ssa_name (gimple_vdef (stmt));
670 if (!lhs)
671 {
f6b4dc28 672 gsi_replace (gsi, gimple_build_nop (), false);
fef5a0d9
RB
673 return true;
674 }
675 goto done;
676 }
677 else
678 {
679 tree srctype, desttype;
680 unsigned int src_align, dest_align;
681 tree off0;
682
683 /* Build accesses at offset zero with a ref-all character type. */
684 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
685 ptr_mode, true), 0);
686
687 /* If we can perform the copy efficiently with first doing all loads
688 and then all stores inline it that way. Currently efficiently
689 means that we can load all the memory into a single integer
690 register which is what MOVE_MAX gives us. */
691 src_align = get_pointer_alignment (src);
692 dest_align = get_pointer_alignment (dest);
693 if (tree_fits_uhwi_p (len)
694 && compare_tree_int (len, MOVE_MAX) <= 0
695 /* ??? Don't transform copies from strings with known length this
696 confuses the tree-ssa-strlen.c. This doesn't handle
697 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
698 reason. */
699 && !c_strlen (src, 2))
700 {
701 unsigned ilen = tree_to_uhwi (len);
702 if (exact_log2 (ilen) != -1)
703 {
704 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
705 if (type
706 && TYPE_MODE (type) != BLKmode
707 && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
708 == ilen * 8)
709 /* If the destination pointer is not aligned we must be able
710 to emit an unaligned store. */
711 && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
712 || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type), dest_align)))
713 {
714 tree srctype = type;
715 tree desttype = type;
716 if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
717 srctype = build_aligned_type (type, src_align);
718 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
719 tree tem = fold_const_aggregate_ref (srcmem);
720 if (tem)
721 srcmem = tem;
722 else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
723 && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
724 src_align))
725 srcmem = NULL_TREE;
726 if (srcmem)
727 {
355fe088 728 gimple *new_stmt;
fef5a0d9
RB
729 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
730 {
731 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
732 if (gimple_in_ssa_p (cfun))
733 srcmem = make_ssa_name (TREE_TYPE (srcmem),
734 new_stmt);
735 else
b731b390 736 srcmem = create_tmp_reg (TREE_TYPE (srcmem));
fef5a0d9
RB
737 gimple_assign_set_lhs (new_stmt, srcmem);
738 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
739 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
740 }
741 if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
742 desttype = build_aligned_type (type, dest_align);
743 new_stmt
744 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
745 dest, off0),
746 srcmem);
747 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
748 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
749 if (gimple_vdef (new_stmt)
750 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
751 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
752 if (!lhs)
753 {
f6b4dc28 754 gsi_replace (gsi, new_stmt, false);
fef5a0d9
RB
755 return true;
756 }
757 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
758 goto done;
759 }
760 }
761 }
762 }
763
764 if (endp == 3)
765 {
766 /* Both DEST and SRC must be pointer types.
767 ??? This is what old code did. Is the testing for pointer types
768 really mandatory?
769
770 If either SRC is readonly or length is 1, we can use memcpy. */
771 if (!dest_align || !src_align)
772 return false;
773 if (readonly_data_expr (src)
774 || (tree_fits_uhwi_p (len)
775 && (MIN (src_align, dest_align) / BITS_PER_UNIT
776 >= tree_to_uhwi (len))))
777 {
778 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
779 if (!fn)
780 return false;
781 gimple_call_set_fndecl (stmt, fn);
782 gimple_call_set_arg (stmt, 0, dest);
783 gimple_call_set_arg (stmt, 1, src);
784 fold_stmt (gsi);
785 return true;
786 }
787
788 /* If *src and *dest can't overlap, optimize into memcpy as well. */
789 if (TREE_CODE (src) == ADDR_EXPR
790 && TREE_CODE (dest) == ADDR_EXPR)
791 {
792 tree src_base, dest_base, fn;
793 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
794 HOST_WIDE_INT size = -1;
795 HOST_WIDE_INT maxsize = -1;
796
797 srcvar = TREE_OPERAND (src, 0);
798 src_base = get_ref_base_and_extent (srcvar, &src_offset,
799 &size, &maxsize);
800 destvar = TREE_OPERAND (dest, 0);
801 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
802 &size, &maxsize);
803 if (tree_fits_uhwi_p (len))
804 maxsize = tree_to_uhwi (len);
805 else
806 maxsize = -1;
807 src_offset /= BITS_PER_UNIT;
808 dest_offset /= BITS_PER_UNIT;
809 if (SSA_VAR_P (src_base)
810 && SSA_VAR_P (dest_base))
811 {
812 if (operand_equal_p (src_base, dest_base, 0)
813 && ranges_overlap_p (src_offset, maxsize,
814 dest_offset, maxsize))
815 return false;
816 }
817 else if (TREE_CODE (src_base) == MEM_REF
818 && TREE_CODE (dest_base) == MEM_REF)
819 {
820 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
821 TREE_OPERAND (dest_base, 0), 0))
822 return false;
823 offset_int off = mem_ref_offset (src_base) + src_offset;
824 if (!wi::fits_shwi_p (off))
825 return false;
826 src_offset = off.to_shwi ();
827
828 off = mem_ref_offset (dest_base) + dest_offset;
829 if (!wi::fits_shwi_p (off))
830 return false;
831 dest_offset = off.to_shwi ();
832 if (ranges_overlap_p (src_offset, maxsize,
833 dest_offset, maxsize))
834 return false;
835 }
836 else
837 return false;
838
839 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
840 if (!fn)
841 return false;
842 gimple_call_set_fndecl (stmt, fn);
843 gimple_call_set_arg (stmt, 0, dest);
844 gimple_call_set_arg (stmt, 1, src);
845 fold_stmt (gsi);
846 return true;
847 }
848
849 /* If the destination and source do not alias optimize into
850 memcpy as well. */
851 if ((is_gimple_min_invariant (dest)
852 || TREE_CODE (dest) == SSA_NAME)
853 && (is_gimple_min_invariant (src)
854 || TREE_CODE (src) == SSA_NAME))
855 {
856 ao_ref destr, srcr;
857 ao_ref_init_from_ptr_and_size (&destr, dest, len);
858 ao_ref_init_from_ptr_and_size (&srcr, src, len);
859 if (!refs_may_alias_p_1 (&destr, &srcr, false))
860 {
861 tree fn;
862 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
863 if (!fn)
864 return false;
865 gimple_call_set_fndecl (stmt, fn);
866 gimple_call_set_arg (stmt, 0, dest);
867 gimple_call_set_arg (stmt, 1, src);
868 fold_stmt (gsi);
869 return true;
870 }
871 }
872
873 return false;
874 }
875
876 if (!tree_fits_shwi_p (len))
877 return false;
878 /* FIXME:
879 This logic lose for arguments like (type *)malloc (sizeof (type)),
880 since we strip the casts of up to VOID return value from malloc.
881 Perhaps we ought to inherit type from non-VOID argument here? */
882 STRIP_NOPS (src);
883 STRIP_NOPS (dest);
884 if (!POINTER_TYPE_P (TREE_TYPE (src))
885 || !POINTER_TYPE_P (TREE_TYPE (dest)))
886 return false;
887 /* In the following try to find a type that is most natural to be
888 used for the memcpy source and destination and that allows
889 the most optimization when memcpy is turned into a plain assignment
890 using that type. In theory we could always use a char[len] type
891 but that only gains us that the destination and source possibly
892 no longer will have their address taken. */
893 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
894 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
895 {
896 tree tem = TREE_OPERAND (src, 0);
897 STRIP_NOPS (tem);
898 if (tem != TREE_OPERAND (src, 0))
899 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
900 }
901 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
902 {
903 tree tem = TREE_OPERAND (dest, 0);
904 STRIP_NOPS (tem);
905 if (tem != TREE_OPERAND (dest, 0))
906 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
907 }
908 srctype = TREE_TYPE (TREE_TYPE (src));
909 if (TREE_CODE (srctype) == ARRAY_TYPE
910 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
911 {
912 srctype = TREE_TYPE (srctype);
913 STRIP_NOPS (src);
914 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
915 }
916 desttype = TREE_TYPE (TREE_TYPE (dest));
917 if (TREE_CODE (desttype) == ARRAY_TYPE
918 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
919 {
920 desttype = TREE_TYPE (desttype);
921 STRIP_NOPS (dest);
922 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
923 }
924 if (TREE_ADDRESSABLE (srctype)
925 || TREE_ADDRESSABLE (desttype))
926 return false;
927
928 /* Make sure we are not copying using a floating-point mode or
929 a type whose size possibly does not match its precision. */
930 if (FLOAT_MODE_P (TYPE_MODE (desttype))
931 || TREE_CODE (desttype) == BOOLEAN_TYPE
932 || TREE_CODE (desttype) == ENUMERAL_TYPE)
933 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
934 if (FLOAT_MODE_P (TYPE_MODE (srctype))
935 || TREE_CODE (srctype) == BOOLEAN_TYPE
936 || TREE_CODE (srctype) == ENUMERAL_TYPE)
937 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
938 if (!srctype)
939 srctype = desttype;
940 if (!desttype)
941 desttype = srctype;
942 if (!srctype)
943 return false;
944
945 src_align = get_pointer_alignment (src);
946 dest_align = get_pointer_alignment (dest);
947 if (dest_align < TYPE_ALIGN (desttype)
948 || src_align < TYPE_ALIGN (srctype))
949 return false;
950
951 destvar = dest;
952 STRIP_NOPS (destvar);
953 if (TREE_CODE (destvar) == ADDR_EXPR
954 && var_decl_component_p (TREE_OPERAND (destvar, 0))
955 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
956 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
957 else
958 destvar = NULL_TREE;
959
960 srcvar = src;
961 STRIP_NOPS (srcvar);
962 if (TREE_CODE (srcvar) == ADDR_EXPR
963 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
964 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
965 {
966 if (!destvar
967 || src_align >= TYPE_ALIGN (desttype))
968 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
969 srcvar, off0);
970 else if (!STRICT_ALIGNMENT)
971 {
972 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
973 src_align);
974 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
975 }
e256dfce 976 else
fef5a0d9
RB
977 srcvar = NULL_TREE;
978 }
979 else
980 srcvar = NULL_TREE;
981
982 if (srcvar == NULL_TREE && destvar == NULL_TREE)
983 return false;
984
985 if (srcvar == NULL_TREE)
986 {
987 STRIP_NOPS (src);
988 if (src_align >= TYPE_ALIGN (desttype))
989 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
990 else
991 {
992 if (STRICT_ALIGNMENT)
993 return false;
994 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
995 src_align);
996 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
997 }
998 }
999 else if (destvar == NULL_TREE)
1000 {
1001 STRIP_NOPS (dest);
1002 if (dest_align >= TYPE_ALIGN (srctype))
1003 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1004 else
1005 {
1006 if (STRICT_ALIGNMENT)
1007 return false;
1008 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1009 dest_align);
1010 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1011 }
1012 }
1013
355fe088 1014 gimple *new_stmt;
fef5a0d9
RB
1015 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1016 {
1017 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1018 if (gimple_in_ssa_p (cfun))
1019 srcvar = make_ssa_name (TREE_TYPE (srcvar), new_stmt);
1020 else
b731b390 1021 srcvar = create_tmp_reg (TREE_TYPE (srcvar));
fef5a0d9
RB
1022 gimple_assign_set_lhs (new_stmt, srcvar);
1023 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1024 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1025 }
1026 new_stmt = gimple_build_assign (destvar, srcvar);
1027 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1028 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1029 if (gimple_vdef (new_stmt)
1030 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1031 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1032 if (!lhs)
1033 {
f6b4dc28 1034 gsi_replace (gsi, new_stmt, false);
fef5a0d9
RB
1035 return true;
1036 }
1037 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1038 }
1039
1040done:
1041 if (endp == 0 || endp == 3)
1042 len = NULL_TREE;
1043 else if (endp == 2)
1044 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
1045 ssize_int (1));
1046 if (endp == 2 || endp == 1)
1047 dest = fold_build_pointer_plus_loc (loc, dest, len);
1048
1049 dest = force_gimple_operand_gsi (gsi, dest, false, NULL_TREE, true,
1050 GSI_SAME_STMT);
355fe088 1051 gimple *repl = gimple_build_assign (lhs, dest);
f6b4dc28 1052 gsi_replace (gsi, repl, false);
fef5a0d9
RB
1053 return true;
1054}
1055
1056/* Fold function call to builtin memset or bzero at *GSI setting the
1057 memory of size LEN to VAL. Return whether a simplification was made. */
1058
1059static bool
1060gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1061{
355fe088 1062 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9
RB
1063 tree etype;
1064 unsigned HOST_WIDE_INT length, cval;
1065
1066 /* If the LEN parameter is zero, return DEST. */
1067 if (integer_zerop (len))
1068 {
1069 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1070 return true;
1071 }
1072
1073 if (! tree_fits_uhwi_p (len))
1074 return false;
1075
1076 if (TREE_CODE (c) != INTEGER_CST)
1077 return false;
1078
1079 tree dest = gimple_call_arg (stmt, 0);
1080 tree var = dest;
1081 if (TREE_CODE (var) != ADDR_EXPR)
1082 return false;
1083
1084 var = TREE_OPERAND (var, 0);
1085 if (TREE_THIS_VOLATILE (var))
1086 return false;
1087
1088 etype = TREE_TYPE (var);
1089 if (TREE_CODE (etype) == ARRAY_TYPE)
1090 etype = TREE_TYPE (etype);
1091
1092 if (!INTEGRAL_TYPE_P (etype)
1093 && !POINTER_TYPE_P (etype))
1094 return NULL_TREE;
1095
1096 if (! var_decl_component_p (var))
1097 return NULL_TREE;
1098
1099 length = tree_to_uhwi (len);
1100 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
1101 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1102 return NULL_TREE;
1103
1104 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1105 return NULL_TREE;
1106
1107 if (integer_zerop (c))
1108 cval = 0;
1109 else
1110 {
1111 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1112 return NULL_TREE;
1113
1114 cval = TREE_INT_CST_LOW (c);
1115 cval &= 0xff;
1116 cval |= cval << 8;
1117 cval |= cval << 16;
1118 cval |= (cval << 31) << 1;
1119 }
1120
1121 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
355fe088 1122 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
fef5a0d9
RB
1123 gimple_set_vuse (store, gimple_vuse (stmt));
1124 tree vdef = gimple_vdef (stmt);
1125 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1126 {
1127 gimple_set_vdef (store, gimple_vdef (stmt));
1128 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1129 }
1130 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1131 if (gimple_call_lhs (stmt))
1132 {
355fe088 1133 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
f6b4dc28 1134 gsi_replace (gsi, asgn, false);
fef5a0d9
RB
1135 }
1136 else
1137 {
1138 gimple_stmt_iterator gsi2 = *gsi;
1139 gsi_prev (gsi);
1140 gsi_remove (&gsi2, true);
1141 }
1142
1143 return true;
1144}
1145
1146
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.
   The VISITED bitmap is lazily allocated on first SSA name seen and is
   owned by the caller (see the public overload below).  */

static bool
get_maxval_strlen (tree arg, tree *length, bitmap *visited, int type)
{
  tree var, val;
  gimple *def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	  && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_maxval_strlen (TREE_OPERAND (aop0, 0),
				      length, visited, type);
	}

      if (type == 2)
	{
	  /* TYPE == 2: ARG itself is the value; it must be a
	     non-negative integer constant.  */
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length)
	{
	  if (type > 0)
	    {
	      /* For maximum queries keep the larger of the previously
		 recorded value and VAL; both must be constants.  */
	      if (TREE_CODE (*length) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*length, val))
		*length = val;
	      return true;
	    }
	  /* TYPE == 0: an exact length was already recorded; it must
	     agree with VAL.  */
	  else if (simple_cst_equal (val, *length) != 1)
	    return false;
	}

      *length = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
	 constant length or come from another SSA_NAME with a constant
	 length.  */
      if (gimple_assign_single_p (def_stmt)
	  || gimple_assign_unary_nop_p (def_stmt))
	{
	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  return get_maxval_strlen (rhs, length, visited, type);
	}
      else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	{
	  /* A COND_EXPR value: both arms must yield a usable
	     length/value.  */
	  tree op2 = gimple_assign_rhs2 (def_stmt);
	  tree op3 = gimple_assign_rhs3 (def_stmt);
	  return get_maxval_strlen (op2, length, visited, type)
		 && get_maxval_strlen (op3, length, visited, type);
	}
      return false;

    case GIMPLE_PHI:
      {
	/* All the arguments of the PHI node must have the same constant
	   length.  */
	unsigned i;

	for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	  {
	    tree arg = gimple_phi_arg (def_stmt, i)->def;

	    /* If this PHI has itself as an argument, we cannot
	       determine the string length of this argument.  However,
	       if we can find a constant string length for the other
	       PHI args then we can still be sure that this is a
	       constant string length.  So be optimistic and just
	       continue with the next argument.  */
	    if (arg == gimple_phi_result (def_stmt))
	      continue;

	    if (!get_maxval_strlen (arg, length, visited, type))
	      return false;
	  }
      }
      return true;

    default:
      return false;
    }
}
1272
dcb7fae2
RB
1273tree
1274get_maxval_strlen (tree arg, int type)
1275{
1276 bitmap visited = NULL;
1277 tree len = NULL_TREE;
1278 if (!get_maxval_strlen (arg, &len, &visited, type))
1279 len = NULL_TREE;
1280 if (visited)
1281 BITMAP_FREE (visited);
1282
1283 return len;
1284}
1285
fef5a0d9
RB
1286
/* Fold function call to builtin strcpy with arguments DEST and SRC.
   Replaces the call at *GSI in place.  Return true if a simplification
   was made, false otherwise.  */

static bool
gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
			    tree dest, tree src)
{
  location_t loc = gimple_location (gsi_stmt (*gsi));
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  /* The strcpy -> memcpy transform below may enlarge code; skip it
     when optimizing for size.  */
  if (optimize_function_for_size_p (cfun))
    return false;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  /* We need a known string length for SRC to emit memcpy.  */
  tree len = get_maxval_strlen (src, 0);
  if (!len)
    return false;

  /* Copy strlen (SRC) + 1 bytes so the NUL terminator is included.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1324
/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
   Replaces the call at *GSI in place.  Return true if a simplification
   was made, false otherwise.  */

static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
			     tree dest, tree src, tree len)
{
  location_t loc = gimple_location (gsi_stmt (*gsi));
  tree fn;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, 0);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  /* SLEN becomes strlen (SRC) + 1, i.e. the full size of the source
     string including the NUL terminator.  */
  slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (slen, len))
    return false;

  /* OK transform into builtin memcpy.  LEN <= SLEN here, so copying
     LEN bytes reads only within the source string.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1373
/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   The call at *GSI is replaced in place.  When profitable, strcat is
   decomposed into strlen (DST) followed by memcpy into
   DST + strlen (DST), copying strlen (SRC) + 1 bytes.

   Return true if a simplification was made, false otherwise.  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* The strlen + memcpy decomposition enlarges code; only do it when
     optimizing this block for speed.  */
  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, 0);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple *repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  if (gimple_in_ssa_p (cfun))
    newdst = make_ssa_name (size_type_node);
  else
    newdst = create_tmp_reg (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  /* Copy strlen (SRC) + 1 bytes so the NUL terminator is included.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      /* strcat returns DST; materialize that for the call's lhs.  */
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
	 stmt iterator to the memcpy call.
	 ???  We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
1468
07f1cf56
RB
1469/* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1470 are the arguments to the call. */
1471
1472static bool
1473gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1474{
355fe088 1475 gimple *stmt = gsi_stmt (*gsi);
07f1cf56
RB
1476 tree dest = gimple_call_arg (stmt, 0);
1477 tree src = gimple_call_arg (stmt, 1);
1478 tree size = gimple_call_arg (stmt, 2);
1479 tree fn;
1480 const char *p;
1481
1482
1483 p = c_getstr (src);
1484 /* If the SRC parameter is "", return DEST. */
1485 if (p && *p == '\0')
1486 {
1487 replace_call_with_value (gsi, dest);
1488 return true;
1489 }
1490
1491 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1492 return false;
1493
1494 /* If __builtin_strcat_chk is used, assume strcat is available. */
1495 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1496 if (!fn)
1497 return false;
1498
355fe088 1499 gimple *repl = gimple_build_call (fn, 2, dest, src);
07f1cf56
RB
1500 replace_call_with_call_and_fold (gsi, repl);
1501 return true;
1502}
1503
ad03a744
RB
1504/* Simplify a call to the strncat builtin. */
1505
1506static bool
1507gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
1508{
1509 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
1510 tree dst = gimple_call_arg (stmt, 0);
1511 tree src = gimple_call_arg (stmt, 1);
1512 tree len = gimple_call_arg (stmt, 2);
1513
1514 const char *p = c_getstr (src);
1515
1516 /* If the requested length is zero, or the src parameter string
1517 length is zero, return the dst parameter. */
1518 if (integer_zerop (len) || (p && *p == '\0'))
1519 {
1520 replace_call_with_value (gsi, dst);
1521 return true;
1522 }
1523
1524 /* If the requested len is greater than or equal to the string
1525 length, call strcat. */
1526 if (TREE_CODE (len) == INTEGER_CST && p
1527 && compare_tree_int (len, strlen (p)) >= 0)
1528 {
1529 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
1530
1531 /* If the replacement _DECL isn't initialized, don't do the
1532 transformation. */
1533 if (!fn)
1534 return false;
1535
1536 gcall *repl = gimple_build_call (fn, 2, dst, src);
1537 replace_call_with_call_and_fold (gsi, repl);
1538 return true;
1539 }
1540
1541 return false;
1542}
1543
745583f9
RB
1544/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
1545 LEN, and SIZE. */
1546
1547static bool
1548gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
1549{
355fe088 1550 gimple *stmt = gsi_stmt (*gsi);
745583f9
RB
1551 tree dest = gimple_call_arg (stmt, 0);
1552 tree src = gimple_call_arg (stmt, 1);
1553 tree len = gimple_call_arg (stmt, 2);
1554 tree size = gimple_call_arg (stmt, 3);
1555 tree fn;
1556 const char *p;
1557
1558 p = c_getstr (src);
1559 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
1560 if ((p && *p == '\0')
1561 || integer_zerop (len))
1562 {
1563 replace_call_with_value (gsi, dest);
1564 return true;
1565 }
1566
1567 if (! tree_fits_uhwi_p (size))
1568 return false;
1569
1570 if (! integer_all_onesp (size))
1571 {
1572 tree src_len = c_strlen (src, 1);
1573 if (src_len
1574 && tree_fits_uhwi_p (src_len)
1575 && tree_fits_uhwi_p (len)
1576 && ! tree_int_cst_lt (len, src_len))
1577 {
1578 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
1579 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
1580 if (!fn)
1581 return false;
1582
355fe088 1583 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
745583f9
RB
1584 replace_call_with_call_and_fold (gsi, repl);
1585 return true;
1586 }
1587 return false;
1588 }
1589
1590 /* If __builtin_strncat_chk is used, assume strncat is available. */
1591 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
1592 if (!fn)
1593 return false;
1594
355fe088 1595 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
745583f9
RB
1596 replace_call_with_call_and_fold (gsi, repl);
1597 return true;
1598}
1599
fef5a0d9
RB
/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
   to the call.  UNLOCKED is true if this is actually a call to
   fputs_unlocked.  The call at *GSI is replaced in place; it is only
   transformed when its return value is unused.  Return true if a
   simplification was made, false otherwise.  */

static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
			   tree arg0, tree arg1,
			   bool unlocked)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
			 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
			 : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
			  ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
			  : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, 0);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely .  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
	const char *p = c_getstr (arg0);
	if (p != NULL)
	  {
	    if (!fn_fputc)
	      return false;

	    gimple *repl = gimple_build_call (fn_fputc, 2,
					      build_int_cst
					      (integer_type_node, p[0]), arg1);
	    replace_call_with_call_and_fold (gsi, repl);
	    return true;
	  }
      }
      /* FALLTHROUGH: the single character isn't known at compile time,
	 so fall back to the fwrite transformation below.  */
    case 1: /* length is greater than 1, call fwrite.  */
      {
	/* If optimizing for size keep fputs.  */
	if (optimize_function_for_size_p (cfun))
	  return false;
	/* New argument list transforming fputs(string, stream) to
	   fwrite(string, 1, len, stream).  */
	if (!fn_fwrite)
	  return false;

	gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
					  size_one_node, len, arg1);
	replace_call_with_call_and_fold (gsi, repl);
	return true;
      }
    default:
      gcc_unreachable ();
    }
  return false;
}
1676
/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call.  FCODE is
   the BUILT_IN_* code of the builtin.  Whether the return value may
   be ignored is derived from the call's lhs.  The object-size check
   is dropped when SIZE is provably >= the number of bytes written.
   Return true if a simplification was made, false otherwise.  */

static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
				tree dest, tree src, tree len, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	{
	  replace_call_with_value (gsi, dest);
	  return true;
	}
      else
	{
	  tree temp = fold_build_pointer_plus_loc (loc, dest, len);
	  temp = force_gimple_operand_gsi (gsi, temp,
					   false, NULL_TREE, true,
					   GSI_SAME_STMT);
	  replace_call_with_value (gsi, temp);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  /* MAXLEN is an upper bound on the value of LEN (type 2 query).  */
  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	      return false;
	    }
	}
      else
	maxlen = len;

      /* Keep the check if the write might exceed the object size.  */
      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1776
/* Fold a call to the __st[rp]cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call.  FCODE is the
   BUILT_IN_* code of the builtin.  Whether the return value may be
   ignored is derived from the call's lhs.  The object-size check is
   dropped when the source length is provably smaller than SIZE.
   Return true if a simplification was made, false otherwise.  */

static bool
gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
				tree dest,
				tree src, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree len, fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  /* MAXLEN is an upper bound on strlen (SRC) (type 1 query).  */
  tree maxlen = get_maxval_strlen (src, 1);
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return false;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 3, dest, src, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return false;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return false;

	      /* Copy strlen (SRC) + 1 bytes including the NUL.  */
	      len = fold_convert_loc (loc, size_type_node, len);
	      len = size_binop_loc (loc, PLUS_EXPR, len,
				    build_int_cst (size_type_node, 1));
	      len = force_gimple_operand_gsi (gsi, len, true, NULL_TREE,
					      true, GSI_SAME_STMT);
	      gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	maxlen = len;

      /* The copy writes MAXLEN + 1 bytes; require MAXLEN < SIZE.  */
      if (! tree_int_cst_lt (maxlen, size))
	return false;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1867
/* Fold a call to the __st{r,p}ncpy_chk builtin.  DEST, SRC, LEN, and SIZE
   are the arguments to the call.  FCODE is the BUILT_IN_* code of the
   builtin.  Whether the return value may be ignored is derived from
   the call's lhs.  The object-size check is dropped when SIZE is
   provably >= LEN.  Return true if a simplification was made, false
   otherwise.  */

static bool
gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
				 tree dest, tree src,
				 tree len, tree size,
				 enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
    {
      /* If return value of __stpncpy_chk is ignored,
	 optimize into __strncpy_chk.  */
      fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
      if (fn)
	{
	  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  /* MAXLEN is an upper bound on the value of LEN (type 2 query).  */
  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      /* Keep the check if the write might exceed the object size.  */
      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
			      ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1927
2625bb5d
RB
1928/* Fold function call to builtin stpcpy with arguments DEST and SRC.
1929 Return NULL_TREE if no simplification can be made. */
1930
1931static bool
1932gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
1933{
1934 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
1935 location_t loc = gimple_location (stmt);
1936 tree dest = gimple_call_arg (stmt, 0);
1937 tree src = gimple_call_arg (stmt, 1);
1938 tree fn, len, lenp1;
1939
1940 /* If the result is unused, replace stpcpy with strcpy. */
1941 if (gimple_call_lhs (stmt) == NULL_TREE)
1942 {
1943 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
1944 if (!fn)
1945 return false;
1946 gimple_call_set_fndecl (stmt, fn);
1947 fold_stmt (gsi);
1948 return true;
1949 }
1950
1951 len = c_strlen (src, 1);
1952 if (!len
1953 || TREE_CODE (len) != INTEGER_CST)
1954 return false;
1955
1956 if (optimize_function_for_size_p (cfun)
1957 /* If length is zero it's small enough. */
1958 && !integer_zerop (len))
1959 return false;
1960
1961 /* If the source has a known length replace stpcpy with memcpy. */
1962 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1963 if (!fn)
1964 return false;
1965
1966 gimple_seq stmts = NULL;
1967 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
1968 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
1969 tem, build_int_cst (size_type_node, 1));
1970 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1971 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
1972 gimple_set_vuse (repl, gimple_vuse (stmt));
1973 gimple_set_vdef (repl, gimple_vdef (stmt));
1974 if (gimple_vdef (repl)
1975 && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
1976 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
1977 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
1978 /* Replace the result with dest + len. */
1979 stmts = NULL;
1980 tem = gimple_convert (&stmts, loc, sizetype, len);
1981 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1982 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
1983 POINTER_PLUS_EXPR, dest, tem);
f6b4dc28 1984 gsi_replace (gsi, ret, false);
2625bb5d
RB
1985 /* Finally fold the memcpy call. */
1986 gimple_stmt_iterator gsi2 = *gsi;
1987 gsi_prev (&gsi2);
1988 fold_stmt (&gsi2);
1989 return true;
1990}
1991
fef5a0d9
RB
/* Fold the __{,v}snprintf_chk call at *GSI into a plain {,v}snprintf call
   when the object-size check can be proven unnecessary.  FCODE is either
   BUILT_IN_SNPRINTF_CHK or BUILT_IN_VSNPRINTF_CHK.  Returns true if the
   statement was rewritten, false if the checking call must be kept.  */

static bool
gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
				  enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call:
     __snprintf_chk (dest, len, flag, size, fmt, ...).  */
  if (gimple_call_num_args (stmt) < 5)
    return false;

  dest = gimple_call_arg (stmt, 0);
  len = gimple_call_arg (stmt, 1);
  flag = gimple_call_arg (stmt, 2);
  size = gimple_call_arg (stmt, 3);
  fmt = gimple_call_arg (stmt, 4);

  if (! tree_fits_uhwi_p (size))
    return false;

  /* SIZE of all ones means "object size unknown"; then the check is
     trivially satisfiable.  Otherwise LEN (or its upper bound) must be
     shown to fit in SIZE.  */
  if (! integer_all_onesp (size))
    {
      tree maxlen = get_maxval_strlen (len, 2);
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  if (!init_target_chars ())
    return false;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
			      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 5 arguments by 3, dropping
     FLAG and SIZE and shifting any trailing varargs down two slots.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, len);
  gimple_call_set_arg (stmt, 2, fmt);
  for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
cbdd87d4 2072
fef5a0d9
RB
2073/* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
2074 Return NULL_TREE if a normal call should be emitted rather than
2075 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
2076 or BUILT_IN_VSPRINTF_CHK. */
cbdd87d4 2077
fef5a0d9
RB
2078static bool
2079gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2080 enum built_in_function fcode)
2081{
538dd0b7 2082 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
fef5a0d9
RB
2083 tree dest, size, len, fn, fmt, flag;
2084 const char *fmt_str;
2085 unsigned nargs = gimple_call_num_args (stmt);
cbdd87d4 2086
fef5a0d9
RB
2087 /* Verify the required arguments in the original call. */
2088 if (nargs < 4)
2089 return false;
2090 dest = gimple_call_arg (stmt, 0);
2091 flag = gimple_call_arg (stmt, 1);
2092 size = gimple_call_arg (stmt, 2);
2093 fmt = gimple_call_arg (stmt, 3);
2094
2095 if (! tree_fits_uhwi_p (size))
2096 return false;
2097
2098 len = NULL_TREE;
2099
2100 if (!init_target_chars ())
2101 return false;
2102
2103 /* Check whether the format is a literal string constant. */
2104 fmt_str = c_getstr (fmt);
2105 if (fmt_str != NULL)
2106 {
2107 /* If the format doesn't contain % args or %%, we know the size. */
2108 if (strchr (fmt_str, target_percent) == 0)
cbdd87d4 2109 {
fef5a0d9
RB
2110 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
2111 len = build_int_cstu (size_type_node, strlen (fmt_str));
2112 }
2113 /* If the format is "%s" and first ... argument is a string literal,
2114 we know the size too. */
2115 else if (fcode == BUILT_IN_SPRINTF_CHK
2116 && strcmp (fmt_str, target_percent_s) == 0)
2117 {
2118 tree arg;
cbdd87d4 2119
fef5a0d9
RB
2120 if (nargs == 5)
2121 {
2122 arg = gimple_call_arg (stmt, 4);
2123 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2124 {
2125 len = c_strlen (arg, 1);
2126 if (! len || ! tree_fits_uhwi_p (len))
2127 len = NULL_TREE;
2128 }
2129 }
2130 }
2131 }
cbdd87d4 2132
fef5a0d9
RB
2133 if (! integer_all_onesp (size))
2134 {
2135 if (! len || ! tree_int_cst_lt (len, size))
2136 return false;
2137 }
cbdd87d4 2138
fef5a0d9
RB
2139 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2140 or if format doesn't contain % chars or is "%s". */
2141 if (! integer_zerop (flag))
2142 {
2143 if (fmt_str == NULL)
2144 return false;
2145 if (strchr (fmt_str, target_percent) != NULL
2146 && strcmp (fmt_str, target_percent_s))
2147 return false;
2148 }
cbdd87d4 2149
fef5a0d9
RB
2150 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2151 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
2152 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
2153 if (!fn)
2154 return false;
2155
2156 /* Replace the called function and the first 4 argument by 2 retaining
2157 trailing varargs. */
2158 gimple_call_set_fndecl (stmt, fn);
2159 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2160 gimple_call_set_arg (stmt, 0, dest);
2161 gimple_call_set_arg (stmt, 1, fmt);
2162 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
2163 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2164 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2165 fold_stmt (gsi);
2166 return true;
2167}
2168
35770bb2
RB
2169/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2170 ORIG may be null if this is a 2-argument call. We don't attempt to
2171 simplify calls with more than 3 arguments.
2172
2173 Return NULL_TREE if no simplification was possible, otherwise return the
2174 simplified form of the call as a tree. If IGNORED is true, it means that
2175 the caller does not use the returned value of the function. */
2176
2177static bool
dcb7fae2 2178gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
35770bb2 2179{
355fe088 2180 gimple *stmt = gsi_stmt (*gsi);
35770bb2
RB
2181 tree dest = gimple_call_arg (stmt, 0);
2182 tree fmt = gimple_call_arg (stmt, 1);
2183 tree orig = NULL_TREE;
2184 const char *fmt_str = NULL;
2185
2186 /* Verify the required arguments in the original call. We deal with two
2187 types of sprintf() calls: 'sprintf (str, fmt)' and
2188 'sprintf (dest, "%s", orig)'. */
2189 if (gimple_call_num_args (stmt) > 3)
2190 return false;
2191
2192 if (gimple_call_num_args (stmt) == 3)
2193 orig = gimple_call_arg (stmt, 2);
2194
2195 /* Check whether the format is a literal string constant. */
2196 fmt_str = c_getstr (fmt);
2197 if (fmt_str == NULL)
2198 return false;
2199
2200 if (!init_target_chars ())
2201 return false;
2202
2203 /* If the format doesn't contain % args or %%, use strcpy. */
2204 if (strchr (fmt_str, target_percent) == NULL)
2205 {
2206 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2207
2208 if (!fn)
2209 return false;
2210
2211 /* Don't optimize sprintf (buf, "abc", ptr++). */
2212 if (orig)
2213 return false;
2214
2215 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
2216 'format' is known to contain no % formats. */
2217 gimple_seq stmts = NULL;
355fe088 2218 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
35770bb2
RB
2219 gimple_seq_add_stmt_without_update (&stmts, repl);
2220 if (gimple_call_lhs (stmt))
2221 {
2222 repl = gimple_build_assign (gimple_call_lhs (stmt),
2223 build_int_cst (integer_type_node,
2224 strlen (fmt_str)));
2225 gimple_seq_add_stmt_without_update (&stmts, repl);
2226 gsi_replace_with_seq_vops (gsi, stmts);
2227 /* gsi now points at the assignment to the lhs, get a
2228 stmt iterator to the memcpy call.
2229 ??? We can't use gsi_for_stmt as that doesn't work when the
2230 CFG isn't built yet. */
2231 gimple_stmt_iterator gsi2 = *gsi;
2232 gsi_prev (&gsi2);
2233 fold_stmt (&gsi2);
2234 }
2235 else
2236 {
2237 gsi_replace_with_seq_vops (gsi, stmts);
2238 fold_stmt (gsi);
2239 }
2240 return true;
2241 }
2242
2243 /* If the format is "%s", use strcpy if the result isn't used. */
2244 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2245 {
2246 tree fn;
2247 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2248
2249 if (!fn)
2250 return false;
2251
2252 /* Don't crash on sprintf (str1, "%s"). */
2253 if (!orig)
2254 return false;
2255
dcb7fae2
RB
2256 tree orig_len = NULL_TREE;
2257 if (gimple_call_lhs (stmt))
35770bb2 2258 {
dcb7fae2 2259 orig_len = get_maxval_strlen (orig, 0);
d7e78447 2260 if (!orig_len)
35770bb2
RB
2261 return false;
2262 }
2263
2264 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
2265 gimple_seq stmts = NULL;
355fe088 2266 gimple *repl = gimple_build_call (fn, 2, dest, orig);
35770bb2
RB
2267 gimple_seq_add_stmt_without_update (&stmts, repl);
2268 if (gimple_call_lhs (stmt))
2269 {
d7e78447
RB
2270 if (!useless_type_conversion_p (integer_type_node,
2271 TREE_TYPE (orig_len)))
2272 orig_len = fold_convert (integer_type_node, orig_len);
2273 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
35770bb2
RB
2274 gimple_seq_add_stmt_without_update (&stmts, repl);
2275 gsi_replace_with_seq_vops (gsi, stmts);
2276 /* gsi now points at the assignment to the lhs, get a
2277 stmt iterator to the memcpy call.
2278 ??? We can't use gsi_for_stmt as that doesn't work when the
2279 CFG isn't built yet. */
2280 gimple_stmt_iterator gsi2 = *gsi;
2281 gsi_prev (&gsi2);
2282 fold_stmt (&gsi2);
2283 }
2284 else
2285 {
2286 gsi_replace_with_seq_vops (gsi, stmts);
2287 fold_stmt (gsi);
2288 }
2289 return true;
2290 }
2291 return false;
2292}
2293
d7e78447
RB
/* Simplify the snprintf call at *GSI with arguments DEST, DESTSIZE, FMT,
   and optionally ORIG (absent in the 3-argument form).  Calls with more
   than 4 arguments are not simplified.

   Returns true if the call was replaced (by strcpy plus, when the lhs is
   used, an assignment of the known output length), false otherwise.  */

static bool
gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest = gimple_call_arg (stmt, 0);
  tree destsize = gimple_call_arg (stmt, 1);
  tree fmt = gimple_call_arg (stmt, 2);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  if (gimple_call_num_args (stmt) > 4)
    return false;

  if (gimple_call_num_args (stmt) == 4)
    orig = gimple_call_arg (stmt, 3);

  /* The destination size must be a compile-time constant so we can prove
     the output fits without truncation.  */
  if (!tree_fits_uhwi_p (destsize))
    return false;
  unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't optimize snprintf (buf, 4, "abc", ptr++).  */
      if (orig)
	return false;

      /* We could expand this as
	 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
	 or to
	 memcpy (str, fmt_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      size_t len = strlen (fmt_str);
      if (len >= destlen)
	return false;

      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  /* snprintf returns the would-be output length, which here is
	     exactly the format's length.  */
	  repl = gimple_build_assign (gimple_call_lhs (stmt),
				      build_int_cst (integer_type_node, len));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ??? We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't crash on snprintf (str1, cst, "%s").  */
      if (!orig)
	return false;

      /* ORIG's length must be a known constant to prove no truncation.  */
      tree orig_len = get_maxval_strlen (orig, 0);
      if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
	return false;

      /* We could expand this as
	 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
	 or to
	 memcpy (str1, str2_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      if (compare_tree_int (orig_len, destlen) >= 0)
	return false;

      /* Convert snprintf (str1, cst, "%s", str2) into
	 strcpy (str1, str2) if strlen (str2) < cst.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (integer_type_node,
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (integer_type_node, orig_len);
	  repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ??? We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
35770bb2 2432
edd7ae68
RB
/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk
   builtins.  FP, FMT, and ARG are the arguments to the call.  We don't
   fold calls with more than 3 arguments, and ARG may be null in the
   2-argument case.

   Returns true if the call was simplified (replaced by fputs/fputc or a
   no-op), false otherwise.  FCODE is the BUILT_IN_* code of the function
   to be simplified.  */

static bool
gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
			     tree fp, tree fmt, tree arg,
			     enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_fputc, fn_fputs;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
      fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
    }
  else
    {
      fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
      fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
    }

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, transform to fputs.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      /* A trailing argument with a %-free format would be dropped
	 silently; only the va_list variants may carry one.  */
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
	  && arg)
	return false;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* When "string" doesn't contain %, replace all cases of
	 fprintf (fp, string) with fputs (string, fp).  The fputs
	 builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return false;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg
	  || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
	return false;
      if (fn_fputc)
	{
	  gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
2533
ad03a744
RB
/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
   FMT and ARG are the arguments to the call; we don't fold cases with
   more than 2 arguments, and ARG may be null if this is a 1-argument case.

   Returns true if the call was simplified (replaced by puts/putchar or a
   no-op), false otherwise.  FCODE is the BUILT_IN_* code of the function
   to be simplified.  */

static bool
gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
			    tree arg, enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_putchar, fn_puts, newarg;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
      fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
    }
  else
    {
      fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
      fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
    }

  if (!init_target_chars ())
    return false;

  /* Handle both printf ("%s", arg) and a %-free format: in either case
     the actual output string STR is known.  */
  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
	{
	  if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
	    return false;

	  if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	    return false;

	  str = c_getstr (arg);
	  if (str == NULL)
	    return false;
	}
      else
	{
	  /* The format specifier doesn't contain any '%' characters.  */
	  if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
	      && arg)
	    return false;
	  str = fmt_str;
	}

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
	{
	  /* Given printf("c"), (where c is any one character,)
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
	  newarg = build_int_cst (integer_type_node, str[0]);
	  if (fn_putchar)
	    {
	      gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	{
	  /* If the string was "string\n", call puts("string").  */
	  size_t len = strlen (str);
	  if ((unsigned char)str[len - 1] == target_newline
	      && (size_t) (int) len == len
	      && (int) len > 0)
	    {
	      char *newstr;
	      tree offset_node, string_cst;

	      /* Create a NUL-terminated string that's one char shorter
		 than the original, stripping off the trailing '\n'.  */
	      newarg = build_string_literal (len, str);
	      string_cst = string_constant (newarg, &offset_node);
	      gcc_checking_assert (string_cst
				   && (TREE_STRING_LENGTH (string_cst)
				       == (int) len)
				   && integer_zerop (offset_node)
				   && (unsigned char)
				      TREE_STRING_POINTER (string_cst)[len - 1]
				      == target_newline);
	      /* build_string_literal creates a new STRING_CST,
		 modify it in place to avoid double copying.  */
	      newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
	      newstr[len - 1] = '\0';
	      if (fn_puts)
		{
		  gcall *repl = gimple_build_call (fn_puts, 1, newarg);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	    }
	  else
	    /* We'd like to arrange to call fputs(string,stdout) here,
	       but we need stdout and don't have a way to get it yet.  */
	    return false;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return false;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_puts)
	{
	  gcall *repl = gimple_build_call (fn_puts, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || ! useless_type_conversion_p (integer_type_node,
					       TREE_TYPE (arg)))
	return false;
      if (fn_putchar)
	{
	  gcall *repl = gimple_build_call (fn_putchar, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
2695
edd7ae68 2696
fef5a0d9
RB
2697
2698/* Fold a call to __builtin_strlen with known length LEN. */
2699
2700static bool
dcb7fae2 2701gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
fef5a0d9 2702{
355fe088 2703 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2 2704 tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
fef5a0d9
RB
2705 if (!len)
2706 return false;
2813904b 2707 len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
fef5a0d9
RB
2708 replace_call_with_value (gsi, len);
2709 return true;
cbdd87d4
RG
2710}
2711
48126138
NS
2712/* Fold a call to __builtin_acc_on_device. */
2713
2714static bool
2715gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
2716{
2717 /* Defer folding until we know which compiler we're in. */
2718 if (symtab->state != EXPANSION)
2719 return false;
2720
2721 unsigned val_host = GOMP_DEVICE_HOST;
2722 unsigned val_dev = GOMP_DEVICE_NONE;
2723
2724#ifdef ACCEL_COMPILER
2725 val_host = GOMP_DEVICE_NOT_HOST;
2726 val_dev = ACCEL_COMPILER_acc_device;
2727#endif
2728
2729 location_t loc = gimple_location (gsi_stmt (*gsi));
2730
2731 tree host_eq = make_ssa_name (boolean_type_node);
2732 gimple *host_ass = gimple_build_assign
2733 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
2734 gimple_set_location (host_ass, loc);
2735 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
2736
2737 tree dev_eq = make_ssa_name (boolean_type_node);
2738 gimple *dev_ass = gimple_build_assign
2739 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
2740 gimple_set_location (dev_ass, loc);
2741 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
2742
2743 tree result = make_ssa_name (boolean_type_node);
2744 gimple *result_ass = gimple_build_assign
2745 (result, BIT_IOR_EXPR, host_eq, dev_eq);
2746 gimple_set_location (result_ass, loc);
2747 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
2748
2749 replace_call_with_value (gsi, result);
2750
2751 return true;
2752}
cbdd87d4 2753
dcb7fae2
RB
2754/* Fold the non-target builtin at *GSI and return whether any simplification
2755 was made. */
cbdd87d4 2756
fef5a0d9 2757static bool
dcb7fae2 2758gimple_fold_builtin (gimple_stmt_iterator *gsi)
cbdd87d4 2759{
538dd0b7 2760 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
fef5a0d9 2761 tree callee = gimple_call_fndecl (stmt);
cbdd87d4 2762
dcb7fae2
RB
2763 /* Give up for always_inline inline builtins until they are
2764 inlined. */
2765 if (avoid_folding_inline_builtin (callee))
2766 return false;
cbdd87d4 2767
edd7ae68
RB
2768 unsigned n = gimple_call_num_args (stmt);
2769 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2770 switch (fcode)
cbdd87d4 2771 {
dcb7fae2
RB
2772 case BUILT_IN_BZERO:
2773 return gimple_fold_builtin_memset (gsi, integer_zero_node,
2774 gimple_call_arg (stmt, 1));
2775 case BUILT_IN_MEMSET:
2776 return gimple_fold_builtin_memset (gsi,
2777 gimple_call_arg (stmt, 1),
2778 gimple_call_arg (stmt, 2));
2779 case BUILT_IN_BCOPY:
2780 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
2781 gimple_call_arg (stmt, 0), 3);
2782 case BUILT_IN_MEMCPY:
2783 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2784 gimple_call_arg (stmt, 1), 0);
2785 case BUILT_IN_MEMPCPY:
2786 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2787 gimple_call_arg (stmt, 1), 1);
2788 case BUILT_IN_MEMMOVE:
2789 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2790 gimple_call_arg (stmt, 1), 3);
2791 case BUILT_IN_SPRINTF_CHK:
2792 case BUILT_IN_VSPRINTF_CHK:
edd7ae68 2793 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
dcb7fae2
RB
2794 case BUILT_IN_STRCAT_CHK:
2795 return gimple_fold_builtin_strcat_chk (gsi);
745583f9
RB
2796 case BUILT_IN_STRNCAT_CHK:
2797 return gimple_fold_builtin_strncat_chk (gsi);
cbdd87d4 2798 case BUILT_IN_STRLEN:
dcb7fae2 2799 return gimple_fold_builtin_strlen (gsi);
cbdd87d4 2800 case BUILT_IN_STRCPY:
dcb7fae2 2801 return gimple_fold_builtin_strcpy (gsi,
fef5a0d9 2802 gimple_call_arg (stmt, 0),
dcb7fae2 2803 gimple_call_arg (stmt, 1));
cbdd87d4 2804 case BUILT_IN_STRNCPY:
dcb7fae2 2805 return gimple_fold_builtin_strncpy (gsi,
fef5a0d9
RB
2806 gimple_call_arg (stmt, 0),
2807 gimple_call_arg (stmt, 1),
dcb7fae2 2808 gimple_call_arg (stmt, 2));
9a7eefec 2809 case BUILT_IN_STRCAT:
dcb7fae2
RB
2810 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
2811 gimple_call_arg (stmt, 1));
ad03a744
RB
2812 case BUILT_IN_STRNCAT:
2813 return gimple_fold_builtin_strncat (gsi);
cbdd87d4 2814 case BUILT_IN_FPUTS:
dcb7fae2
RB
2815 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2816 gimple_call_arg (stmt, 1), false);
cbdd87d4 2817 case BUILT_IN_FPUTS_UNLOCKED:
dcb7fae2
RB
2818 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2819 gimple_call_arg (stmt, 1), true);
cbdd87d4
RG
2820 case BUILT_IN_MEMCPY_CHK:
2821 case BUILT_IN_MEMPCPY_CHK:
2822 case BUILT_IN_MEMMOVE_CHK:
2823 case BUILT_IN_MEMSET_CHK:
dcb7fae2 2824 return gimple_fold_builtin_memory_chk (gsi,
fef5a0d9
RB
2825 gimple_call_arg (stmt, 0),
2826 gimple_call_arg (stmt, 1),
2827 gimple_call_arg (stmt, 2),
2828 gimple_call_arg (stmt, 3),
edd7ae68 2829 fcode);
2625bb5d
RB
2830 case BUILT_IN_STPCPY:
2831 return gimple_fold_builtin_stpcpy (gsi);
cbdd87d4
RG
2832 case BUILT_IN_STRCPY_CHK:
2833 case BUILT_IN_STPCPY_CHK:
dcb7fae2 2834 return gimple_fold_builtin_stxcpy_chk (gsi,
fef5a0d9
RB
2835 gimple_call_arg (stmt, 0),
2836 gimple_call_arg (stmt, 1),
2837 gimple_call_arg (stmt, 2),
edd7ae68 2838 fcode);
cbdd87d4 2839 case BUILT_IN_STRNCPY_CHK:
f3fc9b80 2840 case BUILT_IN_STPNCPY_CHK:
fef5a0d9
RB
2841 return gimple_fold_builtin_stxncpy_chk (gsi,
2842 gimple_call_arg (stmt, 0),
2843 gimple_call_arg (stmt, 1),
2844 gimple_call_arg (stmt, 2),
2845 gimple_call_arg (stmt, 3),
edd7ae68 2846 fcode);
cbdd87d4
RG
2847 case BUILT_IN_SNPRINTF_CHK:
2848 case BUILT_IN_VSNPRINTF_CHK:
edd7ae68 2849 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
d7e78447 2850 case BUILT_IN_SNPRINTF:
dcb7fae2 2851 return gimple_fold_builtin_snprintf (gsi);
d7e78447 2852 case BUILT_IN_SPRINTF:
dcb7fae2 2853 return gimple_fold_builtin_sprintf (gsi);
edd7ae68
RB
2854 case BUILT_IN_FPRINTF:
2855 case BUILT_IN_FPRINTF_UNLOCKED:
2856 case BUILT_IN_VFPRINTF:
2857 if (n == 2 || n == 3)
2858 return gimple_fold_builtin_fprintf (gsi,
2859 gimple_call_arg (stmt, 0),
2860 gimple_call_arg (stmt, 1),
2861 n == 3
2862 ? gimple_call_arg (stmt, 2)
2863 : NULL_TREE,
2864 fcode);
2865 break;
2866 case BUILT_IN_FPRINTF_CHK:
2867 case BUILT_IN_VFPRINTF_CHK:
2868 if (n == 3 || n == 4)
2869 return gimple_fold_builtin_fprintf (gsi,
2870 gimple_call_arg (stmt, 0),
2871 gimple_call_arg (stmt, 2),
2872 n == 4
2873 ? gimple_call_arg (stmt, 3)
2874 : NULL_TREE,
2875 fcode);
2876 break;
ad03a744
RB
2877 case BUILT_IN_PRINTF:
2878 case BUILT_IN_PRINTF_UNLOCKED:
2879 case BUILT_IN_VPRINTF:
2880 if (n == 1 || n == 2)
2881 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
2882 n == 2
2883 ? gimple_call_arg (stmt, 1)
2884 : NULL_TREE, fcode);
2885 break;
2886 case BUILT_IN_PRINTF_CHK:
2887 case BUILT_IN_VPRINTF_CHK:
2888 if (n == 2 || n == 3)
2889 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
2890 n == 3
2891 ? gimple_call_arg (stmt, 2)
2892 : NULL_TREE, fcode);
48126138
NS
2893 case BUILT_IN_ACC_ON_DEVICE:
2894 return gimple_fold_builtin_acc_on_device (gsi,
2895 gimple_call_arg (stmt, 0));
fef5a0d9
RB
2896 default:;
2897 }
2898
2899 /* Try the generic builtin folder. */
2900 bool ignore = (gimple_call_lhs (stmt) == NULL);
2901 tree result = fold_call_stmt (stmt, ignore);
2902 if (result)
2903 {
2904 if (ignore)
2905 STRIP_NOPS (result);
2906 else
2907 result = fold_convert (gimple_call_return_type (stmt), result);
2908 if (!update_call_from_tree (gsi, result))
2909 gimplify_and_update_call_from_tree (gsi, result);
2910 return true;
2911 }
2912
2913 return false;
2914}
2915
1304953e
JJ
2916/* Return true if ARG0 CODE ARG1 in infinite signed precision operation
2917 doesn't fit into TYPE. The test for overflow should be regardless of
2918 -fwrapv, and even for unsigned types. */
2919
2920bool
2921arith_overflowed_p (enum tree_code code, const_tree type,
2922 const_tree arg0, const_tree arg1)
2923{
2924 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
2925 typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
2926 widest2_int_cst;
2927 widest2_int warg0 = widest2_int_cst (arg0);
2928 widest2_int warg1 = widest2_int_cst (arg1);
2929 widest2_int wres;
2930 switch (code)
2931 {
2932 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
2933 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
2934 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
2935 default: gcc_unreachable ();
2936 }
2937 signop sign = TYPE_SIGN (type);
2938 if (sign == UNSIGNED && wi::neg_p (wres))
2939 return true;
2940 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
2941}
2942
cbdd87d4
RG
2943/* Attempt to fold a call statement referenced by the statement iterator GSI.
2944 The statement may be replaced by another statement, e.g., if the call
2945 simplifies to a constant value. Return true if any changes were made.
2946 It is assumed that the operands have been previously folded. */
2947
e021c122 2948static bool
ceeffab0 2949gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
cbdd87d4 2950{
538dd0b7 2951 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3b45a007 2952 tree callee;
e021c122
RG
2953 bool changed = false;
2954 unsigned i;
cbdd87d4 2955
e021c122
RG
2956 /* Fold *& in call arguments. */
2957 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2958 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
2959 {
2960 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
2961 if (tmp)
2962 {
2963 gimple_call_set_arg (stmt, i, tmp);
2964 changed = true;
2965 }
2966 }
3b45a007
RG
2967
2968 /* Check for virtual calls that became direct calls. */
2969 callee = gimple_call_fn (stmt);
25583c4f 2970 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
3b45a007 2971 {
49c471e3
MJ
2972 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
2973 {
450ad0cd
JH
2974 if (dump_file && virtual_method_call_p (callee)
2975 && !possible_polymorphic_call_target_p
6f8091fc
JH
2976 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
2977 (OBJ_TYPE_REF_EXPR (callee)))))
450ad0cd
JH
2978 {
2979 fprintf (dump_file,
a70e9985 2980 "Type inheritance inconsistent devirtualization of ");
450ad0cd
JH
2981 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
2982 fprintf (dump_file, " to ");
2983 print_generic_expr (dump_file, callee, TDF_SLIM);
2984 fprintf (dump_file, "\n");
2985 }
2986
49c471e3 2987 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
e021c122
RG
2988 changed = true;
2989 }
a70e9985 2990 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
e021c122 2991 {
61dd6a2e
JH
2992 bool final;
2993 vec <cgraph_node *>targets
058d0a90 2994 = possible_polymorphic_call_targets (callee, stmt, &final);
2b5f0895 2995 if (final && targets.length () <= 1 && dbg_cnt (devirt))
e021c122 2996 {
a70e9985 2997 tree lhs = gimple_call_lhs (stmt);
2b5f0895
XDL
2998 if (dump_enabled_p ())
2999 {
807b7d62 3000 location_t loc = gimple_location_safe (stmt);
2b5f0895
XDL
3001 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
3002 "folding virtual function call to %s\n",
3003 targets.length () == 1
3004 ? targets[0]->name ()
3005 : "__builtin_unreachable");
3006 }
61dd6a2e 3007 if (targets.length () == 1)
cf3e5a89
JJ
3008 {
3009 gimple_call_set_fndecl (stmt, targets[0]->decl);
3010 changed = true;
a70e9985
JJ
3011 /* If the call becomes noreturn, remove the lhs. */
3012 if (lhs && (gimple_call_flags (stmt) & ECF_NORETURN))
3013 {
3014 if (TREE_CODE (lhs) == SSA_NAME)
3015 {
b731b390 3016 tree var = create_tmp_var (TREE_TYPE (lhs));
a70e9985 3017 tree def = get_or_create_ssa_default_def (cfun, var);
355fe088 3018 gimple *new_stmt = gimple_build_assign (lhs, def);
a70e9985
JJ
3019 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
3020 }
3021 gimple_call_set_lhs (stmt, NULL_TREE);
3022 }
0b986c6a 3023 maybe_remove_unused_call_args (cfun, stmt);
cf3e5a89 3024 }
a70e9985 3025 else
cf3e5a89
JJ
3026 {
3027 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
355fe088 3028 gimple *new_stmt = gimple_build_call (fndecl, 0);
cf3e5a89 3029 gimple_set_location (new_stmt, gimple_location (stmt));
a70e9985
JJ
3030 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3031 {
b731b390 3032 tree var = create_tmp_var (TREE_TYPE (lhs));
a70e9985 3033 tree def = get_or_create_ssa_default_def (cfun, var);
eb14a79f
ML
3034
3035 /* To satisfy condition for
3036 cgraph_update_edges_for_call_stmt_node,
3037 we need to preserve GIMPLE_CALL statement
3038 at position of GSI iterator. */
a70e9985 3039 update_call_from_tree (gsi, def);
eb14a79f 3040 gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
a70e9985
JJ
3041 }
3042 else
42e52a51
RB
3043 {
3044 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3045 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3046 gsi_replace (gsi, new_stmt, false);
3047 }
cf3e5a89
JJ
3048 return true;
3049 }
e021c122 3050 }
49c471e3 3051 }
e021c122 3052 }
49c471e3 3053
f2d3d07e
RH
3054 /* Check for indirect calls that became direct calls, and then
3055 no longer require a static chain. */
3056 if (gimple_call_chain (stmt))
3057 {
3058 tree fn = gimple_call_fndecl (stmt);
3059 if (fn && !DECL_STATIC_CHAIN (fn))
3060 {
3061 gimple_call_set_chain (stmt, NULL);
3062 changed = true;
3063 }
3064 else
3065 {
3066 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
3067 if (tmp)
3068 {
3069 gimple_call_set_chain (stmt, tmp);
3070 changed = true;
3071 }
3072 }
3073 }
3074
e021c122
RG
3075 if (inplace)
3076 return changed;
3077
3078 /* Check for builtins that CCP can handle using information not
3079 available in the generic fold routines. */
fef5a0d9
RB
3080 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
3081 {
3082 if (gimple_fold_builtin (gsi))
3083 changed = true;
3084 }
3085 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
e021c122 3086 {
ea679d55 3087 changed |= targetm.gimple_fold_builtin (gsi);
3b45a007 3088 }
368b454d 3089 else if (gimple_call_internal_p (stmt))
ed9c79e1 3090 {
368b454d
JJ
3091 enum tree_code subcode = ERROR_MARK;
3092 tree result = NULL_TREE;
1304953e
JJ
3093 bool cplx_result = false;
3094 tree overflow = NULL_TREE;
368b454d
JJ
3095 switch (gimple_call_internal_fn (stmt))
3096 {
3097 case IFN_BUILTIN_EXPECT:
3098 result = fold_builtin_expect (gimple_location (stmt),
3099 gimple_call_arg (stmt, 0),
3100 gimple_call_arg (stmt, 1),
3101 gimple_call_arg (stmt, 2));
3102 break;
0e82f089
MP
3103 case IFN_UBSAN_OBJECT_SIZE:
3104 if (integer_all_onesp (gimple_call_arg (stmt, 2))
3105 || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
3106 && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
3107 && tree_int_cst_le (gimple_call_arg (stmt, 1),
3108 gimple_call_arg (stmt, 2))))
3109 {
f6b4dc28 3110 gsi_replace (gsi, gimple_build_nop (), false);
0e82f089
MP
3111 unlink_stmt_vdef (stmt);
3112 release_defs (stmt);
3113 return true;
3114 }
3115 break;
368b454d
JJ
3116 case IFN_UBSAN_CHECK_ADD:
3117 subcode = PLUS_EXPR;
3118 break;
3119 case IFN_UBSAN_CHECK_SUB:
3120 subcode = MINUS_EXPR;
3121 break;
3122 case IFN_UBSAN_CHECK_MUL:
3123 subcode = MULT_EXPR;
3124 break;
1304953e
JJ
3125 case IFN_ADD_OVERFLOW:
3126 subcode = PLUS_EXPR;
3127 cplx_result = true;
3128 break;
3129 case IFN_SUB_OVERFLOW:
3130 subcode = MINUS_EXPR;
3131 cplx_result = true;
3132 break;
3133 case IFN_MUL_OVERFLOW:
3134 subcode = MULT_EXPR;
3135 cplx_result = true;
3136 break;
368b454d
JJ
3137 default:
3138 break;
3139 }
3140 if (subcode != ERROR_MARK)
3141 {
3142 tree arg0 = gimple_call_arg (stmt, 0);
3143 tree arg1 = gimple_call_arg (stmt, 1);
1304953e
JJ
3144 tree type = TREE_TYPE (arg0);
3145 if (cplx_result)
3146 {
3147 tree lhs = gimple_call_lhs (stmt);
3148 if (lhs == NULL_TREE)
3149 type = NULL_TREE;
3150 else
3151 type = TREE_TYPE (TREE_TYPE (lhs));
3152 }
3153 if (type == NULL_TREE)
3154 ;
368b454d 3155 /* x = y + 0; x = y - 0; x = y * 0; */
1304953e
JJ
3156 else if (integer_zerop (arg1))
3157 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
368b454d
JJ
3158 /* x = 0 + y; x = 0 * y; */
3159 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
1304953e 3160 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
368b454d
JJ
3161 /* x = y - y; */
3162 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
1304953e 3163 result = integer_zero_node;
368b454d 3164 /* x = y * 1; x = 1 * y; */
1304953e
JJ
3165 else if (subcode == MULT_EXPR && integer_onep (arg1))
3166 result = arg0;
3167 else if (subcode == MULT_EXPR && integer_onep (arg0))
3168 result = arg1;
3169 else if (TREE_CODE (arg0) == INTEGER_CST
3170 && TREE_CODE (arg1) == INTEGER_CST)
368b454d 3171 {
1304953e
JJ
3172 if (cplx_result)
3173 result = int_const_binop (subcode, fold_convert (type, arg0),
3174 fold_convert (type, arg1));
3175 else
3176 result = int_const_binop (subcode, arg0, arg1);
3177 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
3178 {
3179 if (cplx_result)
3180 overflow = build_one_cst (type);
3181 else
3182 result = NULL_TREE;
3183 }
3184 }
3185 if (result)
3186 {
3187 if (result == integer_zero_node)
3188 result = build_zero_cst (type);
3189 else if (cplx_result && TREE_TYPE (result) != type)
3190 {
3191 if (TREE_CODE (result) == INTEGER_CST)
3192 {
3193 if (arith_overflowed_p (PLUS_EXPR, type, result,
3194 integer_zero_node))
3195 overflow = build_one_cst (type);
3196 }
3197 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
3198 && TYPE_UNSIGNED (type))
3199 || (TYPE_PRECISION (type)
3200 < (TYPE_PRECISION (TREE_TYPE (result))
3201 + (TYPE_UNSIGNED (TREE_TYPE (result))
3202 && !TYPE_UNSIGNED (type)))))
3203 result = NULL_TREE;
3204 if (result)
3205 result = fold_convert (type, result);
3206 }
368b454d
JJ
3207 }
3208 }
1304953e 3209
ed9c79e1
JJ
3210 if (result)
3211 {
1304953e
JJ
3212 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
3213 result = drop_tree_overflow (result);
3214 if (cplx_result)
3215 {
3216 if (overflow == NULL_TREE)
3217 overflow = build_zero_cst (TREE_TYPE (result));
3218 tree ctype = build_complex_type (TREE_TYPE (result));
3219 if (TREE_CODE (result) == INTEGER_CST
3220 && TREE_CODE (overflow) == INTEGER_CST)
3221 result = build_complex (ctype, result, overflow);
3222 else
3223 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
3224 ctype, result, overflow);
3225 }
ed9c79e1
JJ
3226 if (!update_call_from_tree (gsi, result))
3227 gimplify_and_update_call_from_tree (gsi, result);
3228 changed = true;
3229 }
3230 }
3b45a007 3231
e021c122 3232 return changed;
cbdd87d4
RG
3233}
3234
e0ee10ed 3235
89a79e96
RB
3236/* Return true whether NAME has a use on STMT. */
3237
3238static bool
355fe088 3239has_use_on_stmt (tree name, gimple *stmt)
89a79e96
RB
3240{
3241 imm_use_iterator iter;
3242 use_operand_p use_p;
3243 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
3244 if (USE_STMT (use_p) == stmt)
3245 return true;
3246 return false;
3247}
3248
e0ee10ed
RB
3249/* Worker for fold_stmt_1 dispatch to pattern based folding with
3250 gimple_simplify.
3251
3252 Replaces *GSI with the simplification result in RCODE and OPS
3253 and the associated statements in *SEQ. Does the replacement
3254 according to INPLACE and returns true if the operation succeeded. */
3255
3256static bool
3257replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
3258 code_helper rcode, tree *ops,
3259 gimple_seq *seq, bool inplace)
3260{
355fe088 3261 gimple *stmt = gsi_stmt (*gsi);
e0ee10ed
RB
3262
3263 /* Play safe and do not allow abnormals to be mentioned in
89a79e96
RB
3264 newly created statements. See also maybe_push_res_to_seq.
3265 As an exception allow such uses if there was a use of the
3266 same SSA name on the old stmt. */
e0ee10ed 3267 if ((TREE_CODE (ops[0]) == SSA_NAME
89a79e96
RB
3268 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0])
3269 && !has_use_on_stmt (ops[0], stmt))
e0ee10ed
RB
3270 || (ops[1]
3271 && TREE_CODE (ops[1]) == SSA_NAME
89a79e96
RB
3272 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1])
3273 && !has_use_on_stmt (ops[1], stmt))
e0ee10ed
RB
3274 || (ops[2]
3275 && TREE_CODE (ops[2]) == SSA_NAME
89a79e96
RB
3276 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])
3277 && !has_use_on_stmt (ops[2], stmt)))
e0ee10ed
RB
3278 return false;
3279
538dd0b7 3280 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
e0ee10ed
RB
3281 {
3282 gcc_assert (rcode.is_tree_code ());
3283 if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
3284 /* GIMPLE_CONDs condition may not throw. */
3285 && (!flag_exceptions
3286 || !cfun->can_throw_non_call_exceptions
3287 || !operation_could_trap_p (rcode,
3288 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
3289 false, NULL_TREE)))
538dd0b7 3290 gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
e0ee10ed 3291 else if (rcode == SSA_NAME)
538dd0b7 3292 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
e0ee10ed
RB
3293 build_zero_cst (TREE_TYPE (ops[0])));
3294 else if (rcode == INTEGER_CST)
3295 {
3296 if (integer_zerop (ops[0]))
538dd0b7 3297 gimple_cond_make_false (cond_stmt);
e0ee10ed 3298 else
538dd0b7 3299 gimple_cond_make_true (cond_stmt);
e0ee10ed
RB
3300 }
3301 else if (!inplace)
3302 {
3303 tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
3304 ops, seq);
3305 if (!res)
3306 return false;
538dd0b7 3307 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
e0ee10ed
RB
3308 build_zero_cst (TREE_TYPE (res)));
3309 }
3310 else
3311 return false;
3312 if (dump_file && (dump_flags & TDF_DETAILS))
3313 {
3314 fprintf (dump_file, "gimple_simplified to ");
3315 if (!gimple_seq_empty_p (*seq))
3316 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3317 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
3318 0, TDF_SLIM);
3319 }
3320 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
3321 return true;
3322 }
3323 else if (is_gimple_assign (stmt)
3324 && rcode.is_tree_code ())
3325 {
3326 if (!inplace
f3582e54 3327 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
e0ee10ed
RB
3328 {
3329 maybe_build_generic_op (rcode,
3330 TREE_TYPE (gimple_assign_lhs (stmt)),
3331 &ops[0], ops[1], ops[2]);
00d66391 3332 gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
e0ee10ed
RB
3333 if (dump_file && (dump_flags & TDF_DETAILS))
3334 {
3335 fprintf (dump_file, "gimple_simplified to ");
3336 if (!gimple_seq_empty_p (*seq))
3337 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3338 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
3339 0, TDF_SLIM);
3340 }
3341 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
3342 return true;
3343 }
3344 }
37d486ab
RB
3345 else if (rcode.is_fn_code ()
3346 && gimple_call_builtin_p (stmt, rcode))
3347 {
3348 unsigned i;
3349 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3350 {
3351 gcc_assert (ops[i] != NULL_TREE);
3352 gimple_call_set_arg (stmt, i, ops[i]);
3353 }
3354 if (i < 3)
3355 gcc_assert (ops[i] == NULL_TREE);
4d20d00a 3356 gcc_assert (gimple_seq_empty_p (*seq));
37d486ab
RB
3357 return true;
3358 }
e0ee10ed
RB
3359 else if (!inplace)
3360 {
3361 if (gimple_has_lhs (stmt))
3362 {
3363 tree lhs = gimple_get_lhs (stmt);
de665bbd
RB
3364 if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
3365 ops, seq, lhs))
3366 return false;
e0ee10ed
RB
3367 if (dump_file && (dump_flags & TDF_DETAILS))
3368 {
3369 fprintf (dump_file, "gimple_simplified to ");
3370 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3371 }
3372 gsi_replace_with_seq_vops (gsi, *seq);
3373 return true;
3374 }
3375 else
3376 gcc_unreachable ();
3377 }
3378
3379 return false;
3380}
3381
040292e7
RB
3382/* Canonicalize MEM_REFs invariant address operand after propagation. */
3383
3384static bool
3385maybe_canonicalize_mem_ref_addr (tree *t)
3386{
3387 bool res = false;
3388
3389 if (TREE_CODE (*t) == ADDR_EXPR)
3390 t = &TREE_OPERAND (*t, 0);
3391
3392 while (handled_component_p (*t))
3393 t = &TREE_OPERAND (*t, 0);
3394
3395 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
3396 of invariant addresses into a SSA name MEM_REF address. */
3397 if (TREE_CODE (*t) == MEM_REF
3398 || TREE_CODE (*t) == TARGET_MEM_REF)
3399 {
3400 tree addr = TREE_OPERAND (*t, 0);
3401 if (TREE_CODE (addr) == ADDR_EXPR
3402 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
3403 || handled_component_p (TREE_OPERAND (addr, 0))))
3404 {
3405 tree base;
3406 HOST_WIDE_INT coffset;
3407 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
3408 &coffset);
3409 if (!base)
3410 gcc_unreachable ();
3411
3412 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
3413 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
3414 TREE_OPERAND (*t, 1),
3415 size_int (coffset));
3416 res = true;
3417 }
3418 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
3419 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
3420 }
3421
3422 /* Canonicalize back MEM_REFs to plain reference trees if the object
3423 accessed is a decl that has the same access semantics as the MEM_REF. */
3424 if (TREE_CODE (*t) == MEM_REF
3425 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
f3dccf50
RB
3426 && integer_zerop (TREE_OPERAND (*t, 1))
3427 && MR_DEPENDENCE_CLIQUE (*t) == 0)
040292e7
RB
3428 {
3429 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
3430 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
3431 if (/* Same volatile qualification. */
3432 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
3433 /* Same TBAA behavior with -fstrict-aliasing. */
3434 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
3435 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
3436 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
3437 /* Same alignment. */
3438 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
3439 /* We have to look out here to not drop a required conversion
3440 from the rhs to the lhs if *t appears on the lhs or vice-versa
3441 if it appears on the rhs. Thus require strict type
3442 compatibility. */
3443 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
3444 {
3445 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
3446 res = true;
3447 }
3448 }
3449
3450 /* Canonicalize TARGET_MEM_REF in particular with respect to
3451 the indexes becoming constant. */
3452 else if (TREE_CODE (*t) == TARGET_MEM_REF)
3453 {
3454 tree tem = maybe_fold_tmr (*t);
3455 if (tem)
3456 {
3457 *t = tem;
3458 res = true;
3459 }
3460 }
3461
3462 return res;
3463}
3464
cbdd87d4
RG
3465/* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
3466 distinguishes both cases. */
3467
3468static bool
e0ee10ed 3469fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
cbdd87d4
RG
3470{
3471 bool changed = false;
355fe088 3472 gimple *stmt = gsi_stmt (*gsi);
cbdd87d4
RG
3473 unsigned i;
3474
040292e7
RB
3475 /* First do required canonicalization of [TARGET_]MEM_REF addresses
3476 after propagation.
3477 ??? This shouldn't be done in generic folding but in the
3478 propagation helpers which also know whether an address was
89a79e96
RB
3479 propagated.
3480 Also canonicalize operand order. */
040292e7
RB
3481 switch (gimple_code (stmt))
3482 {
3483 case GIMPLE_ASSIGN:
3484 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
3485 {
3486 tree *rhs = gimple_assign_rhs1_ptr (stmt);
3487 if ((REFERENCE_CLASS_P (*rhs)
3488 || TREE_CODE (*rhs) == ADDR_EXPR)
3489 && maybe_canonicalize_mem_ref_addr (rhs))
3490 changed = true;
3491 tree *lhs = gimple_assign_lhs_ptr (stmt);
3492 if (REFERENCE_CLASS_P (*lhs)
3493 && maybe_canonicalize_mem_ref_addr (lhs))
3494 changed = true;
3495 }
89a79e96
RB
3496 else
3497 {
3498 /* Canonicalize operand order. */
3499 enum tree_code code = gimple_assign_rhs_code (stmt);
3500 if (TREE_CODE_CLASS (code) == tcc_comparison
3501 || commutative_tree_code (code)
3502 || commutative_ternary_tree_code (code))
3503 {
3504 tree rhs1 = gimple_assign_rhs1 (stmt);
3505 tree rhs2 = gimple_assign_rhs2 (stmt);
3506 if (tree_swap_operands_p (rhs1, rhs2, false))
3507 {
3508 gimple_assign_set_rhs1 (stmt, rhs2);
3509 gimple_assign_set_rhs2 (stmt, rhs1);
3510 if (TREE_CODE_CLASS (code) == tcc_comparison)
3511 gimple_assign_set_rhs_code (stmt,
3512 swap_tree_comparison (code));
3513 changed = true;
3514 }
3515 }
3516 }
040292e7
RB
3517 break;
3518 case GIMPLE_CALL:
3519 {
3520 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3521 {
3522 tree *arg = gimple_call_arg_ptr (stmt, i);
3523 if (REFERENCE_CLASS_P (*arg)
3524 && maybe_canonicalize_mem_ref_addr (arg))
3525 changed = true;
3526 }
3527 tree *lhs = gimple_call_lhs_ptr (stmt);
3528 if (*lhs
3529 && REFERENCE_CLASS_P (*lhs)
3530 && maybe_canonicalize_mem_ref_addr (lhs))
3531 changed = true;
3532 break;
3533 }
3534 case GIMPLE_ASM:
3535 {
538dd0b7
DM
3536 gasm *asm_stmt = as_a <gasm *> (stmt);
3537 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
040292e7 3538 {
538dd0b7 3539 tree link = gimple_asm_output_op (asm_stmt, i);
040292e7
RB
3540 tree op = TREE_VALUE (link);
3541 if (REFERENCE_CLASS_P (op)
3542 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
3543 changed = true;
3544 }
538dd0b7 3545 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
040292e7 3546 {
538dd0b7 3547 tree link = gimple_asm_input_op (asm_stmt, i);
040292e7
RB
3548 tree op = TREE_VALUE (link);
3549 if ((REFERENCE_CLASS_P (op)
3550 || TREE_CODE (op) == ADDR_EXPR)
3551 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
3552 changed = true;
3553 }
3554 }
3555 break;
3556 case GIMPLE_DEBUG:
3557 if (gimple_debug_bind_p (stmt))
3558 {
3559 tree *val = gimple_debug_bind_get_value_ptr (stmt);
3560 if (*val
3561 && (REFERENCE_CLASS_P (*val)
3562 || TREE_CODE (*val) == ADDR_EXPR)
3563 && maybe_canonicalize_mem_ref_addr (val))
3564 changed = true;
3565 }
3566 break;
89a79e96
RB
3567 case GIMPLE_COND:
3568 {
3569 /* Canonicalize operand order. */
3570 tree lhs = gimple_cond_lhs (stmt);
3571 tree rhs = gimple_cond_rhs (stmt);
3572 if (tree_swap_operands_p (lhs, rhs, false))
3573 {
3574 gcond *gc = as_a <gcond *> (stmt);
3575 gimple_cond_set_lhs (gc, rhs);
3576 gimple_cond_set_rhs (gc, lhs);
3577 gimple_cond_set_code (gc,
3578 swap_tree_comparison (gimple_cond_code (gc)));
3579 changed = true;
3580 }
3581 }
040292e7
RB
3582 default:;
3583 }
3584
e0ee10ed
RB
3585 /* Dispatch to pattern-based folding. */
3586 if (!inplace
3587 || is_gimple_assign (stmt)
3588 || gimple_code (stmt) == GIMPLE_COND)
3589 {
3590 gimple_seq seq = NULL;
3591 code_helper rcode;
3592 tree ops[3] = {};
0ff093d8
RB
3593 if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq,
3594 valueize, valueize))
e0ee10ed
RB
3595 {
3596 if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
3597 changed = true;
3598 else
3599 gimple_seq_discard (seq);
3600 }
3601 }
3602
3603 stmt = gsi_stmt (*gsi);
3604
cbdd87d4
RG
3605 /* Fold the main computation performed by the statement. */
3606 switch (gimple_code (stmt))
3607 {
3608 case GIMPLE_ASSIGN:
3609 {
819ec64c
RB
3610 /* Try to canonicalize for boolean-typed X the comparisons
3611 X == 0, X == 1, X != 0, and X != 1. */
3612 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
3613 || gimple_assign_rhs_code (stmt) == NE_EXPR)
5fbcc0ed 3614 {
819ec64c
RB
3615 tree lhs = gimple_assign_lhs (stmt);
3616 tree op1 = gimple_assign_rhs1 (stmt);
3617 tree op2 = gimple_assign_rhs2 (stmt);
3618 tree type = TREE_TYPE (op1);
3619
3620 /* Check whether the comparison operands are of the same boolean
3621 type as the result type is.
3622 Check that second operand is an integer-constant with value
3623 one or zero. */
3624 if (TREE_CODE (op2) == INTEGER_CST
3625 && (integer_zerop (op2) || integer_onep (op2))
3626 && useless_type_conversion_p (TREE_TYPE (lhs), type))
3627 {
3628 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
3629 bool is_logical_not = false;
3630
3631 /* X == 0 and X != 1 is a logical-not.of X
3632 X == 1 and X != 0 is X */
3633 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
3634 || (cmp_code == NE_EXPR && integer_onep (op2)))
3635 is_logical_not = true;
3636
3637 if (is_logical_not == false)
3638 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
3639 /* Only for one-bit precision typed X the transformation
3640 !X -> ~X is valied. */
3641 else if (TYPE_PRECISION (type) == 1)
3642 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
3643 /* Otherwise we use !X -> X ^ 1. */
3644 else
3645 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
3646 build_int_cst (type, 1));
3647 changed = true;
3648 break;
3649 }
5fbcc0ed 3650 }
819ec64c
RB
3651
3652 unsigned old_num_ops = gimple_num_ops (stmt);
3653 tree lhs = gimple_assign_lhs (stmt);
3654 tree new_rhs = fold_gimple_assign (gsi);
cbdd87d4
RG
3655 if (new_rhs
3656 && !useless_type_conversion_p (TREE_TYPE (lhs),
3657 TREE_TYPE (new_rhs)))
3658 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
3659 if (new_rhs
3660 && (!inplace
3661 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
3662 {
3663 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
3664 changed = true;
3665 }
3666 break;
3667 }
3668
cbdd87d4 3669 case GIMPLE_CALL:
ceeffab0 3670 changed |= gimple_fold_call (gsi, inplace);
cbdd87d4
RG
3671 break;
3672
3673 case GIMPLE_ASM:
3674 /* Fold *& in asm operands. */
38384150 3675 {
538dd0b7 3676 gasm *asm_stmt = as_a <gasm *> (stmt);
38384150
JJ
3677 size_t noutputs;
3678 const char **oconstraints;
3679 const char *constraint;
3680 bool allows_mem, allows_reg;
3681
538dd0b7 3682 noutputs = gimple_asm_noutputs (asm_stmt);
38384150
JJ
3683 oconstraints = XALLOCAVEC (const char *, noutputs);
3684
538dd0b7 3685 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
38384150 3686 {
538dd0b7 3687 tree link = gimple_asm_output_op (asm_stmt, i);
38384150
JJ
3688 tree op = TREE_VALUE (link);
3689 oconstraints[i]
3690 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3691 if (REFERENCE_CLASS_P (op)
3692 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
3693 {
3694 TREE_VALUE (link) = op;
3695 changed = true;
3696 }
3697 }
538dd0b7 3698 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
38384150 3699 {
538dd0b7 3700 tree link = gimple_asm_input_op (asm_stmt, i);
38384150
JJ
3701 tree op = TREE_VALUE (link);
3702 constraint
3703 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3704 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
3705 oconstraints, &allows_mem, &allows_reg);
3706 if (REFERENCE_CLASS_P (op)
3707 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
3708 != NULL_TREE)
3709 {
3710 TREE_VALUE (link) = op;
3711 changed = true;
3712 }
3713 }
3714 }
cbdd87d4
RG
3715 break;
3716
bd422c4a
RG
3717 case GIMPLE_DEBUG:
3718 if (gimple_debug_bind_p (stmt))
3719 {
3720 tree val = gimple_debug_bind_get_value (stmt);
3721 if (val
3722 && REFERENCE_CLASS_P (val))
3723 {
3724 tree tem = maybe_fold_reference (val, false);
3725 if (tem)
3726 {
3727 gimple_debug_bind_set_value (stmt, tem);
3728 changed = true;
3729 }
3730 }
3e888a5e
RG
3731 else if (val
3732 && TREE_CODE (val) == ADDR_EXPR)
3733 {
3734 tree ref = TREE_OPERAND (val, 0);
3735 tree tem = maybe_fold_reference (ref, false);
3736 if (tem)
3737 {
3738 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
3739 gimple_debug_bind_set_value (stmt, tem);
3740 changed = true;
3741 }
3742 }
bd422c4a
RG
3743 }
3744 break;
3745
cbdd87d4
RG
3746 default:;
3747 }
3748
3749 stmt = gsi_stmt (*gsi);
3750
37376165
RB
3751 /* Fold *& on the lhs. */
3752 if (gimple_has_lhs (stmt))
cbdd87d4
RG
3753 {
3754 tree lhs = gimple_get_lhs (stmt);
3755 if (lhs && REFERENCE_CLASS_P (lhs))
3756 {
3757 tree new_lhs = maybe_fold_reference (lhs, true);
3758 if (new_lhs)
3759 {
3760 gimple_set_lhs (stmt, new_lhs);
3761 changed = true;
3762 }
3763 }
3764 }
3765
3766 return changed;
3767}
3768
e0ee10ed
RB
3769/* Valueziation callback that ends up not following SSA edges. */
3770
3771tree
3772no_follow_ssa_edges (tree)
3773{
3774 return NULL_TREE;
3775}
3776
45cc9f96
RB
3777/* Valueization callback that ends up following single-use SSA edges only. */
3778
3779tree
3780follow_single_use_edges (tree val)
3781{
3782 if (TREE_CODE (val) == SSA_NAME
3783 && !has_single_use (val))
3784 return NULL_TREE;
3785 return val;
3786}
3787
cbdd87d4
RG
3788/* Fold the statement pointed to by GSI. In some cases, this function may
3789 replace the whole statement with a new one. Returns true iff folding
3790 makes any changes.
3791 The statement pointed to by GSI should be in valid gimple form but may
3792 be in unfolded state as resulting from for example constant propagation
3793 which can produce *&x = 0. */
3794
3795bool
3796fold_stmt (gimple_stmt_iterator *gsi)
3797{
e0ee10ed
RB
3798 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
3799}
3800
3801bool
3802fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
3803{
3804 return fold_stmt_1 (gsi, false, valueize);
cbdd87d4
RG
3805}
3806
59401b92 3807/* Perform the minimal folding on statement *GSI. Only operations like
cbdd87d4
RG
3808 *&x created by constant propagation are handled. The statement cannot
3809 be replaced with a new one. Return true if the statement was
3810 changed, false otherwise.
59401b92 3811 The statement *GSI should be in valid gimple form but may
cbdd87d4
RG
3812 be in unfolded state as resulting from for example constant propagation
3813 which can produce *&x = 0. */
3814
3815bool
59401b92 3816fold_stmt_inplace (gimple_stmt_iterator *gsi)
cbdd87d4 3817{
355fe088 3818 gimple *stmt = gsi_stmt (*gsi);
e0ee10ed 3819 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
59401b92 3820 gcc_assert (gsi_stmt (*gsi) == stmt);
cbdd87d4
RG
3821 return changed;
3822}
3823
e89065a1
SL
3824/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
3825 if EXPR is null or we don't know how.
3826 If non-null, the result always has boolean type. */
3827
3828static tree
3829canonicalize_bool (tree expr, bool invert)
3830{
3831 if (!expr)
3832 return NULL_TREE;
3833 else if (invert)
3834 {
3835 if (integer_nonzerop (expr))
3836 return boolean_false_node;
3837 else if (integer_zerop (expr))
3838 return boolean_true_node;
3839 else if (TREE_CODE (expr) == SSA_NAME)
3840 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
3841 build_int_cst (TREE_TYPE (expr), 0));
98209db3 3842 else if (COMPARISON_CLASS_P (expr))
e89065a1
SL
3843 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
3844 boolean_type_node,
3845 TREE_OPERAND (expr, 0),
3846 TREE_OPERAND (expr, 1));
3847 else
3848 return NULL_TREE;
3849 }
3850 else
3851 {
3852 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
3853 return expr;
3854 if (integer_nonzerop (expr))
3855 return boolean_true_node;
3856 else if (integer_zerop (expr))
3857 return boolean_false_node;
3858 else if (TREE_CODE (expr) == SSA_NAME)
3859 return fold_build2 (NE_EXPR, boolean_type_node, expr,
3860 build_int_cst (TREE_TYPE (expr), 0));
98209db3 3861 else if (COMPARISON_CLASS_P (expr))
e89065a1
SL
3862 return fold_build2 (TREE_CODE (expr),
3863 boolean_type_node,
3864 TREE_OPERAND (expr, 0),
3865 TREE_OPERAND (expr, 1));
3866 else
3867 return NULL_TREE;
3868 }
3869}
3870
3871/* Check to see if a boolean expression EXPR is logically equivalent to the
3872 comparison (OP1 CODE OP2). Check for various identities involving
3873 SSA_NAMEs. */
3874
3875static bool
3876same_bool_comparison_p (const_tree expr, enum tree_code code,
3877 const_tree op1, const_tree op2)
3878{
355fe088 3879 gimple *s;
e89065a1
SL
3880
3881 /* The obvious case. */
3882 if (TREE_CODE (expr) == code
3883 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
3884 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
3885 return true;
3886
3887 /* Check for comparing (name, name != 0) and the case where expr
3888 is an SSA_NAME with a definition matching the comparison. */
3889 if (TREE_CODE (expr) == SSA_NAME
3890 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
3891 {
3892 if (operand_equal_p (expr, op1, 0))
3893 return ((code == NE_EXPR && integer_zerop (op2))
3894 || (code == EQ_EXPR && integer_nonzerop (op2)));
3895 s = SSA_NAME_DEF_STMT (expr);
3896 if (is_gimple_assign (s)
3897 && gimple_assign_rhs_code (s) == code
3898 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
3899 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
3900 return true;
3901 }
3902
3903 /* If op1 is of the form (name != 0) or (name == 0), and the definition
3904 of name is a comparison, recurse. */
3905 if (TREE_CODE (op1) == SSA_NAME
3906 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
3907 {
3908 s = SSA_NAME_DEF_STMT (op1);
3909 if (is_gimple_assign (s)
3910 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
3911 {
3912 enum tree_code c = gimple_assign_rhs_code (s);
3913 if ((c == NE_EXPR && integer_zerop (op2))
3914 || (c == EQ_EXPR && integer_nonzerop (op2)))
3915 return same_bool_comparison_p (expr, c,
3916 gimple_assign_rhs1 (s),
3917 gimple_assign_rhs2 (s));
3918 if ((c == EQ_EXPR && integer_zerop (op2))
3919 || (c == NE_EXPR && integer_nonzerop (op2)))
3920 return same_bool_comparison_p (expr,
3921 invert_tree_comparison (c, false),
3922 gimple_assign_rhs1 (s),
3923 gimple_assign_rhs2 (s));
3924 }
3925 }
3926 return false;
3927}
3928
3929/* Check to see if two boolean expressions OP1 and OP2 are logically
3930 equivalent. */
3931
3932static bool
3933same_bool_result_p (const_tree op1, const_tree op2)
3934{
3935 /* Simple cases first. */
3936 if (operand_equal_p (op1, op2, 0))
3937 return true;
3938
3939 /* Check the cases where at least one of the operands is a comparison.
3940 These are a bit smarter than operand_equal_p in that they apply some
3941 identifies on SSA_NAMEs. */
98209db3 3942 if (COMPARISON_CLASS_P (op2)
e89065a1
SL
3943 && same_bool_comparison_p (op1, TREE_CODE (op2),
3944 TREE_OPERAND (op2, 0),
3945 TREE_OPERAND (op2, 1)))
3946 return true;
98209db3 3947 if (COMPARISON_CLASS_P (op1)
e89065a1
SL
3948 && same_bool_comparison_p (op2, TREE_CODE (op1),
3949 TREE_OPERAND (op1, 0),
3950 TREE_OPERAND (op1, 1)))
3951 return true;
3952
3953 /* Default case. */
3954 return false;
3955}
3956
3957/* Forward declarations for some mutually recursive functions. */
3958
3959static tree
3960and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
3961 enum tree_code code2, tree op2a, tree op2b);
3962static tree
3963and_var_with_comparison (tree var, bool invert,
3964 enum tree_code code2, tree op2a, tree op2b);
3965static tree
355fe088 3966and_var_with_comparison_1 (gimple *stmt,
e89065a1
SL
3967 enum tree_code code2, tree op2a, tree op2b);
3968static tree
3969or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
3970 enum tree_code code2, tree op2a, tree op2b);
3971static tree
3972or_var_with_comparison (tree var, bool invert,
3973 enum tree_code code2, tree op2a, tree op2b);
3974static tree
355fe088 3975or_var_with_comparison_1 (gimple *stmt,
e89065a1
SL
3976 enum tree_code code2, tree op2a, tree op2b);
3977
3978/* Helper function for and_comparisons_1: try to simplify the AND of the
3979 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
3980 If INVERT is true, invert the value of the VAR before doing the AND.
3981 Return NULL_EXPR if we can't simplify this to a single expression. */
3982
3983static tree
3984and_var_with_comparison (tree var, bool invert,
3985 enum tree_code code2, tree op2a, tree op2b)
3986{
3987 tree t;
355fe088 3988 gimple *stmt = SSA_NAME_DEF_STMT (var);
e89065a1
SL
3989
3990 /* We can only deal with variables whose definitions are assignments. */
3991 if (!is_gimple_assign (stmt))
3992 return NULL_TREE;
3993
3994 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
3995 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
3996 Then we only have to consider the simpler non-inverted cases. */
3997 if (invert)
3998 t = or_var_with_comparison_1 (stmt,
3999 invert_tree_comparison (code2, false),
4000 op2a, op2b);
4001 else
4002 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
4003 return canonicalize_bool (t, invert);
4004}
4005
4006/* Try to simplify the AND of the ssa variable defined by the assignment
4007 STMT with the comparison specified by (OP2A CODE2 OP2B).
4008 Return NULL_EXPR if we can't simplify this to a single expression. */
4009
4010static tree
355fe088 4011and_var_with_comparison_1 (gimple *stmt,
e89065a1
SL
4012 enum tree_code code2, tree op2a, tree op2b)
4013{
4014 tree var = gimple_assign_lhs (stmt);
4015 tree true_test_var = NULL_TREE;
4016 tree false_test_var = NULL_TREE;
4017 enum tree_code innercode = gimple_assign_rhs_code (stmt);
4018
4019 /* Check for identities like (var AND (var == 0)) => false. */
4020 if (TREE_CODE (op2a) == SSA_NAME
4021 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
4022 {
4023 if ((code2 == NE_EXPR && integer_zerop (op2b))
4024 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
4025 {
4026 true_test_var = op2a;
4027 if (var == true_test_var)
4028 return var;
4029 }
4030 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
4031 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
4032 {
4033 false_test_var = op2a;
4034 if (var == false_test_var)
4035 return boolean_false_node;
4036 }
4037 }
4038
4039 /* If the definition is a comparison, recurse on it. */
4040 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
4041 {
4042 tree t = and_comparisons_1 (innercode,
4043 gimple_assign_rhs1 (stmt),
4044 gimple_assign_rhs2 (stmt),
4045 code2,
4046 op2a,
4047 op2b);
4048 if (t)
4049 return t;
4050 }
4051
4052 /* If the definition is an AND or OR expression, we may be able to
4053 simplify by reassociating. */
eb9820c0
KT
4054 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
4055 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
e89065a1
SL
4056 {
4057 tree inner1 = gimple_assign_rhs1 (stmt);
4058 tree inner2 = gimple_assign_rhs2 (stmt);
355fe088 4059 gimple *s;
e89065a1
SL
4060 tree t;
4061 tree partial = NULL_TREE;
eb9820c0 4062 bool is_and = (innercode == BIT_AND_EXPR);
e89065a1
SL
4063
4064 /* Check for boolean identities that don't require recursive examination
4065 of inner1/inner2:
4066 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
4067 inner1 AND (inner1 OR inner2) => inner1
4068 !inner1 AND (inner1 AND inner2) => false
4069 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
4070 Likewise for similar cases involving inner2. */
4071 if (inner1 == true_test_var)
4072 return (is_and ? var : inner1);
4073 else if (inner2 == true_test_var)
4074 return (is_and ? var : inner2);
4075 else if (inner1 == false_test_var)
4076 return (is_and
4077 ? boolean_false_node
4078 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
4079 else if (inner2 == false_test_var)
4080 return (is_and
4081 ? boolean_false_node
4082 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
4083
4084 /* Next, redistribute/reassociate the AND across the inner tests.
4085 Compute the first partial result, (inner1 AND (op2a code op2b)) */
4086 if (TREE_CODE (inner1) == SSA_NAME
4087 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
4088 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4089 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
4090 gimple_assign_rhs1 (s),
4091 gimple_assign_rhs2 (s),
4092 code2, op2a, op2b)))
4093 {
4094 /* Handle the AND case, where we are reassociating:
4095 (inner1 AND inner2) AND (op2a code2 op2b)
4096 => (t AND inner2)
4097 If the partial result t is a constant, we win. Otherwise
4098 continue on to try reassociating with the other inner test. */
4099 if (is_and)
4100 {
4101 if (integer_onep (t))
4102 return inner2;
4103 else if (integer_zerop (t))
4104 return boolean_false_node;
4105 }
4106
4107 /* Handle the OR case, where we are redistributing:
4108 (inner1 OR inner2) AND (op2a code2 op2b)
4109 => (t OR (inner2 AND (op2a code2 op2b))) */
8236c8eb
JJ
4110 else if (integer_onep (t))
4111 return boolean_true_node;
4112
4113 /* Save partial result for later. */
4114 partial = t;
e89065a1
SL
4115 }
4116
4117 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
4118 if (TREE_CODE (inner2) == SSA_NAME
4119 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
4120 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4121 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
4122 gimple_assign_rhs1 (s),
4123 gimple_assign_rhs2 (s),
4124 code2, op2a, op2b)))
4125 {
4126 /* Handle the AND case, where we are reassociating:
4127 (inner1 AND inner2) AND (op2a code2 op2b)
4128 => (inner1 AND t) */
4129 if (is_and)
4130 {
4131 if (integer_onep (t))
4132 return inner1;
4133 else if (integer_zerop (t))
4134 return boolean_false_node;
8236c8eb
JJ
4135 /* If both are the same, we can apply the identity
4136 (x AND x) == x. */
4137 else if (partial && same_bool_result_p (t, partial))
4138 return t;
e89065a1
SL
4139 }
4140
4141 /* Handle the OR case. where we are redistributing:
4142 (inner1 OR inner2) AND (op2a code2 op2b)
4143 => (t OR (inner1 AND (op2a code2 op2b)))
4144 => (t OR partial) */
4145 else
4146 {
4147 if (integer_onep (t))
4148 return boolean_true_node;
4149 else if (partial)
4150 {
4151 /* We already got a simplification for the other
4152 operand to the redistributed OR expression. The
4153 interesting case is when at least one is false.
4154 Or, if both are the same, we can apply the identity
4155 (x OR x) == x. */
4156 if (integer_zerop (partial))
4157 return t;
4158 else if (integer_zerop (t))
4159 return partial;
4160 else if (same_bool_result_p (t, partial))
4161 return t;
4162 }
4163 }
4164 }
4165 }
4166 return NULL_TREE;
4167}
4168
4169/* Try to simplify the AND of two comparisons defined by
4170 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4171 If this can be done without constructing an intermediate value,
4172 return the resulting tree; otherwise NULL_TREE is returned.
4173 This function is deliberately asymmetric as it recurses on SSA_DEFs
4174 in the first comparison but not the second. */
4175
4176static tree
4177and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4178 enum tree_code code2, tree op2a, tree op2b)
4179{
ae22ac3c 4180 tree truth_type = truth_type_for (TREE_TYPE (op1a));
31ed6226 4181
e89065a1
SL
4182 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
4183 if (operand_equal_p (op1a, op2a, 0)
4184 && operand_equal_p (op1b, op2b, 0))
4185 {
eb9820c0 4186 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
4187 tree t = combine_comparisons (UNKNOWN_LOCATION,
4188 TRUTH_ANDIF_EXPR, code1, code2,
31ed6226 4189 truth_type, op1a, op1b);
e89065a1
SL
4190 if (t)
4191 return t;
4192 }
4193
4194 /* Likewise the swapped case of the above. */
4195 if (operand_equal_p (op1a, op2b, 0)
4196 && operand_equal_p (op1b, op2a, 0))
4197 {
eb9820c0 4198 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
4199 tree t = combine_comparisons (UNKNOWN_LOCATION,
4200 TRUTH_ANDIF_EXPR, code1,
4201 swap_tree_comparison (code2),
31ed6226 4202 truth_type, op1a, op1b);
e89065a1
SL
4203 if (t)
4204 return t;
4205 }
4206
4207 /* If both comparisons are of the same value against constants, we might
4208 be able to merge them. */
4209 if (operand_equal_p (op1a, op2a, 0)
4210 && TREE_CODE (op1b) == INTEGER_CST
4211 && TREE_CODE (op2b) == INTEGER_CST)
4212 {
4213 int cmp = tree_int_cst_compare (op1b, op2b);
4214
4215 /* If we have (op1a == op1b), we should either be able to
4216 return that or FALSE, depending on whether the constant op1b
4217 also satisfies the other comparison against op2b. */
4218 if (code1 == EQ_EXPR)
4219 {
4220 bool done = true;
4221 bool val;
4222 switch (code2)
4223 {
4224 case EQ_EXPR: val = (cmp == 0); break;
4225 case NE_EXPR: val = (cmp != 0); break;
4226 case LT_EXPR: val = (cmp < 0); break;
4227 case GT_EXPR: val = (cmp > 0); break;
4228 case LE_EXPR: val = (cmp <= 0); break;
4229 case GE_EXPR: val = (cmp >= 0); break;
4230 default: done = false;
4231 }
4232 if (done)
4233 {
4234 if (val)
4235 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4236 else
4237 return boolean_false_node;
4238 }
4239 }
4240 /* Likewise if the second comparison is an == comparison. */
4241 else if (code2 == EQ_EXPR)
4242 {
4243 bool done = true;
4244 bool val;
4245 switch (code1)
4246 {
4247 case EQ_EXPR: val = (cmp == 0); break;
4248 case NE_EXPR: val = (cmp != 0); break;
4249 case LT_EXPR: val = (cmp > 0); break;
4250 case GT_EXPR: val = (cmp < 0); break;
4251 case LE_EXPR: val = (cmp >= 0); break;
4252 case GE_EXPR: val = (cmp <= 0); break;
4253 default: done = false;
4254 }
4255 if (done)
4256 {
4257 if (val)
4258 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4259 else
4260 return boolean_false_node;
4261 }
4262 }
4263
4264 /* Same business with inequality tests. */
4265 else if (code1 == NE_EXPR)
4266 {
4267 bool val;
4268 switch (code2)
4269 {
4270 case EQ_EXPR: val = (cmp != 0); break;
4271 case NE_EXPR: val = (cmp == 0); break;
4272 case LT_EXPR: val = (cmp >= 0); break;
4273 case GT_EXPR: val = (cmp <= 0); break;
4274 case LE_EXPR: val = (cmp > 0); break;
4275 case GE_EXPR: val = (cmp < 0); break;
4276 default:
4277 val = false;
4278 }
4279 if (val)
4280 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4281 }
4282 else if (code2 == NE_EXPR)
4283 {
4284 bool val;
4285 switch (code1)
4286 {
4287 case EQ_EXPR: val = (cmp == 0); break;
4288 case NE_EXPR: val = (cmp != 0); break;
4289 case LT_EXPR: val = (cmp <= 0); break;
4290 case GT_EXPR: val = (cmp >= 0); break;
4291 case LE_EXPR: val = (cmp < 0); break;
4292 case GE_EXPR: val = (cmp > 0); break;
4293 default:
4294 val = false;
4295 }
4296 if (val)
4297 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4298 }
4299
4300 /* Chose the more restrictive of two < or <= comparisons. */
4301 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
4302 && (code2 == LT_EXPR || code2 == LE_EXPR))
4303 {
4304 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
4305 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4306 else
4307 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4308 }
4309
4310 /* Likewise chose the more restrictive of two > or >= comparisons. */
4311 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
4312 && (code2 == GT_EXPR || code2 == GE_EXPR))
4313 {
4314 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
4315 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4316 else
4317 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4318 }
4319
4320 /* Check for singleton ranges. */
4321 else if (cmp == 0
4322 && ((code1 == LE_EXPR && code2 == GE_EXPR)
4323 || (code1 == GE_EXPR && code2 == LE_EXPR)))
4324 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
4325
4326 /* Check for disjoint ranges. */
4327 else if (cmp <= 0
4328 && (code1 == LT_EXPR || code1 == LE_EXPR)
4329 && (code2 == GT_EXPR || code2 == GE_EXPR))
4330 return boolean_false_node;
4331 else if (cmp >= 0
4332 && (code1 == GT_EXPR || code1 == GE_EXPR)
4333 && (code2 == LT_EXPR || code2 == LE_EXPR))
4334 return boolean_false_node;
4335 }
4336
4337 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4338 NAME's definition is a truth value. See if there are any simplifications
4339 that can be done against the NAME's definition. */
4340 if (TREE_CODE (op1a) == SSA_NAME
4341 && (code1 == NE_EXPR || code1 == EQ_EXPR)
4342 && (integer_zerop (op1b) || integer_onep (op1b)))
4343 {
4344 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
4345 || (code1 == NE_EXPR && integer_onep (op1b)));
355fe088 4346 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
e89065a1
SL
4347 switch (gimple_code (stmt))
4348 {
4349 case GIMPLE_ASSIGN:
4350 /* Try to simplify by copy-propagating the definition. */
4351 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
4352
4353 case GIMPLE_PHI:
4354 /* If every argument to the PHI produces the same result when
4355 ANDed with the second comparison, we win.
4356 Do not do this unless the type is bool since we need a bool
4357 result here anyway. */
4358 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
4359 {
4360 tree result = NULL_TREE;
4361 unsigned i;
4362 for (i = 0; i < gimple_phi_num_args (stmt); i++)
4363 {
4364 tree arg = gimple_phi_arg_def (stmt, i);
4365
4366 /* If this PHI has itself as an argument, ignore it.
4367 If all the other args produce the same result,
4368 we're still OK. */
4369 if (arg == gimple_phi_result (stmt))
4370 continue;
4371 else if (TREE_CODE (arg) == INTEGER_CST)
4372 {
4373 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
4374 {
4375 if (!result)
4376 result = boolean_false_node;
4377 else if (!integer_zerop (result))
4378 return NULL_TREE;
4379 }
4380 else if (!result)
4381 result = fold_build2 (code2, boolean_type_node,
4382 op2a, op2b);
4383 else if (!same_bool_comparison_p (result,
4384 code2, op2a, op2b))
4385 return NULL_TREE;
4386 }
0e8b84ec
JJ
4387 else if (TREE_CODE (arg) == SSA_NAME
4388 && !SSA_NAME_IS_DEFAULT_DEF (arg))
e89065a1 4389 {
6c66f733 4390 tree temp;
355fe088 4391 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6c66f733
JJ
4392 /* In simple cases we can look through PHI nodes,
4393 but we have to be careful with loops.
4394 See PR49073. */
4395 if (! dom_info_available_p (CDI_DOMINATORS)
4396 || gimple_bb (def_stmt) == gimple_bb (stmt)
4397 || dominated_by_p (CDI_DOMINATORS,
4398 gimple_bb (def_stmt),
4399 gimple_bb (stmt)))
4400 return NULL_TREE;
4401 temp = and_var_with_comparison (arg, invert, code2,
4402 op2a, op2b);
e89065a1
SL
4403 if (!temp)
4404 return NULL_TREE;
4405 else if (!result)
4406 result = temp;
4407 else if (!same_bool_result_p (result, temp))
4408 return NULL_TREE;
4409 }
4410 else
4411 return NULL_TREE;
4412 }
4413 return result;
4414 }
4415
4416 default:
4417 break;
4418 }
4419 }
4420 return NULL_TREE;
4421}
4422
4423/* Try to simplify the AND of two comparisons, specified by
4424 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4425 If this can be simplified to a single expression (without requiring
4426 introducing more SSA variables to hold intermediate values),
4427 return the resulting tree. Otherwise return NULL_TREE.
4428 If the result expression is non-null, it has boolean type. */
4429
4430tree
4431maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
4432 enum tree_code code2, tree op2a, tree op2b)
4433{
4434 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4435 if (t)
4436 return t;
4437 else
4438 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4439}
4440
4441/* Helper function for or_comparisons_1: try to simplify the OR of the
4442 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4443 If INVERT is true, invert the value of VAR before doing the OR.
4444 Return NULL_EXPR if we can't simplify this to a single expression. */
4445
4446static tree
4447or_var_with_comparison (tree var, bool invert,
4448 enum tree_code code2, tree op2a, tree op2b)
4449{
4450 tree t;
355fe088 4451 gimple *stmt = SSA_NAME_DEF_STMT (var);
e89065a1
SL
4452
4453 /* We can only deal with variables whose definitions are assignments. */
4454 if (!is_gimple_assign (stmt))
4455 return NULL_TREE;
4456
4457 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4458 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
4459 Then we only have to consider the simpler non-inverted cases. */
4460 if (invert)
4461 t = and_var_with_comparison_1 (stmt,
4462 invert_tree_comparison (code2, false),
4463 op2a, op2b);
4464 else
4465 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
4466 return canonicalize_bool (t, invert);
4467}
4468
4469/* Try to simplify the OR of the ssa variable defined by the assignment
4470 STMT with the comparison specified by (OP2A CODE2 OP2B).
4471 Return NULL_EXPR if we can't simplify this to a single expression. */
4472
4473static tree
355fe088 4474or_var_with_comparison_1 (gimple *stmt,
e89065a1
SL
4475 enum tree_code code2, tree op2a, tree op2b)
4476{
4477 tree var = gimple_assign_lhs (stmt);
4478 tree true_test_var = NULL_TREE;
4479 tree false_test_var = NULL_TREE;
4480 enum tree_code innercode = gimple_assign_rhs_code (stmt);
4481
4482 /* Check for identities like (var OR (var != 0)) => true . */
4483 if (TREE_CODE (op2a) == SSA_NAME
4484 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
4485 {
4486 if ((code2 == NE_EXPR && integer_zerop (op2b))
4487 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
4488 {
4489 true_test_var = op2a;
4490 if (var == true_test_var)
4491 return var;
4492 }
4493 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
4494 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
4495 {
4496 false_test_var = op2a;
4497 if (var == false_test_var)
4498 return boolean_true_node;
4499 }
4500 }
4501
4502 /* If the definition is a comparison, recurse on it. */
4503 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
4504 {
4505 tree t = or_comparisons_1 (innercode,
4506 gimple_assign_rhs1 (stmt),
4507 gimple_assign_rhs2 (stmt),
4508 code2,
4509 op2a,
4510 op2b);
4511 if (t)
4512 return t;
4513 }
4514
4515 /* If the definition is an AND or OR expression, we may be able to
4516 simplify by reassociating. */
eb9820c0
KT
4517 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
4518 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
e89065a1
SL
4519 {
4520 tree inner1 = gimple_assign_rhs1 (stmt);
4521 tree inner2 = gimple_assign_rhs2 (stmt);
355fe088 4522 gimple *s;
e89065a1
SL
4523 tree t;
4524 tree partial = NULL_TREE;
eb9820c0 4525 bool is_or = (innercode == BIT_IOR_EXPR);
e89065a1
SL
4526
4527 /* Check for boolean identities that don't require recursive examination
4528 of inner1/inner2:
4529 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
4530 inner1 OR (inner1 AND inner2) => inner1
4531 !inner1 OR (inner1 OR inner2) => true
4532 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
4533 */
4534 if (inner1 == true_test_var)
4535 return (is_or ? var : inner1);
4536 else if (inner2 == true_test_var)
4537 return (is_or ? var : inner2);
4538 else if (inner1 == false_test_var)
4539 return (is_or
4540 ? boolean_true_node
4541 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
4542 else if (inner2 == false_test_var)
4543 return (is_or
4544 ? boolean_true_node
4545 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
4546
4547 /* Next, redistribute/reassociate the OR across the inner tests.
4548 Compute the first partial result, (inner1 OR (op2a code op2b)) */
4549 if (TREE_CODE (inner1) == SSA_NAME
4550 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
4551 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4552 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
4553 gimple_assign_rhs1 (s),
4554 gimple_assign_rhs2 (s),
4555 code2, op2a, op2b)))
4556 {
4557 /* Handle the OR case, where we are reassociating:
4558 (inner1 OR inner2) OR (op2a code2 op2b)
4559 => (t OR inner2)
4560 If the partial result t is a constant, we win. Otherwise
4561 continue on to try reassociating with the other inner test. */
8236c8eb 4562 if (is_or)
e89065a1
SL
4563 {
4564 if (integer_onep (t))
4565 return boolean_true_node;
4566 else if (integer_zerop (t))
4567 return inner2;
4568 }
4569
4570 /* Handle the AND case, where we are redistributing:
4571 (inner1 AND inner2) OR (op2a code2 op2b)
4572 => (t AND (inner2 OR (op2a code op2b))) */
8236c8eb
JJ
4573 else if (integer_zerop (t))
4574 return boolean_false_node;
4575
4576 /* Save partial result for later. */
4577 partial = t;
e89065a1
SL
4578 }
4579
4580 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
4581 if (TREE_CODE (inner2) == SSA_NAME
4582 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
4583 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4584 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
4585 gimple_assign_rhs1 (s),
4586 gimple_assign_rhs2 (s),
4587 code2, op2a, op2b)))
4588 {
4589 /* Handle the OR case, where we are reassociating:
4590 (inner1 OR inner2) OR (op2a code2 op2b)
8236c8eb
JJ
4591 => (inner1 OR t)
4592 => (t OR partial) */
4593 if (is_or)
e89065a1
SL
4594 {
4595 if (integer_zerop (t))
4596 return inner1;
4597 else if (integer_onep (t))
4598 return boolean_true_node;
8236c8eb
JJ
4599 /* If both are the same, we can apply the identity
4600 (x OR x) == x. */
4601 else if (partial && same_bool_result_p (t, partial))
4602 return t;
e89065a1
SL
4603 }
4604
4605 /* Handle the AND case, where we are redistributing:
4606 (inner1 AND inner2) OR (op2a code2 op2b)
4607 => (t AND (inner1 OR (op2a code2 op2b)))
4608 => (t AND partial) */
4609 else
4610 {
4611 if (integer_zerop (t))
4612 return boolean_false_node;
4613 else if (partial)
4614 {
4615 /* We already got a simplification for the other
4616 operand to the redistributed AND expression. The
4617 interesting case is when at least one is true.
4618 Or, if both are the same, we can apply the identity
8236c8eb 4619 (x AND x) == x. */
e89065a1
SL
4620 if (integer_onep (partial))
4621 return t;
4622 else if (integer_onep (t))
4623 return partial;
4624 else if (same_bool_result_p (t, partial))
8236c8eb 4625 return t;
e89065a1
SL
4626 }
4627 }
4628 }
4629 }
4630 return NULL_TREE;
4631}
4632
4633/* Try to simplify the OR of two comparisons defined by
4634 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4635 If this can be done without constructing an intermediate value,
4636 return the resulting tree; otherwise NULL_TREE is returned.
4637 This function is deliberately asymmetric as it recurses on SSA_DEFs
4638 in the first comparison but not the second. */
4639
4640static tree
4641or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4642 enum tree_code code2, tree op2a, tree op2b)
4643{
ae22ac3c 4644 tree truth_type = truth_type_for (TREE_TYPE (op1a));
31ed6226 4645
e89065a1
SL
4646 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
4647 if (operand_equal_p (op1a, op2a, 0)
4648 && operand_equal_p (op1b, op2b, 0))
4649 {
eb9820c0 4650 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
4651 tree t = combine_comparisons (UNKNOWN_LOCATION,
4652 TRUTH_ORIF_EXPR, code1, code2,
31ed6226 4653 truth_type, op1a, op1b);
e89065a1
SL
4654 if (t)
4655 return t;
4656 }
4657
4658 /* Likewise the swapped case of the above. */
4659 if (operand_equal_p (op1a, op2b, 0)
4660 && operand_equal_p (op1b, op2a, 0))
4661 {
eb9820c0 4662 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
4663 tree t = combine_comparisons (UNKNOWN_LOCATION,
4664 TRUTH_ORIF_EXPR, code1,
4665 swap_tree_comparison (code2),
31ed6226 4666 truth_type, op1a, op1b);
e89065a1
SL
4667 if (t)
4668 return t;
4669 }
4670
4671 /* If both comparisons are of the same value against constants, we might
4672 be able to merge them. */
4673 if (operand_equal_p (op1a, op2a, 0)
4674 && TREE_CODE (op1b) == INTEGER_CST
4675 && TREE_CODE (op2b) == INTEGER_CST)
4676 {
4677 int cmp = tree_int_cst_compare (op1b, op2b);
4678
4679 /* If we have (op1a != op1b), we should either be able to
4680 return that or TRUE, depending on whether the constant op1b
4681 also satisfies the other comparison against op2b. */
4682 if (code1 == NE_EXPR)
4683 {
4684 bool done = true;
4685 bool val;
4686 switch (code2)
4687 {
4688 case EQ_EXPR: val = (cmp == 0); break;
4689 case NE_EXPR: val = (cmp != 0); break;
4690 case LT_EXPR: val = (cmp < 0); break;
4691 case GT_EXPR: val = (cmp > 0); break;
4692 case LE_EXPR: val = (cmp <= 0); break;
4693 case GE_EXPR: val = (cmp >= 0); break;
4694 default: done = false;
4695 }
4696 if (done)
4697 {
4698 if (val)
4699 return boolean_true_node;
4700 else
4701 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4702 }
4703 }
4704 /* Likewise if the second comparison is a != comparison. */
4705 else if (code2 == NE_EXPR)
4706 {
4707 bool done = true;
4708 bool val;
4709 switch (code1)
4710 {
4711 case EQ_EXPR: val = (cmp == 0); break;
4712 case NE_EXPR: val = (cmp != 0); break;
4713 case LT_EXPR: val = (cmp > 0); break;
4714 case GT_EXPR: val = (cmp < 0); break;
4715 case LE_EXPR: val = (cmp >= 0); break;
4716 case GE_EXPR: val = (cmp <= 0); break;
4717 default: done = false;
4718 }
4719 if (done)
4720 {
4721 if (val)
4722 return boolean_true_node;
4723 else
4724 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4725 }
4726 }
4727
4728 /* See if an equality test is redundant with the other comparison. */
4729 else if (code1 == EQ_EXPR)
4730 {
4731 bool val;
4732 switch (code2)
4733 {
4734 case EQ_EXPR: val = (cmp == 0); break;
4735 case NE_EXPR: val = (cmp != 0); break;
4736 case LT_EXPR: val = (cmp < 0); break;
4737 case GT_EXPR: val = (cmp > 0); break;
4738 case LE_EXPR: val = (cmp <= 0); break;
4739 case GE_EXPR: val = (cmp >= 0); break;
4740 default:
4741 val = false;
4742 }
4743 if (val)
4744 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4745 }
4746 else if (code2 == EQ_EXPR)
4747 {
4748 bool val;
4749 switch (code1)
4750 {
4751 case EQ_EXPR: val = (cmp == 0); break;
4752 case NE_EXPR: val = (cmp != 0); break;
4753 case LT_EXPR: val = (cmp > 0); break;
4754 case GT_EXPR: val = (cmp < 0); break;
4755 case LE_EXPR: val = (cmp >= 0); break;
4756 case GE_EXPR: val = (cmp <= 0); break;
4757 default:
4758 val = false;
4759 }
4760 if (val)
4761 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4762 }
4763
4764 /* Chose the less restrictive of two < or <= comparisons. */
4765 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
4766 && (code2 == LT_EXPR || code2 == LE_EXPR))
4767 {
4768 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
4769 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4770 else
4771 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4772 }
4773
4774 /* Likewise chose the less restrictive of two > or >= comparisons. */
4775 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
4776 && (code2 == GT_EXPR || code2 == GE_EXPR))
4777 {
4778 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
4779 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4780 else
4781 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4782 }
4783
4784 /* Check for singleton ranges. */
4785 else if (cmp == 0
4786 && ((code1 == LT_EXPR && code2 == GT_EXPR)
4787 || (code1 == GT_EXPR && code2 == LT_EXPR)))
4788 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
4789
4790 /* Check for less/greater pairs that don't restrict the range at all. */
4791 else if (cmp >= 0
4792 && (code1 == LT_EXPR || code1 == LE_EXPR)
4793 && (code2 == GT_EXPR || code2 == GE_EXPR))
4794 return boolean_true_node;
4795 else if (cmp <= 0
4796 && (code1 == GT_EXPR || code1 == GE_EXPR)
4797 && (code2 == LT_EXPR || code2 == LE_EXPR))
4798 return boolean_true_node;
4799 }
4800
4801 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4802 NAME's definition is a truth value. See if there are any simplifications
4803 that can be done against the NAME's definition. */
4804 if (TREE_CODE (op1a) == SSA_NAME
4805 && (code1 == NE_EXPR || code1 == EQ_EXPR)
4806 && (integer_zerop (op1b) || integer_onep (op1b)))
4807 {
4808 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
4809 || (code1 == NE_EXPR && integer_onep (op1b)));
355fe088 4810 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
e89065a1
SL
4811 switch (gimple_code (stmt))
4812 {
4813 case GIMPLE_ASSIGN:
4814 /* Try to simplify by copy-propagating the definition. */
4815 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
4816
4817 case GIMPLE_PHI:
4818 /* If every argument to the PHI produces the same result when
4819 ORed with the second comparison, we win.
4820 Do not do this unless the type is bool since we need a bool
4821 result here anyway. */
4822 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
4823 {
4824 tree result = NULL_TREE;
4825 unsigned i;
4826 for (i = 0; i < gimple_phi_num_args (stmt); i++)
4827 {
4828 tree arg = gimple_phi_arg_def (stmt, i);
4829
4830 /* If this PHI has itself as an argument, ignore it.
4831 If all the other args produce the same result,
4832 we're still OK. */
4833 if (arg == gimple_phi_result (stmt))
4834 continue;
4835 else if (TREE_CODE (arg) == INTEGER_CST)
4836 {
4837 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
4838 {
4839 if (!result)
4840 result = boolean_true_node;
4841 else if (!integer_onep (result))
4842 return NULL_TREE;
4843 }
4844 else if (!result)
4845 result = fold_build2 (code2, boolean_type_node,
4846 op2a, op2b);
4847 else if (!same_bool_comparison_p (result,
4848 code2, op2a, op2b))
4849 return NULL_TREE;
4850 }
0e8b84ec
JJ
4851 else if (TREE_CODE (arg) == SSA_NAME
4852 && !SSA_NAME_IS_DEFAULT_DEF (arg))
e89065a1 4853 {
6c66f733 4854 tree temp;
355fe088 4855 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6c66f733
JJ
4856 /* In simple cases we can look through PHI nodes,
4857 but we have to be careful with loops.
4858 See PR49073. */
4859 if (! dom_info_available_p (CDI_DOMINATORS)
4860 || gimple_bb (def_stmt) == gimple_bb (stmt)
4861 || dominated_by_p (CDI_DOMINATORS,
4862 gimple_bb (def_stmt),
4863 gimple_bb (stmt)))
4864 return NULL_TREE;
4865 temp = or_var_with_comparison (arg, invert, code2,
4866 op2a, op2b);
e89065a1
SL
4867 if (!temp)
4868 return NULL_TREE;
4869 else if (!result)
4870 result = temp;
4871 else if (!same_bool_result_p (result, temp))
4872 return NULL_TREE;
4873 }
4874 else
4875 return NULL_TREE;
4876 }
4877 return result;
4878 }
4879
4880 default:
4881 break;
4882 }
4883 }
4884 return NULL_TREE;
4885}
4886
4887/* Try to simplify the OR of two comparisons, specified by
4888 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4889 If this can be simplified to a single expression (without requiring
4890 introducing more SSA variables to hold intermediate values),
4891 return the resulting tree. Otherwise return NULL_TREE.
4892 If the result expression is non-null, it has boolean type. */
4893
4894tree
4895maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
4896 enum tree_code code2, tree op2a, tree op2b)
4897{
4898 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4899 if (t)
4900 return t;
4901 else
4902 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4903}
cfef45c8
RG
4904
4905
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.

   Either NULL_TREE, a simplified but non-constant or a constant
   is returned.

   ???  This should go into a gimple-fold-inline.h file to be eventually
   privatized with the single valueize function used in the various TUs
   to avoid the indirect function call overhead.  */

tree
gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
				tree (*gvalueize) (tree))
{
  code_helper rcode;
  tree ops[3] = {};
  /* ???  The SSA propagators do not correctly deal with following SSA use-def
     edges if there are intermediate VARYING defs.  For this reason
     do not follow SSA edges here even though SCCVN can technically
     just deal fine with that.  */
  if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize))
    {
      /* The match-and-simplify machinery succeeded; accept the result
	 only if it is already a valid GIMPLE value or the propagator
	 hook can materialize it.  */
      tree res = NULL_TREE;
      if (rcode.is_tree_code ()
	  && (TREE_CODE_LENGTH ((tree_code) rcode) == 0
	      || ((tree_code) rcode) == ADDR_EXPR)
	  && is_gimple_val (ops[0]))
	res = ops[0];
      else if (mprts_hook)
	res = mprts_hook (rcode, gimple_expr_type (stmt), ops);
      if (res)
	{
	  if (dump_file && dump_flags & TDF_DETAILS)
	    {
	      fprintf (dump_file, "Match-and-simplified ");
	      print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, res, 0);
	      fprintf (dump_file, "\n");
	    }
	  return res;
	}
    }

  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        enum tree_code subcode = gimple_assign_rhs_code (stmt);

        switch (get_gimple_rhs_class (subcode))
          {
          case GIMPLE_SINGLE_RHS:
            {
              tree rhs = gimple_assign_rhs1 (stmt);
              enum tree_code_class kind = TREE_CODE_CLASS (subcode);

              if (TREE_CODE (rhs) == SSA_NAME)
                {
                  /* If the RHS is an SSA_NAME, return its known constant value,
                     if any.  */
                  return (*valueize) (rhs);
                }
              /* Handle propagating invariant addresses into address
                 operations.  */
              else if (TREE_CODE (rhs) == ADDR_EXPR
                       && !is_gimple_min_invariant (rhs))
                {
                  HOST_WIDE_INT offset = 0;
                  tree base;
                  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
                                                          &offset,
                                                          valueize);
                  if (base
                      && (CONSTANT_CLASS_P (base)
                          || decl_address_invariant_p (base)))
                    return build_invariant_address (TREE_TYPE (rhs),
                                                    base, offset);
                }
              /* A vector CONSTRUCTOR with one value per lane folds to a
                 VECTOR_CST when every valueized element is constant.  */
              else if (TREE_CODE (rhs) == CONSTRUCTOR
                       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
                       && (CONSTRUCTOR_NELTS (rhs)
                           == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
                {
                  unsigned i;
                  tree val, *vec;

                  vec = XALLOCAVEC (tree,
                                    TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
                  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
                    {
                      val = (*valueize) (val);
                      if (TREE_CODE (val) == INTEGER_CST
                          || TREE_CODE (val) == REAL_CST
                          || TREE_CODE (val) == FIXED_CST)
                        vec[i] = val;
                      else
                        return NULL_TREE;
                    }

                  return build_vector (TREE_TYPE (rhs), vec);
                }
              if (subcode == OBJ_TYPE_REF)
                {
                  tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
                  /* If callee is constant, we can fold away the wrapper.  */
                  if (is_gimple_min_invariant (val))
                    return val;
                }

              if (kind == tcc_reference)
                {
                  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
                       || TREE_CODE (rhs) == REALPART_EXPR
                       || TREE_CODE (rhs) == IMAGPART_EXPR)
                      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
                      return fold_unary_loc (EXPR_LOCATION (rhs),
                                             TREE_CODE (rhs),
                                             TREE_TYPE (rhs), val);
                    }
                  else if (TREE_CODE (rhs) == BIT_FIELD_REF
                           && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
                      return fold_ternary_loc (EXPR_LOCATION (rhs),
                                               TREE_CODE (rhs),
                                               TREE_TYPE (rhs), val,
                                               TREE_OPERAND (rhs, 1),
                                               TREE_OPERAND (rhs, 2));
                    }
                  else if (TREE_CODE (rhs) == MEM_REF
                           && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      /* Rewrite *&x style MEM_REFs to a reference of the
                         underlying object before the aggregate lookup.  */
                      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
                      if (TREE_CODE (val) == ADDR_EXPR
                          && is_gimple_min_invariant (val))
                        {
                          tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
                                                  unshare_expr (val),
                                                  TREE_OPERAND (rhs, 1));
                          if (tem)
                            rhs = tem;
                        }
                    }
                  return fold_const_aggregate_ref_1 (rhs, valueize);
                }
              else if (kind == tcc_declaration)
                return get_symbol_constant_value (rhs);
              return rhs;
            }

          case GIMPLE_UNARY_RHS:
            return NULL_TREE;

          case GIMPLE_BINARY_RHS:
            /* Translate &x + CST into an invariant form suitable for
               further propagation.  */
            if (subcode == POINTER_PLUS_EXPR)
              {
                tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
                tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
                if (TREE_CODE (op0) == ADDR_EXPR
                    && TREE_CODE (op1) == INTEGER_CST)
                  {
                    tree off = fold_convert (ptr_type_node, op1);
                    return build_fold_addr_expr_loc
                        (loc,
                         fold_build2 (MEM_REF,
                                      TREE_TYPE (TREE_TYPE (op0)),
                                      unshare_expr (op0), off));
                  }
              }
            /* Canonicalize bool != 0 and bool == 0 appearing after
               valueization.  While gimple_simplify handles this
               it can get confused by the ~X == 1 -> X == 0 transform
               which we can't reduce to a SSA name or a constant
               (and we have no way to tell gimple_simplify to not
               consider those transforms in the first place).  */
            else if (subcode == EQ_EXPR
                     || subcode == NE_EXPR)
              {
                tree lhs = gimple_assign_lhs (stmt);
                tree op0 = gimple_assign_rhs1 (stmt);
                if (useless_type_conversion_p (TREE_TYPE (lhs),
                                               TREE_TYPE (op0)))
                  {
                    tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
                    op0 = (*valueize) (op0);
                    /* Put the constant (if any) second for the tests below.  */
                    if (TREE_CODE (op0) == INTEGER_CST)
                      std::swap (op0, op1);
                    if (TREE_CODE (op1) == INTEGER_CST
                        && ((subcode == NE_EXPR && integer_zerop (op1))
                            || (subcode == EQ_EXPR && integer_onep (op1))))
                      return op0;
                  }
              }
            return NULL_TREE;

          case GIMPLE_TERNARY_RHS:
            {
              /* Handle ternary operators that can appear in GIMPLE form.  */
              tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
              tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
              tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
              return fold_ternary_loc (loc, subcode,
                                       gimple_expr_type (stmt), op0, op1, op2);
            }

          default:
            gcc_unreachable ();
          }
      }

    case GIMPLE_CALL:
      {
        tree fn;
        gcall *call_stmt = as_a <gcall *> (stmt);

        if (gimple_call_internal_p (stmt))
          {
            /* UBSAN overflow-checking builtins fold like their plain
               arithmetic counterparts when no overflow occurs.  */
            enum tree_code subcode = ERROR_MARK;
            switch (gimple_call_internal_fn (stmt))
              {
              case IFN_UBSAN_CHECK_ADD:
                subcode = PLUS_EXPR;
                break;
              case IFN_UBSAN_CHECK_SUB:
                subcode = MINUS_EXPR;
                break;
              case IFN_UBSAN_CHECK_MUL:
                subcode = MULT_EXPR;
                break;
              default:
                return NULL_TREE;
              }
            tree arg0 = gimple_call_arg (stmt, 0);
            tree arg1 = gimple_call_arg (stmt, 1);
            tree op0 = (*valueize) (arg0);
            tree op1 = (*valueize) (arg1);

            if (TREE_CODE (op0) != INTEGER_CST
                || TREE_CODE (op1) != INTEGER_CST)
              {
                switch (subcode)
                  {
                  case MULT_EXPR:
                    /* x * 0 = 0 * x = 0 without overflow.  */
                    if (integer_zerop (op0) || integer_zerop (op1))
                      return build_zero_cst (TREE_TYPE (arg0));
                    break;
                  case MINUS_EXPR:
                    /* y - y = 0 without overflow.  */
                    if (operand_equal_p (op0, op1, 0))
                      return build_zero_cst (TREE_TYPE (arg0));
                    break;
                  default:
                    break;
                  }
              }
            tree res
              = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
            if (res
                && TREE_CODE (res) == INTEGER_CST
                && !TREE_OVERFLOW (res))
              return res;
            return NULL_TREE;
          }

        fn = (*valueize) (gimple_call_fn (stmt));
        if (TREE_CODE (fn) == ADDR_EXPR
            && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
            && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
            && gimple_builtin_call_types_compatible_p (stmt,
                                                       TREE_OPERAND (fn, 0)))
          {
            tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
            tree retval;
            unsigned i;
            for (i = 0; i < gimple_call_num_args (stmt); ++i)
              args[i] = (*valueize) (gimple_call_arg (stmt, i));
            retval = fold_builtin_call_array (loc,
                                              gimple_call_return_type (call_stmt),
                                              fn, gimple_call_num_args (stmt), args);
            if (retval)
              {
                /* fold_call_expr wraps the result inside a NOP_EXPR.  */
                STRIP_NOPS (retval);
                retval = fold_convert (gimple_call_return_type (call_stmt),
                                       retval);
              }
            return retval;
          }
        return NULL_TREE;
      }

    default:
      return NULL_TREE;
    }
}
5207
5208/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5209 Returns NULL_TREE if folding to a constant is not possible, otherwise
5210 returns a constant according to is_gimple_min_invariant. */
5211
5212tree
355fe088 5213gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
cfef45c8
RG
5214{
5215 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
5216 if (res && is_gimple_min_invariant (res))
5217 return res;
5218 return NULL_TREE;
5219}
5220
5221
5222/* The following set of functions are supposed to fold references using
5223 their constant initializers. */
5224
cfef45c8
RG
5225/* See if we can find constructor defining value of BASE.
5226 When we know the consructor with constant offset (such as
5227 base is array[40] and we do know constructor of array), then
5228 BIT_OFFSET is adjusted accordingly.
5229
5230 As a special case, return error_mark_node when constructor
5231 is not explicitly available, but it is known to be zero
5232 such as 'static const int a;'. */
5233static tree
5234get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
5235 tree (*valueize)(tree))
5236{
5237 HOST_WIDE_INT bit_offset2, size, max_size;
5238 if (TREE_CODE (base) == MEM_REF)
5239 {
5240 if (!integer_zerop (TREE_OPERAND (base, 1)))
5241 {
9541ffee 5242 if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
cfef45c8 5243 return NULL_TREE;
807e902e 5244 *bit_offset += (mem_ref_offset (base).to_short_addr ()
cfef45c8
RG
5245 * BITS_PER_UNIT);
5246 }
5247
5248 if (valueize
5249 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
5250 base = valueize (TREE_OPERAND (base, 0));
5251 if (!base || TREE_CODE (base) != ADDR_EXPR)
5252 return NULL_TREE;
5253 base = TREE_OPERAND (base, 0);
5254 }
5255
5256 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
5257 DECL_INITIAL. If BASE is a nested reference into another
5258 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
5259 the inner reference. */
5260 switch (TREE_CODE (base))
5261 {
5262 case VAR_DECL:
cfef45c8 5263 case CONST_DECL:
6a6dac52
JH
5264 {
5265 tree init = ctor_for_folding (base);
5266
688010ba 5267 /* Our semantic is exact opposite of ctor_for_folding;
6a6dac52
JH
5268 NULL means unknown, while error_mark_node is 0. */
5269 if (init == error_mark_node)
5270 return NULL_TREE;
5271 if (!init)
5272 return error_mark_node;
5273 return init;
5274 }
cfef45c8
RG
5275
5276 case ARRAY_REF:
5277 case COMPONENT_REF:
5278 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
5279 if (max_size == -1 || size != max_size)
5280 return NULL_TREE;
5281 *bit_offset += bit_offset2;
5282 return get_base_constructor (base, bit_offset, valueize);
5283
5284 case STRING_CST:
5285 case CONSTRUCTOR:
5286 return base;
5287
5288 default:
5289 return NULL_TREE;
5290 }
5291}
5292
cfef45c8
RG
/* CTOR is CONSTRUCTOR of an array type.  Fold reference of type TYPE and size
   SIZE to the memory at bit OFFSET.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size,
			   tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;
  offset_int low_bound;
  offset_int elt_size;
  offset_int index, max_index;
  offset_int access_index;
  tree domain_type = NULL_TREE, index_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects makes no sense.  */
      gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
      index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = 0;
  /* Static constructors for variably sized objects makes no sense.  */
  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
	      == INTEGER_CST);
  elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* We can handle only constantly sized accesses that are known to not
     be larger than size of array element.  */
  if (!TYPE_SIZE_UNIT (type)
      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
      || wi::lts_p (elt_size, wi::to_offset (TYPE_SIZE_UNIT (type)))
      || elt_size == 0)
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
				 elt_size);
  access_index += low_bound;
  /* Wrap the index into the precision/sign of the array's index type.  */
  if (index_type)
    access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
			    TYPE_SIGN (index_type));

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  /* See if the array field is large enough to span whole access.  We do not
     care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
    return NULL_TREE;

  /* Start one below LOW_BOUND so an index-less first element lands on
     LOW_BOUND after the "+= 1" in the loop below.  */
  index = low_bound - 1;
  if (index_type)
    index = wi::ext (index, TYPE_PRECISION (index_type),
		     TYPE_SIGN (index_type));

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
    {
      /* Array constructor might explicitly set index, or specify range
	 or leave index NULL meaning that it is next index after previous
	 one.  */
      if (cfield)
	{
	  if (TREE_CODE (cfield) == INTEGER_CST)
	    max_index = index = wi::to_offset (cfield);
	  else
	    {
	      gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
	      index = wi::to_offset (TREE_OPERAND (cfield, 0));
	      max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
	    }
	}
      else
	{
	  index += 1;
	  if (index_type)
	    index = wi::ext (index, TYPE_PRECISION (index_type),
			     TYPE_SIGN (index_type));
	  max_index = index;
	}

      /* Do we have match?  */
      if (wi::cmpu (access_index, index) >= 0
	  && wi::cmpu (access_index, max_index) <= 0)
	return fold_ctor_reference (type, cval, inner_offset, size,
				    from_decl);
    }
  /* When memory is not explicitly mentioned in constructor,
     it is 0 (or out of range).  */
  return build_zero_cst (type);
}
5392
/* CTOR is CONSTRUCTOR of an aggregate or vector.
   Fold reference of type TYPE and size SIZE to the memory at bit OFFSET.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);
      offset_int bitoffset;
      offset_int bitoffset_end, access_end;

      /* Variable sized objects in static constructors makes no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      bitoffset = (wi::to_offset (field_offset)
		   + wi::lshift (wi::to_offset (byte_offset),
				 LOG2_BITS_PER_UNIT));
      /* Compute bit offset where the field ends.  */
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + wi::to_offset (field_size);
      else
	bitoffset_end = 0;

      access_end = offset_int (offset) + size;

      /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
	 [BITOFFSET, BITOFFSET_END)?  */
      if (wi::cmps (access_end, bitoffset) > 0
	  && (field_size == NULL_TREE
	      || wi::lts_p (offset, bitoffset_end)))
	{
	  offset_int inner_offset = offset_int (offset) - bitoffset;
	  /* We do have overlap.  Now see if field is large enough to
	     cover the access.  Give up for accesses spanning multiple
	     fields.  */
	  if (wi::cmps (access_end, bitoffset_end) > 0)
	    return NULL_TREE;
	  if (wi::lts_p (offset, bitoffset))
	    return NULL_TREE;
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl);
	}
    }
  /* When memory is not explicitly mentioned in constructor, it is 0.  */
  return build_zero_cst (type);
}
5456
/* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
   to the memory at bit OFFSET.  */

tree
fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
		     unsigned HOST_WIDE_INT size, tree from_decl)
{
  tree ret;

  /* We found the field with exact match.  */
  if (useless_type_conversion_p (type, TREE_TYPE (ctor))
      && !offset)
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && !compare_tree_int (TYPE_SIZE (type), size)
      && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
      if (ret)
	STRIP_USELESS_TYPE_CONVERSION (ret);
      return ret;
    }
  /* For constants and byte-aligned/sized reads try to go through
     native_encode/interpret.  */
  if (CONSTANT_CLASS_P (ctor)
      && BITS_PER_UNIT == 8
      && offset % BITS_PER_UNIT == 0
      && size % BITS_PER_UNIT == 0
      && size <= MAX_BITSIZE_MODE_ANY_MODE)
    {
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      if (native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
			      offset / BITS_PER_UNIT) > 0)
	return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
    }
  /* Otherwise recurse into the CONSTRUCTOR, dispatching on whether it
     initializes an array/vector (indexed) or a record (field list).  */
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {

      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
	  || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
	return fold_array_ctor_reference (type, ctor, offset, size,
					  from_decl);
      else
	return fold_nonarray_ctor_reference (type, ctor, offset, size,
					     from_decl);
    }

  return NULL_TREE;
}
5511
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  HOST_WIDE_INT offset, size, max_size;
  tree tem;

  /* Never fold reads from volatile memory.  */
  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (DECL_P (t))
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && TREE_CODE (idx) == INTEGER_CST)
	{
	  tree low_bound, unit_size;

	  /* If the resulting bit-offset is constant, track it.  */
	  if ((low_bound = array_ref_low_bound (t),
	       TREE_CODE (low_bound) == INTEGER_CST)
	      && (unit_size = array_ref_element_size (t),
		  tree_fits_uhwi_p (unit_size)))
	    {
	      offset_int woffset
		= wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
			    TYPE_PRECISION (TREE_TYPE (idx)));

	      if (wi::fits_shwi_p (woffset))
		{
		  offset = woffset.to_shwi ();
		  /* TODO: This code seems wrong, multiply then check
		     to see if it fits.  */
		  offset *= tree_to_uhwi (unit_size);
		  offset *= BITS_PER_UNIT;

		  base = TREE_OPERAND (t, 0);
		  ctor = get_base_constructor (base, &offset, valueize);
		  /* Empty constructor.  Always fold to 0.  */
		  if (ctor == error_mark_node)
		    return build_zero_cst (TREE_TYPE (t));
		  /* Out of bound array access.  Value is undefined,
		     but don't fold.  */
		  if (offset < 0)
		    return NULL_TREE;
		  /* We can not determine ctor.  */
		  if (!ctor)
		    return NULL_TREE;
		  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					      tree_to_uhwi (unit_size)
					      * BITS_PER_UNIT,
					      base);
		}
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (max_size == -1 || max_size != size)
	return NULL_TREE;
      /* We can not determine ctor.  */
      if (!ctor)
	return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (offset < 0)
	return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
				  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	/* Fold the operand to a COMPLEX_CST, then pick the requested part.  */
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
5628
/* Wrapper around fold_const_aggregate_ref_1 without SSA valueization;
   folds T only when all indexes in it are already constant.  */

tree
fold_const_aggregate_ref (tree t)
{
  return fold_const_aggregate_ref_1 (t, NULL);
}
06bc3ec7 5634
85942f45 5635/* Lookup virtual method with index TOKEN in a virtual table V
ec77d61f
JH
5636 at OFFSET.
5637 Set CAN_REFER if non-NULL to false if method
5638 is not referable or if the virtual table is ill-formed (such as rewriten
5639 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
81fa35bd
MJ
5640
5641tree
85942f45
JH
5642gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
5643 tree v,
ec77d61f
JH
5644 unsigned HOST_WIDE_INT offset,
5645 bool *can_refer)
81fa35bd 5646{
85942f45
JH
5647 tree vtable = v, init, fn;
5648 unsigned HOST_WIDE_INT size;
8c311b50
JH
5649 unsigned HOST_WIDE_INT elt_size, access_index;
5650 tree domain_type;
81fa35bd 5651
ec77d61f
JH
5652 if (can_refer)
5653 *can_refer = true;
5654
9de2f554 5655 /* First of all double check we have virtual table. */
81fa35bd 5656 if (TREE_CODE (v) != VAR_DECL
2aa3da06 5657 || !DECL_VIRTUAL_P (v))
ec77d61f 5658 {
ec77d61f
JH
5659 /* Pass down that we lost track of the target. */
5660 if (can_refer)
5661 *can_refer = false;
5662 return NULL_TREE;
5663 }
9de2f554 5664
2aa3da06
JH
5665 init = ctor_for_folding (v);
5666
9de2f554 5667 /* The virtual tables should always be born with constructors
2aa3da06
JH
5668 and we always should assume that they are avaialble for
5669 folding. At the moment we do not stream them in all cases,
5670 but it should never happen that ctor seem unreachable. */
5671 gcc_assert (init);
5672 if (init == error_mark_node)
5673 {
5674 gcc_assert (in_lto_p);
ec77d61f
JH
5675 /* Pass down that we lost track of the target. */
5676 if (can_refer)
5677 *can_refer = false;
2aa3da06
JH
5678 return NULL_TREE;
5679 }
81fa35bd 5680 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
ae7e9ddd 5681 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
85942f45 5682 offset *= BITS_PER_UNIT;
81fa35bd 5683 offset += token * size;
9de2f554 5684
8c311b50
JH
5685 /* Lookup the value in the constructor that is assumed to be array.
5686 This is equivalent to
5687 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
5688 offset, size, NULL);
5689 but in a constant time. We expect that frontend produced a simple
5690 array without indexed initializers. */
5691
5692 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
5693 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
5694 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
5695 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
5696
5697 access_index = offset / BITS_PER_UNIT / elt_size;
5698 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
5699
5700 /* This code makes an assumption that there are no
5701 indexed fileds produced by C++ FE, so we can directly index the array. */
5702 if (access_index < CONSTRUCTOR_NELTS (init))
5703 {
5704 fn = CONSTRUCTOR_ELT (init, access_index)->value;
5705 gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
5706 STRIP_NOPS (fn);
5707 }
5708 else
5709 fn = NULL;
9de2f554
JH
5710
5711 /* For type inconsistent program we may end up looking up virtual method
5712 in virtual table that does not contain TOKEN entries. We may overrun
5713 the virtual table and pick up a constant or RTTI info pointer.
5714 In any case the call is undefined. */
5715 if (!fn
5716 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
5717 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
5718 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
5719 else
5720 {
5721 fn = TREE_OPERAND (fn, 0);
5722
5723 /* When cgraph node is missing and function is not public, we cannot
5724 devirtualize. This can happen in WHOPR when the actual method
5725 ends up in other partition, because we found devirtualization
5726 possibility too late. */
5727 if (!can_refer_decl_in_current_unit_p (fn, vtable))
ec77d61f
JH
5728 {
5729 if (can_refer)
5730 {
5731 *can_refer = false;
5732 return fn;
5733 }
5734 return NULL_TREE;
5735 }
9de2f554 5736 }
81fa35bd 5737
7501ca28
RG
5738 /* Make sure we create a cgraph node for functions we'll reference.
5739 They can be non-existent if the reference comes from an entry
5740 of an external vtable for example. */
d52f5295 5741 cgraph_node::get_create (fn);
7501ca28 5742
81fa35bd
MJ
5743 return fn;
5744}
5745
85942f45
JH
5746/* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
5747 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
5748 KNOWN_BINFO carries the binfo describing the true type of
ec77d61f
JH
5749 OBJ_TYPE_REF_OBJECT(REF).
5750 Set CAN_REFER if non-NULL to false if method
5751 is not referable or if the virtual table is ill-formed (such as rewriten
5752 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
85942f45
JH
5753
5754tree
ec77d61f
JH
5755gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
5756 bool *can_refer)
85942f45
JH
5757{
5758 unsigned HOST_WIDE_INT offset;
5759 tree v;
5760
5761 v = BINFO_VTABLE (known_binfo);
5762 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
5763 if (!v)
5764 return NULL_TREE;
5765
5766 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
ec77d61f
JH
5767 {
5768 if (can_refer)
5769 *can_refer = false;
5770 return NULL_TREE;
5771 }
5772 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
85942f45
JH
5773}
5774
06bc3ec7
BS
5775/* Return true iff VAL is a gimple expression that is known to be
5776 non-negative. Restricted to floating-point inputs. */
5777
5778bool
5779gimple_val_nonnegative_real_p (tree val)
5780{
355fe088 5781 gimple *def_stmt;
06bc3ec7
BS
5782
5783 gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));
5784
5785 /* Use existing logic for non-gimple trees. */
5786 if (tree_expr_nonnegative_p (val))
5787 return true;
5788
5789 if (TREE_CODE (val) != SSA_NAME)
5790 return false;
5791
5792 /* Currently we look only at the immediately defining statement
5793 to make this determination, since recursion on defining
5794 statements of operands can lead to quadratic behavior in the
5795 worst case. This is expected to catch almost all occurrences
5796 in practice. It would be possible to implement limited-depth
5797 recursion if important cases are lost. Alternatively, passes
5798 that need this information (such as the pow/powi lowering code
5799 in the cse_sincos pass) could be revised to provide it through
5800 dataflow propagation. */
5801
5802 def_stmt = SSA_NAME_DEF_STMT (val);
5803
5804 if (is_gimple_assign (def_stmt))
5805 {
5806 tree op0, op1;
5807
5808 /* See fold-const.c:tree_expr_nonnegative_p for additional
5809 cases that could be handled with recursion. */
5810
5811 switch (gimple_assign_rhs_code (def_stmt))
5812 {
5813 case ABS_EXPR:
5814 /* Always true for floating-point operands. */
5815 return true;
5816
5817 case MULT_EXPR:
5818 /* True if the two operands are identical (since we are
5819 restricted to floating-point inputs). */
5820 op0 = gimple_assign_rhs1 (def_stmt);
5821 op1 = gimple_assign_rhs2 (def_stmt);
5822
5823 if (op0 == op1
5824 || operand_equal_p (op0, op1, 0))
5825 return true;
5826
5827 default:
5828 return false;
5829 }
5830 }
5831 else if (is_gimple_call (def_stmt))
5832 {
5833 tree fndecl = gimple_call_fndecl (def_stmt);
5834 if (fndecl
5835 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
5836 {
5837 tree arg1;
5838
5839 switch (DECL_FUNCTION_CODE (fndecl))
5840 {
5841 CASE_FLT_FN (BUILT_IN_ACOS):
5842 CASE_FLT_FN (BUILT_IN_ACOSH):
5843 CASE_FLT_FN (BUILT_IN_CABS):
5844 CASE_FLT_FN (BUILT_IN_COSH):
5845 CASE_FLT_FN (BUILT_IN_ERFC):
5846 CASE_FLT_FN (BUILT_IN_EXP):
5847 CASE_FLT_FN (BUILT_IN_EXP10):
5848 CASE_FLT_FN (BUILT_IN_EXP2):
5849 CASE_FLT_FN (BUILT_IN_FABS):
5850 CASE_FLT_FN (BUILT_IN_FDIM):
5851 CASE_FLT_FN (BUILT_IN_HYPOT):
5852 CASE_FLT_FN (BUILT_IN_POW10):
5853 return true;
5854
5855 CASE_FLT_FN (BUILT_IN_SQRT):
5856 /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
5857 nonnegative inputs. */
3d3dbadd 5858 if (!HONOR_SIGNED_ZEROS (val))
06bc3ec7
BS
5859 return true;
5860
5861 break;
5862
5863 CASE_FLT_FN (BUILT_IN_POWI):
5864 /* True if the second argument is an even integer. */
5865 arg1 = gimple_call_arg (def_stmt, 1);
5866
5867 if (TREE_CODE (arg1) == INTEGER_CST
5868 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
5869 return true;
5870
5871 break;
5872
5873 CASE_FLT_FN (BUILT_IN_POW):
5874 /* True if the second argument is an even integer-valued
5875 real. */
5876 arg1 = gimple_call_arg (def_stmt, 1);
5877
5878 if (TREE_CODE (arg1) == REAL_CST)
5879 {
5880 REAL_VALUE_TYPE c;
5881 HOST_WIDE_INT n;
5882
5883 c = TREE_REAL_CST (arg1);
5884 n = real_to_integer (&c);
5885
5886 if ((n & 1) == 0)
5887 {
5888 REAL_VALUE_TYPE cint;
807e902e 5889 real_from_integer (&cint, VOIDmode, n, SIGNED);
06bc3ec7
BS
5890 if (real_identical (&c, &cint))
5891 return true;
5892 }
5893 }
5894
5895 break;
5896
5897 default:
5898 return false;
5899 }
5900 }
5901 }
5902
5903 return false;
5904}
b184c8f1
AM
5905
5906/* Given a pointer value OP0, return a simplified version of an
5907 indirection through OP0, or NULL_TREE if no simplification is
5908 possible. Note that the resulting type may be different from
5909 the type pointed to in the sense that it is still compatible
5910 from the langhooks point of view. */
5911
5912tree
5913gimple_fold_indirect_ref (tree t)
5914{
5915 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
5916 tree sub = t;
5917 tree subtype;
5918
5919 STRIP_NOPS (sub);
5920 subtype = TREE_TYPE (sub);
5921 if (!POINTER_TYPE_P (subtype))
5922 return NULL_TREE;
5923
5924 if (TREE_CODE (sub) == ADDR_EXPR)
5925 {
5926 tree op = TREE_OPERAND (sub, 0);
5927 tree optype = TREE_TYPE (op);
5928 /* *&p => p */
5929 if (useless_type_conversion_p (type, optype))
5930 return op;
5931
5932 /* *(foo *)&fooarray => fooarray[0] */
5933 if (TREE_CODE (optype) == ARRAY_TYPE
5934 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
5935 && useless_type_conversion_p (type, TREE_TYPE (optype)))
5936 {
5937 tree type_domain = TYPE_DOMAIN (optype);
5938 tree min_val = size_zero_node;
5939 if (type_domain && TYPE_MIN_VALUE (type_domain))
5940 min_val = TYPE_MIN_VALUE (type_domain);
5941 if (TREE_CODE (min_val) == INTEGER_CST)
5942 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
5943 }
5944 /* *(foo *)&complexfoo => __real__ complexfoo */
5945 else if (TREE_CODE (optype) == COMPLEX_TYPE
5946 && useless_type_conversion_p (type, TREE_TYPE (optype)))
5947 return fold_build1 (REALPART_EXPR, type, op);
5948 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
5949 else if (TREE_CODE (optype) == VECTOR_TYPE
5950 && useless_type_conversion_p (type, TREE_TYPE (optype)))
5951 {
5952 tree part_width = TYPE_SIZE (type);
5953 tree index = bitsize_int (0);
5954 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
5955 }
5956 }
5957
5958 /* *(p + CST) -> ... */
5959 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
5960 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
5961 {
5962 tree addr = TREE_OPERAND (sub, 0);
5963 tree off = TREE_OPERAND (sub, 1);
5964 tree addrtype;
5965
5966 STRIP_NOPS (addr);
5967 addrtype = TREE_TYPE (addr);
5968
5969 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
5970 if (TREE_CODE (addr) == ADDR_EXPR
5971 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
5972 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
cc269bb6 5973 && tree_fits_uhwi_p (off))
b184c8f1 5974 {
ae7e9ddd 5975 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
b184c8f1
AM
5976 tree part_width = TYPE_SIZE (type);
5977 unsigned HOST_WIDE_INT part_widthi
9439e9a1 5978 = tree_to_shwi (part_width) / BITS_PER_UNIT;
b184c8f1
AM
5979 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
5980 tree index = bitsize_int (indexi);
5981 if (offset / part_widthi
e934916c 5982 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
b184c8f1
AM
5983 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
5984 part_width, index);
5985 }
5986
5987 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
5988 if (TREE_CODE (addr) == ADDR_EXPR
5989 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
5990 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
5991 {
5992 tree size = TYPE_SIZE_UNIT (type);
5993 if (tree_int_cst_equal (size, off))
5994 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
5995 }
5996
5997 /* *(p + CST) -> MEM_REF <p, CST>. */
5998 if (TREE_CODE (addr) != ADDR_EXPR
5999 || DECL_P (TREE_OPERAND (addr, 0)))
6000 return fold_build2 (MEM_REF, type,
6001 addr,
807e902e 6002 wide_int_to_tree (ptype, off));
b184c8f1
AM
6003 }
6004
6005 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
6006 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
6007 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
6008 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
6009 {
6010 tree type_domain;
6011 tree min_val = size_zero_node;
6012 tree osub = sub;
6013 sub = gimple_fold_indirect_ref (sub);
6014 if (! sub)
6015 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
6016 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
6017 if (type_domain && TYPE_MIN_VALUE (type_domain))
6018 min_val = TYPE_MIN_VALUE (type_domain);
6019 if (TREE_CODE (min_val) == INTEGER_CST)
6020 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
6021 }
6022
6023 return NULL_TREE;
6024}
19e51b40
JJ
6025
6026/* Return true if CODE is an operation that when operating on signed
6027 integer types involves undefined behavior on overflow and the
6028 operation can be expressed with unsigned arithmetic. */
6029
6030bool
6031arith_code_with_undefined_signed_overflow (tree_code code)
6032{
6033 switch (code)
6034 {
6035 case PLUS_EXPR:
6036 case MINUS_EXPR:
6037 case MULT_EXPR:
6038 case NEGATE_EXPR:
6039 case POINTER_PLUS_EXPR:
6040 return true;
6041 default:
6042 return false;
6043 }
6044}
6045
6046/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
6047 operation that can be transformed to unsigned arithmetic by converting
6048 its operand, carrying out the operation in the corresponding unsigned
6049 type and converting the result back to the original type.
6050
6051 Returns a sequence of statements that replace STMT and also contain
6052 a modified form of STMT itself. */
6053
6054gimple_seq
355fe088 6055rewrite_to_defined_overflow (gimple *stmt)
19e51b40
JJ
6056{
6057 if (dump_file && (dump_flags & TDF_DETAILS))
6058 {
6059 fprintf (dump_file, "rewriting stmt with undefined signed "
6060 "overflow ");
6061 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
6062 }
6063
6064 tree lhs = gimple_assign_lhs (stmt);
6065 tree type = unsigned_type_for (TREE_TYPE (lhs));
6066 gimple_seq stmts = NULL;
6067 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
6068 {
6069 gimple_seq stmts2 = NULL;
6070 gimple_set_op (stmt, i,
6071 force_gimple_operand (fold_convert (type,
6072 gimple_op (stmt, i)),
6073 &stmts2, true, NULL_TREE));
6074 gimple_seq_add_seq (&stmts, stmts2);
6075 }
6076 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
6077 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
6078 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
6079 gimple_seq_add_stmt (&stmts, stmt);
355fe088 6080 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
19e51b40
JJ
6081 gimple_seq_add_stmt (&stmts, cvt);
6082
6083 return stmts;
6084}
d4f5cd5e 6085
3d2cf79f 6086
c26de36d
RB
6087/* The valueization hook we use for the gimple_build API simplification.
6088 This makes us match fold_buildN behavior by only combining with
6089 statements in the sequence(s) we are currently building. */
6090
6091static tree
6092gimple_build_valueize (tree op)
6093{
6094 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
6095 return op;
6096 return NULL_TREE;
6097}
6098
3d2cf79f 6099/* Build the expression CODE OP0 of type TYPE with location LOC,
c26de36d 6100 simplifying it first if possible. Returns the built
3d2cf79f
RB
6101 expression value and appends statements possibly defining it
6102 to SEQ. */
6103
6104tree
6105gimple_build (gimple_seq *seq, location_t loc,
c26de36d 6106 enum tree_code code, tree type, tree op0)
3d2cf79f 6107{
c26de36d 6108 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
3d2cf79f
RB
6109 if (!res)
6110 {
6111 if (gimple_in_ssa_p (cfun))
b731b390 6112 res = make_ssa_name (type);
3d2cf79f 6113 else
b731b390 6114 res = create_tmp_reg (type);
355fe088 6115 gimple *stmt;
3d2cf79f
RB
6116 if (code == REALPART_EXPR
6117 || code == IMAGPART_EXPR
6118 || code == VIEW_CONVERT_EXPR)
0d0e4a03 6119 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
3d2cf79f 6120 else
0d0e4a03 6121 stmt = gimple_build_assign (res, code, op0);
3d2cf79f
RB
6122 gimple_set_location (stmt, loc);
6123 gimple_seq_add_stmt_without_update (seq, stmt);
6124 }
6125 return res;
6126}
6127
6128/* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
c26de36d 6129 simplifying it first if possible. Returns the built
3d2cf79f
RB
6130 expression value and appends statements possibly defining it
6131 to SEQ. */
6132
6133tree
6134gimple_build (gimple_seq *seq, location_t loc,
c26de36d 6135 enum tree_code code, tree type, tree op0, tree op1)
3d2cf79f 6136{
c26de36d 6137 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
3d2cf79f
RB
6138 if (!res)
6139 {
6140 if (gimple_in_ssa_p (cfun))
b731b390 6141 res = make_ssa_name (type);
3d2cf79f 6142 else
b731b390 6143 res = create_tmp_reg (type);
355fe088 6144 gimple *stmt = gimple_build_assign (res, code, op0, op1);
3d2cf79f
RB
6145 gimple_set_location (stmt, loc);
6146 gimple_seq_add_stmt_without_update (seq, stmt);
6147 }
6148 return res;
6149}
6150
6151/* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
c26de36d 6152 simplifying it first if possible. Returns the built
3d2cf79f
RB
6153 expression value and appends statements possibly defining it
6154 to SEQ. */
6155
6156tree
6157gimple_build (gimple_seq *seq, location_t loc,
c26de36d 6158 enum tree_code code, tree type, tree op0, tree op1, tree op2)
3d2cf79f
RB
6159{
6160 tree res = gimple_simplify (code, type, op0, op1, op2,
c26de36d 6161 seq, gimple_build_valueize);
3d2cf79f
RB
6162 if (!res)
6163 {
6164 if (gimple_in_ssa_p (cfun))
b731b390 6165 res = make_ssa_name (type);
3d2cf79f 6166 else
b731b390 6167 res = create_tmp_reg (type);
355fe088 6168 gimple *stmt;
3d2cf79f 6169 if (code == BIT_FIELD_REF)
0d0e4a03
JJ
6170 stmt = gimple_build_assign (res, code,
6171 build3 (code, type, op0, op1, op2));
3d2cf79f 6172 else
0d0e4a03 6173 stmt = gimple_build_assign (res, code, op0, op1, op2);
3d2cf79f
RB
6174 gimple_set_location (stmt, loc);
6175 gimple_seq_add_stmt_without_update (seq, stmt);
6176 }
6177 return res;
6178}
6179
6180/* Build the call FN (ARG0) with a result of type TYPE
6181 (or no result if TYPE is void) with location LOC,
c26de36d 6182 simplifying it first if possible. Returns the built
3d2cf79f
RB
6183 expression value (or NULL_TREE if TYPE is void) and appends
6184 statements possibly defining it to SEQ. */
6185
6186tree
6187gimple_build (gimple_seq *seq, location_t loc,
c26de36d 6188 enum built_in_function fn, tree type, tree arg0)
3d2cf79f 6189{
c26de36d 6190 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
3d2cf79f
RB
6191 if (!res)
6192 {
6193 tree decl = builtin_decl_implicit (fn);
355fe088 6194 gimple *stmt = gimple_build_call (decl, 1, arg0);
3d2cf79f
RB
6195 if (!VOID_TYPE_P (type))
6196 {
6197 if (gimple_in_ssa_p (cfun))
b731b390 6198 res = make_ssa_name (type);
3d2cf79f 6199 else
b731b390 6200 res = create_tmp_reg (type);
3d2cf79f
RB
6201 gimple_call_set_lhs (stmt, res);
6202 }
6203 gimple_set_location (stmt, loc);
6204 gimple_seq_add_stmt_without_update (seq, stmt);
6205 }
6206 return res;
6207}
6208
6209/* Build the call FN (ARG0, ARG1) with a result of type TYPE
6210 (or no result if TYPE is void) with location LOC,
c26de36d 6211 simplifying it first if possible. Returns the built
3d2cf79f
RB
6212 expression value (or NULL_TREE if TYPE is void) and appends
6213 statements possibly defining it to SEQ. */
6214
6215tree
6216gimple_build (gimple_seq *seq, location_t loc,
c26de36d 6217 enum built_in_function fn, tree type, tree arg0, tree arg1)
3d2cf79f 6218{
c26de36d 6219 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
3d2cf79f
RB
6220 if (!res)
6221 {
6222 tree decl = builtin_decl_implicit (fn);
355fe088 6223 gimple *stmt = gimple_build_call (decl, 2, arg0, arg1);
3d2cf79f
RB
6224 if (!VOID_TYPE_P (type))
6225 {
6226 if (gimple_in_ssa_p (cfun))
b731b390 6227 res = make_ssa_name (type);
3d2cf79f 6228 else
b731b390 6229 res = create_tmp_reg (type);
3d2cf79f
RB
6230 gimple_call_set_lhs (stmt, res);
6231 }
6232 gimple_set_location (stmt, loc);
6233 gimple_seq_add_stmt_without_update (seq, stmt);
6234 }
6235 return res;
6236}
6237
6238/* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
6239 (or no result if TYPE is void) with location LOC,
c26de36d 6240 simplifying it first if possible. Returns the built
3d2cf79f
RB
6241 expression value (or NULL_TREE if TYPE is void) and appends
6242 statements possibly defining it to SEQ. */
6243
6244tree
6245gimple_build (gimple_seq *seq, location_t loc,
6246 enum built_in_function fn, tree type,
c26de36d 6247 tree arg0, tree arg1, tree arg2)
3d2cf79f 6248{
c26de36d
RB
6249 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
6250 seq, gimple_build_valueize);
3d2cf79f
RB
6251 if (!res)
6252 {
6253 tree decl = builtin_decl_implicit (fn);
355fe088 6254 gimple *stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
3d2cf79f
RB
6255 if (!VOID_TYPE_P (type))
6256 {
6257 if (gimple_in_ssa_p (cfun))
b731b390 6258 res = make_ssa_name (type);
3d2cf79f 6259 else
b731b390 6260 res = create_tmp_reg (type);
3d2cf79f
RB
6261 gimple_call_set_lhs (stmt, res);
6262 }
6263 gimple_set_location (stmt, loc);
6264 gimple_seq_add_stmt_without_update (seq, stmt);
6265 }
6266 return res;
6267}
6268
6269/* Build the conversion (TYPE) OP with a result of type TYPE
6270 with location LOC if such conversion is neccesary in GIMPLE,
6271 simplifying it first.
6272 Returns the built expression value and appends
6273 statements possibly defining it to SEQ. */
d4f5cd5e
RB
6274
6275tree
6276gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
6277{
6278 if (useless_type_conversion_p (type, TREE_TYPE (op)))
6279 return op;
3d2cf79f 6280 return gimple_build (seq, loc, NOP_EXPR, type, op);
d4f5cd5e 6281}