/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2020 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "gimple-ssa-warn-restrict.h"
#include "fold-const.h"
#include "stmt.h"
#include "expr.h"
#include "stor-layout.h"
#include "dumpfile.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-object-size.h"
#include "tree-ssa.h"
#include "tree-ssa-propagate.h"
#include "ipa-utils.h"
#include "tree-ssa-address.h"
#include "langhooks.h"
#include "gimplify-me.h"
#include "dbgcnt.h"
#include "builtins.h"
#include "tree-eh.h"
#include "gimple-match.h"
#include "gomp-constants.h"
#include "optabs-query.h"
#include "omp-general.h"
#include "tree-cfg.h"
#include "fold-const-call.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "calls.h"
#include "tree-vector-builder.h"
#include "tree-ssa-strlen.h"

enum strlen_range_kind {
  /* Compute the exact constant string length.  */
  SRK_STRLEN,
  /* Compute the maximum constant string length.  */
  SRK_STRLENMAX,
  /* Compute a range of string lengths bounded by object sizes.  When
     the length of a string cannot be determined, consider as the upper
     bound the size of the enclosing object the string may be a member
     or element of.  Also determine the size of the largest character
     array the string may refer to.  */
  SRK_LENRANGE,
  /* Determine the integer value of the argument (not string length).  */
  SRK_INT_VALUE
};

static bool
get_range_strlen (tree, bitmap *, strlen_range_kind, c_strlen_data *, unsigned);

/* Return true when DECL can be referenced from the current unit.
   FROM_DECL (if non-null) specifies the variable whose constructor DECL
   was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
        C++ virtual tables do have known constructors even
        when they are keyed to another compilation unit.
        Those tables can contain pointers to methods and vars
        in other units.  Those methods have both STATIC and EXTERNAL
        set.
     2) In WHOPR mode devirtualization might lead to a reference
        to a method that was partitioned elsewhere.
        In this case we have a static VAR_DECL or FUNCTION_DECL
        that has no corresponding callgraph/varpool node
        declaring the body.
     3) COMDAT functions referred to by external vtables that
        we devirtualize only during the final compilation stage.
        At this time we have already decided that we will not output
        the function body and thus we can't reference the symbol
        directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || !VAR_OR_FUNCTION_DECL_P (decl))
    return true;

  /* Static objects can be referred to only if they are defined and not
     optimized out yet.  */
  if (!TREE_PUBLIC (decl))
    {
      if (DECL_EXTERNAL (decl))
        return false;
      /* Before we start optimizing unreachable code we can be sure all
         static objects are defined.  */
      if (symtab->function_flags_ready)
        return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
        return false;
      node = dyn_cast <cgraph_node *> (snode);
      return !node || !node->inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from the initializer of
     an external var or a var that has been optimized out.  */
  if (!from_decl
      || !VAR_P (from_decl)
      || (!DECL_EXTERNAL (from_decl)
          && (vnode = varpool_node::get (from_decl)) != NULL
          && vnode->definition)
      || (flag_ltrans
          && (vnode = varpool_node::get (from_decl)) != NULL
          && vnode->in_other_partition))
    return true;
  /* We are folding a reference from an external vtable.  The vtable may
     refer to a symbol keyed to another compilation unit.  The other
     compilation unit may be in a separate DSO and the symbol may be
     hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When a function is public, we can always introduce a new reference.
     The exception are COMDAT functions where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have a COMDAT.  We are going to check if we still have a definition
     or if the definition is going to be output in another partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual
     functions it may be tempting to not necessarily give up because the
     copy will be output elsewhere when the corresponding vtable is output.
     This is however not possible - the ABI specifies that COMDATs are
     output in units where they are used and when the other unit was
     compiled with LTO it is possible that the vtable was kept public
     while the function itself was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
          && (!snode->in_other_partition
              || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->inlined_to;
}

/* Create a temporary for TYPE for a statement STMT.  If the current function
   is in SSA form, an SSA name is created.  Otherwise a temporary register
   is made.  */

tree
create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
{
  if (gimple_in_ssa_p (cfun))
    return make_ssa_name (type, stmt);
  else
    return create_tmp_reg (type);
}

/* CVAL is a value taken from DECL_INITIAL of a variable.  Try to transform
   it into an acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies the variable whose constructor
   contains CVAL.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  if (CONSTANT_CLASS_P (cval))
    return cval;

  tree orig_cval = cval;
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
        cval = build1_loc (EXPR_LOCATION (cval),
                           ADDR_EXPR, TREE_TYPE (ptr),
                           fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
                                        ptr,
                                        fold_convert (ptr_type_node,
                                                      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
        {
          base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
          if (base)
            TREE_OPERAND (cval, 0) = base;
        }
      else
        base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
        return NULL_TREE;

      if (VAR_OR_FUNCTION_DECL_P (base)
          && !can_refer_decl_in_current_unit_p (base, from_decl))
        return NULL_TREE;
      if (TREE_TYPE (base) == error_mark_node)
        return NULL_TREE;
      if (VAR_P (base))
        TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
        {
          /* Make sure we create a cgraph node for functions we'll reference.
             They can be non-existent if the reference comes from an entry
             of an external vtable for example.  */
          cgraph_node::get_create (base);
        }
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
        cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
        cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  /* In CONSTRUCTORs we may see unfolded constants like (int (*) ()) 0.  */
  if (TREE_CODE (cval) == INTEGER_CST)
    {
      if (TREE_OVERFLOW_P (cval))
        cval = drop_tree_overflow (cval);
      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
        cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  return orig_cval;
}

/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  tree val = ctor_for_folding (sym);
  if (val != error_mark_node)
    {
      if (val)
        {
          val = canonicalize_constructor_val (unshare_expr (val), sym);
          if (val && is_gimple_min_invariant (val))
            return val;
          else
            return NULL_TREE;
        }
      /* Variables declared 'const' without an initializer
         have zero as the initializer if they may not be
         overridden at link or run time.  */
      if (!val
          && is_gimple_reg_type (TREE_TYPE (sym)))
        return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}
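
/* A minimal usage sketch (the declaration below is hypothetical): given

     static const int answer = 42;
     ...
     tmp = answer;

   get_symbol_constant_value (answer) returns the INTEGER_CST 42 and the
   load can be replaced by that constant.  A 'const' variable of register
   type with no initializer that cannot be overridden at link or run time
   folds to build_zero_cst of its type instead.  */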


/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree result;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
                           TREE_CODE (expr),
                           TREE_TYPE (expr),
                           TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
           && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
                             TREE_CODE (expr),
                             TREE_TYPE (expr),
                             TREE_OPERAND (expr, 0),
                             TREE_OPERAND (expr, 1),
                             TREE_OPERAND (expr, 2));

  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  return NULL_TREE;
}

/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        if (TREE_CLOBBER_P (rhs))
          return NULL_TREE;

        if (REFERENCE_CLASS_P (rhs))
          return maybe_fold_reference (rhs, false);

        else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
          {
            tree val = OBJ_TYPE_REF_EXPR (rhs);
            if (is_gimple_min_invariant (val))
              return val;
            else if (flag_devirtualize && virtual_method_call_p (rhs))
              {
                bool final;
                vec <cgraph_node *>targets
                  = possible_polymorphic_call_targets (rhs, stmt, &final);
                if (final && targets.length () <= 1 && dbg_cnt (devirt))
                  {
                    if (dump_enabled_p ())
                      {
                        dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
                                         "resolving virtual function address "
                                         "reference to function %s\n",
                                         targets.length () == 1
                                         ? targets[0]->name ()
                                         : "NULL");
                      }
                    if (targets.length () == 1)
                      {
                        val = fold_convert (TREE_TYPE (val),
                                            build_fold_addr_expr_loc
                                              (loc, targets[0]->decl));
                        STRIP_USELESS_TYPE_CONVERSION (val);
                      }
                    else
                      /* We cannot use __builtin_unreachable here because it
                         cannot have its address taken.  */
                      val = build_int_cst (TREE_TYPE (val), 0);
                    return val;
                  }
              }
          }

        else if (TREE_CODE (rhs) == ADDR_EXPR)
          {
            tree ref = TREE_OPERAND (rhs, 0);
            tree tem = maybe_fold_reference (ref, true);
            if (tem
                && TREE_CODE (tem) == MEM_REF
                && integer_zerop (TREE_OPERAND (tem, 1)))
              result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
            else if (tem)
              result = fold_convert (TREE_TYPE (rhs),
                                     build_fold_addr_expr_loc (loc, tem));
            else if (TREE_CODE (ref) == MEM_REF
                     && integer_zerop (TREE_OPERAND (ref, 1)))
              result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));

            if (result)
              {
                /* Strip away useless type conversions.  Both the
                   NON_LVALUE_EXPR that may have been added by fold, and
                   "useless" type conversions that might now be apparent
                   due to propagation.  */
                STRIP_USELESS_TYPE_CONVERSION (result);

                if (result != rhs && valid_gimple_rhs_p (result))
                  return result;
              }
          }

        else if (TREE_CODE (rhs) == CONSTRUCTOR
                 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
          {
            /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
            unsigned i;
            tree val;

            FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
              if (! CONSTANT_CLASS_P (val))
                return NULL_TREE;

            return build_vector_from_ctor (TREE_TYPE (rhs),
                                           CONSTRUCTOR_ELTS (rhs));
          }

        else if (DECL_P (rhs))
          return get_symbol_constant_value (rhs);
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
                                 TREE_TYPE (gimple_assign_lhs (stmt)),
                                 gimple_assign_rhs1 (stmt),
                                 gimple_assign_rhs2 (stmt),
                                 gimple_assign_rhs3 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
            return result;
        }
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
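
/* An illustrative sketch of the GIMPLE_SINGLE_RHS case above: an assignment
   whose RHS is a CONSTRUCTOR of constants with vector type, e.g.

     vector(4) int v;
     v_1 = {1, 2, 3, 4};

   is folded to a single VECTOR_CST via build_vector_from_ctor, while a
   DECL RHS is handled through get_symbol_constant_value.  */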

/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
           && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
          || (is_gimple_call (new_stmt)
              && (gimple_call_flags (new_stmt)
                  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
        {
          tree vdef;
          if (!laststore)
            vdef = gimple_vdef (stmt);
          else
            vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
          gimple_set_vdef (new_stmt, vdef);
          if (vdef && TREE_CODE (vdef) == SSA_NAME)
            SSA_NAME_DEF_STMT (vdef) = new_stmt;
          laststore = new_stmt;
        }
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
         name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
        gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
        reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
          && TREE_CODE (vdef) == SSA_NAME)
        {
          unlink_stmt_vdef (stmt);
          release_ssa_name (vdef);
        }
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}

/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
         which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
        {
          pop_gimplify_context (NULL);
          if (gimple_in_ssa_p (cfun))
            {
              unlink_stmt_vdef (stmt);
              release_defs (stmt);
            }
          gsi_replace (si_p, gimple_build_nop (), false);
          return;
        }
    }
  else
    {
      tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
                                       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  gsi_replace_with_seq_vops (si_p, stmts);
}


/* Replace the call at *GSI with the gimple value VAL.  */

void
replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  gimple *repl;
  if (lhs)
    {
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
        val = fold_convert (TREE_TYPE (lhs), val);
      repl = gimple_build_assign (lhs, val);
    }
  else
    repl = gimple_build_nop ();
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      unlink_stmt_vdef (stmt);
      release_ssa_name (vdef);
    }
  gsi_replace (gsi, repl, false);
}

/* Replace the call at *GSI with the new call REPL and fold that
   again.  */

static void
replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
{
  gimple *stmt = gsi_stmt (*gsi);
  gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
  gimple_set_location (repl, gimple_location (stmt));
  gimple_move_vops (repl, stmt);
  gsi_replace (gsi, repl, false);
  fold_stmt (gsi);
}
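
/* Sketch of how these helpers are typically used by the builtin folders
   below (the constant here is only an example): when a call such as
   strlen ("abc") can be evaluated, a folder computes the value and
   substitutes it,

     tree len = build_int_cst (size_type_node, 3);
     replace_call_with_value (gsi, len);

   which assigns LEN to the call's lhs (or drops the call when the result
   is unused) and releases the call's virtual definition.  */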

/* Return true if VAR is a VAR_DECL or a component thereof.  */

static bool
var_decl_component_p (tree var)
{
  tree inner = var;
  while (handled_component_p (inner))
    inner = TREE_OPERAND (inner, 0);
  return (DECL_P (inner)
          || (TREE_CODE (inner) == MEM_REF
              && TREE_CODE (TREE_OPERAND (inner, 0)) == ADDR_EXPR));
}

/* Return TRUE if the SIZE argument, representing the size of an
   object, is in a range of values of which exactly zero is valid.  */

static bool
size_must_be_zero_p (tree size)
{
  if (integer_zerop (size))
    return true;

  if (TREE_CODE (size) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (size)))
    return false;

  tree type = TREE_TYPE (size);
  int prec = TYPE_PRECISION (type);

  /* Compute the value of SSIZE_MAX, the largest positive value that
     can be stored in ssize_t, the signed counterpart of size_t.  */
  wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
  value_range valid_range (build_int_cst (type, 0),
                           wide_int_to_tree (type, ssize_max));
  value_range vr;
  get_range_info (size, vr);
  vr.intersect (&valid_range);
  return vr.zero_p ();
}

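/* Sketch of the intent: object sizes are valid only in [0, SSIZE_MAX].
   If, for example, range propagation recorded for a size_t N_1 the
   anti-range ~[1, SIZE_MAX - 1] (so only 0 and SIZE_MAX remain possible),
   intersecting that with [0, SSIZE_MAX] leaves just zero, and a call such
   as memcpy (d, s, N_1) can be folded as if its length were 0.  */
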
/* Fold function call to builtin mem{{,p}cpy,move}.  Try to detect and
   diagnose (otherwise undefined) overlapping copies without preventing
   folding.  When folded, GCC guarantees that overlapping memcpy has
   the same semantics as memmove.  Calls to the library memcpy need not
   provide the same guarantee.  Return false if no simplification can
   be made.  */

static bool
gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
                               tree dest, tree src, enum built_in_function code)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree len = gimple_call_arg (stmt, 2);
  tree destvar, srcvar;
  location_t loc = gimple_location (stmt);

  /* If the LEN parameter is a constant zero or in range where
     the only valid value is zero, return DEST.  */
  if (size_must_be_zero_p (len))
    {
      gimple *repl;
      if (gimple_call_lhs (stmt))
        repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
      else
        repl = gimple_build_nop ();
      tree vdef = gimple_vdef (stmt);
      if (vdef && TREE_CODE (vdef) == SSA_NAME)
        {
          unlink_stmt_vdef (stmt);
          release_ssa_name (vdef);
        }
      gsi_replace (gsi, repl, false);
      return true;
    }

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    {
      /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
         It's safe and may even be emitted by GCC itself (see bug
         32667).  */
      unlink_stmt_vdef (stmt);
      if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
        release_ssa_name (gimple_vdef (stmt));
      if (!lhs)
        {
          gsi_replace (gsi, gimple_build_nop (), false);
          return true;
        }
      goto done;
    }
  else
    {
      tree srctype, desttype;
      unsigned int src_align, dest_align;
      tree off0;
      const char *tmp_str;
      unsigned HOST_WIDE_INT tmp_len;

      /* Build accesses at offset zero with a ref-all character type.  */
      off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
                                                         ptr_mode, true), 0);

      /* If we can perform the copy efficiently with first doing all loads
         and then all stores inline it that way.  Currently efficiently
         means that we can load all the memory into a single integer
         register which is what MOVE_MAX gives us.  */
      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (tree_fits_uhwi_p (len)
          && compare_tree_int (len, MOVE_MAX) <= 0
          /* FIXME: Don't transform copies from strings with known length.
             Until GCC 9 this prevented a case in gcc.dg/strlenopt-8.c
             from being handled, and the case was XFAILed for that reason.
             Now that it is handled and the XFAIL removed, as soon as other
             strlenopt tests that rely on it for passing are adjusted, this
             hack can be removed.  */
          && !c_strlen (src, 1)
          && !((tmp_str = c_getstr (src, &tmp_len)) != NULL
               && memchr (tmp_str, 0, tmp_len) == NULL))
        {
          unsigned ilen = tree_to_uhwi (len);
          if (pow2p_hwi (ilen))
            {
              /* Detect out-of-bounds accesses without issuing warnings.
                 Avoid folding out-of-bounds copies but to avoid false
                 positives for unreachable code defer warning until after
                 DCE has worked its magic.
                 -Wrestrict is still diagnosed.  */
              if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
                                                         dest, src, len, len,
                                                         false, false))
                if (warning != OPT_Wrestrict)
                  return false;

              scalar_int_mode mode;
              tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
              if (type
                  && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
                  && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
                  /* If the destination pointer is not aligned we must be able
                     to emit an unaligned store.  */
                  && (dest_align >= GET_MODE_ALIGNMENT (mode)
                      || !targetm.slow_unaligned_access (mode, dest_align)
                      || (optab_handler (movmisalign_optab, mode)
                          != CODE_FOR_nothing)))
                {
                  tree srctype = type;
                  tree desttype = type;
                  if (src_align < GET_MODE_ALIGNMENT (mode))
                    srctype = build_aligned_type (type, src_align);
                  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
                  tree tem = fold_const_aggregate_ref (srcmem);
                  if (tem)
                    srcmem = tem;
                  else if (src_align < GET_MODE_ALIGNMENT (mode)
                           && targetm.slow_unaligned_access (mode, src_align)
                           && (optab_handler (movmisalign_optab, mode)
                               == CODE_FOR_nothing))
                    srcmem = NULL_TREE;
                  if (srcmem)
                    {
                      gimple *new_stmt;
                      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
                        {
                          new_stmt = gimple_build_assign (NULL_TREE, srcmem);
                          srcmem
                            = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
                                                          new_stmt);
                          gimple_assign_set_lhs (new_stmt, srcmem);
                          gimple_set_vuse (new_stmt, gimple_vuse (stmt));
                          gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
                        }
                      if (dest_align < GET_MODE_ALIGNMENT (mode))
                        desttype = build_aligned_type (type, dest_align);
                      new_stmt
                        = gimple_build_assign (fold_build2 (MEM_REF, desttype,
                                                            dest, off0),
                                               srcmem);
                      gimple_move_vops (new_stmt, stmt);
                      if (!lhs)
                        {
                          gsi_replace (gsi, new_stmt, false);
                          return true;
                        }
                      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
                      goto done;
                    }
                }
            }
        }

      if (code == BUILT_IN_MEMMOVE)
        {
          /* Both DEST and SRC must be pointer types.
             ??? This is what old code did.  Is the testing for pointer types
             really mandatory?

             If either SRC is readonly or length is 1, we can use memcpy.  */
          if (!dest_align || !src_align)
            return false;
          if (readonly_data_expr (src)
              || (tree_fits_uhwi_p (len)
                  && (MIN (src_align, dest_align) / BITS_PER_UNIT
                      >= tree_to_uhwi (len))))
            {
              tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
              if (!fn)
                return false;
              gimple_call_set_fndecl (stmt, fn);
              gimple_call_set_arg (stmt, 0, dest);
              gimple_call_set_arg (stmt, 1, src);
              fold_stmt (gsi);
              return true;
            }

          /* If *src and *dest can't overlap, optimize into memcpy as well.  */
          if (TREE_CODE (src) == ADDR_EXPR
              && TREE_CODE (dest) == ADDR_EXPR)
            {
              tree src_base, dest_base, fn;
              poly_int64 src_offset = 0, dest_offset = 0;
              poly_uint64 maxsize;

              srcvar = TREE_OPERAND (src, 0);
              src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
              if (src_base == NULL)
                src_base = srcvar;
              destvar = TREE_OPERAND (dest, 0);
              dest_base = get_addr_base_and_unit_offset (destvar,
                                                         &dest_offset);
              if (dest_base == NULL)
                dest_base = destvar;
              if (!poly_int_tree_p (len, &maxsize))
                maxsize = -1;
              if (SSA_VAR_P (src_base)
                  && SSA_VAR_P (dest_base))
                {
                  if (operand_equal_p (src_base, dest_base, 0)
                      && ranges_maybe_overlap_p (src_offset, maxsize,
                                                 dest_offset, maxsize))
                    return false;
                }
              else if (TREE_CODE (src_base) == MEM_REF
                       && TREE_CODE (dest_base) == MEM_REF)
                {
                  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
                                         TREE_OPERAND (dest_base, 0), 0))
                    return false;
                  poly_offset_int full_src_offset
                    = mem_ref_offset (src_base) + src_offset;
                  poly_offset_int full_dest_offset
                    = mem_ref_offset (dest_base) + dest_offset;
                  if (ranges_maybe_overlap_p (full_src_offset, maxsize,
                                              full_dest_offset, maxsize))
                    return false;
                }
              else
                return false;

              fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
              if (!fn)
                return false;
              gimple_call_set_fndecl (stmt, fn);
              gimple_call_set_arg (stmt, 0, dest);
              gimple_call_set_arg (stmt, 1, src);
              fold_stmt (gsi);
              return true;
            }

          /* If the destination and source do not alias, optimize into
             memcpy as well.  */
          if ((is_gimple_min_invariant (dest)
               || TREE_CODE (dest) == SSA_NAME)
              && (is_gimple_min_invariant (src)
                  || TREE_CODE (src) == SSA_NAME))
            {
              ao_ref destr, srcr;
              ao_ref_init_from_ptr_and_size (&destr, dest, len);
              ao_ref_init_from_ptr_and_size (&srcr, src, len);
              if (!refs_may_alias_p_1 (&destr, &srcr, false))
                {
                  tree fn;
                  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
                  if (!fn)
                    return false;
                  gimple_call_set_fndecl (stmt, fn);
                  gimple_call_set_arg (stmt, 0, dest);
                  gimple_call_set_arg (stmt, 1, src);
                  fold_stmt (gsi);
                  return true;
                }
            }

          return false;
        }

      if (!tree_fits_shwi_p (len))
        return false;
      if (!POINTER_TYPE_P (TREE_TYPE (src))
          || !POINTER_TYPE_P (TREE_TYPE (dest)))
        return false;
      /* In the following try to find a type that is most natural to be
         used for the memcpy source and destination and that allows
         the most optimization when memcpy is turned into a plain assignment
         using that type.  In theory we could always use a char[len] type
         but that only gains us that the destination and source possibly
         no longer will have their address taken.  */
      srctype = TREE_TYPE (TREE_TYPE (src));
      if (TREE_CODE (srctype) == ARRAY_TYPE
          && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
        srctype = TREE_TYPE (srctype);
      desttype = TREE_TYPE (TREE_TYPE (dest));
      if (TREE_CODE (desttype) == ARRAY_TYPE
          && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
        desttype = TREE_TYPE (desttype);
      if (TREE_ADDRESSABLE (srctype)
          || TREE_ADDRESSABLE (desttype))
        return false;

      /* Make sure we are not copying using a floating-point mode or
         a type whose size possibly does not match its precision.  */
      if (FLOAT_MODE_P (TYPE_MODE (desttype))
          || TREE_CODE (desttype) == BOOLEAN_TYPE
          || TREE_CODE (desttype) == ENUMERAL_TYPE)
        desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
      if (FLOAT_MODE_P (TYPE_MODE (srctype))
          || TREE_CODE (srctype) == BOOLEAN_TYPE
          || TREE_CODE (srctype) == ENUMERAL_TYPE)
        srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
      if (!srctype)
        srctype = desttype;
      if (!desttype)
        desttype = srctype;
      if (!srctype)
        return false;

      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);

      /* Choose between src and destination type for the access based
         on alignment, whether the access constitutes a register access
         and whether it may actually expose a declaration for SSA rewrite
         or SRA decomposition.  */
      destvar = NULL_TREE;
      srcvar = NULL_TREE;
      if (TREE_CODE (dest) == ADDR_EXPR
          && var_decl_component_p (TREE_OPERAND (dest, 0))
          && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len)
          && dest_align >= TYPE_ALIGN (desttype)
          && (is_gimple_reg_type (desttype)
              || src_align >= TYPE_ALIGN (desttype)))
        destvar = fold_build2 (MEM_REF, desttype, dest, off0);
      else if (TREE_CODE (src) == ADDR_EXPR
               && var_decl_component_p (TREE_OPERAND (src, 0))
               && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
               && src_align >= TYPE_ALIGN (srctype)
               && (is_gimple_reg_type (srctype)
                   || dest_align >= TYPE_ALIGN (srctype)))
        srcvar = fold_build2 (MEM_REF, srctype, src, off0);
      if (srcvar == NULL_TREE && destvar == NULL_TREE)
        return false;

      /* Now that we chose an access type express the other side in
         terms of it if the target allows that with respect to alignment
         constraints.  */
      if (srcvar == NULL_TREE)
        {
          if (src_align >= TYPE_ALIGN (desttype))
            srcvar = fold_build2 (MEM_REF, desttype, src, off0);
          else
            {
              if (STRICT_ALIGNMENT)
                return false;
              srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
                                            src_align);
              srcvar = fold_build2 (MEM_REF, srctype, src, off0);
            }
        }
      else if (destvar == NULL_TREE)
        {
          if (dest_align >= TYPE_ALIGN (srctype))
            destvar = fold_build2 (MEM_REF, srctype, dest, off0);
          else
            {
              if (STRICT_ALIGNMENT)
                return false;
              desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
                                             dest_align);
              destvar = fold_build2 (MEM_REF, desttype, dest, off0);
            }
        }

      /* Same as above, detect out-of-bounds accesses without issuing
         warnings.  Avoid folding out-of-bounds copies but to avoid
         false positives for unreachable code defer warning until
         after DCE has worked its magic.
         -Wrestrict is still diagnosed.  */
      if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
                                                 dest, src, len, len,
                                                 false, false))
        if (warning != OPT_Wrestrict)
          return false;

      gimple *new_stmt;
      if (is_gimple_reg_type (TREE_TYPE (srcvar)))
        {
          tree tem = fold_const_aggregate_ref (srcvar);
          if (tem)
            srcvar = tem;
          if (! is_gimple_min_invariant (srcvar))
            {
              new_stmt = gimple_build_assign (NULL_TREE, srcvar);
              srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
                                                   new_stmt);
              gimple_assign_set_lhs (new_stmt, srcvar);
              gimple_set_vuse (new_stmt, gimple_vuse (stmt));
              gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
            }
          new_stmt = gimple_build_assign (destvar, srcvar);
          goto set_vop_and_replace;
        }

      /* We get an aggregate copy.  Use an unsigned char[] type to
         perform the copying to preserve padding and to avoid any issues
         with TREE_ADDRESSABLE types or float modes behavior on copying.  */
      desttype = build_array_type_nelts (unsigned_char_type_node,
                                         tree_to_uhwi (len));
      srctype = desttype;
      if (src_align > TYPE_ALIGN (srctype))
        srctype = build_aligned_type (srctype, src_align);
      if (dest_align > TYPE_ALIGN (desttype))
        desttype = build_aligned_type (desttype, dest_align);
      new_stmt
        = gimple_build_assign (fold_build2 (MEM_REF, desttype, dest, off0),
                               fold_build2 (MEM_REF, srctype, src, off0));
set_vop_and_replace:
      gimple_move_vops (new_stmt, stmt);
      if (!lhs)
        {
          gsi_replace (gsi, new_stmt, false);
          return true;
        }
      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
    }

done:
  gimple_seq stmts = NULL;
  if (code == BUILT_IN_MEMCPY || code == BUILT_IN_MEMMOVE)
    len = NULL_TREE;
  else if (code == BUILT_IN_MEMPCPY)
    {
      len = gimple_convert_to_ptrofftype (&stmts, loc, len);
      dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
                           TREE_TYPE (dest), dest, len);
    }
  else
    gcc_unreachable ();

  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gimple *repl = gimple_build_assign (lhs, dest);
  gsi_replace (gsi, repl, false);
  return true;
}

/* Transform a call to built-in bcmp (a, b, len) at *GSI into one
   to built-in memcmp (a, b, len).  */

static bool
gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);

  if (!fn)
    return false;

  /* Transform bcmp (a, b, len) into memcmp (a, b, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree a = gimple_call_arg (stmt, 0);
  tree b = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  gimple *repl = gimple_build_call (fn, 3, a, b, len);
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}

/* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
   to built-in memmove (dest, src, len).  */

static bool
gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);

  if (!fn)
    return false;

  /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
     it's equivalent to memmove (not memcpy).  Transform bcopy (src, dest,
     len) into memmove (dest, src, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree src = gimple_call_arg (stmt, 0);
  tree dest = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}

/* Transform a call to built-in bzero (dest, len) at *GSI into one
   to built-in memset (dest, 0, len).  */

static bool
gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);

  if (!fn)
    return false;

  /* Transform bzero (dest, len) into memset (dest, 0, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree len = gimple_call_arg (stmt, 1);

  gimple_seq seq = NULL;
  gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
  gimple_seq_add_stmt_without_update (&seq, repl);
  gsi_replace_with_seq_vops (gsi, seq);
  fold_stmt (gsi);

  return true;
}
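
/* To sketch the combined effect of the transforms above: bzero (p, n)
   first becomes memset (p, 0, n) with the original call's virtual operands
   preserved by gsi_replace_with_seq_vops, and the immediate fold_stmt call
   then gives gimple_fold_builtin_memset a chance to reduce it further,
   e.g. to a plain store when N is a small constant and P points to a
   suitably aligned scalar.  */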

/* Fold function call to builtin memset or bzero at *GSI setting the
   memory of size LEN to VAL.  Return whether a simplification was made.  */

static bool
gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree etype;
  unsigned HOST_WIDE_INT length, cval;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
      return true;
    }

  if (! tree_fits_uhwi_p (len))
    return false;

  if (TREE_CODE (c) != INTEGER_CST)
    return false;

  tree dest = gimple_call_arg (stmt, 0);
  tree var = dest;
  if (TREE_CODE (var) != ADDR_EXPR)
    return false;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return false;

  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  if (!INTEGRAL_TYPE_P (etype)
      && !POINTER_TYPE_P (etype))
    return NULL_TREE;

  if (! var_decl_component_p (var))
    return NULL_TREE;

  length = tree_to_uhwi (len);
  if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
      || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
    return NULL_TREE;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return NULL_TREE;

  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
        return NULL_TREE;

      cval = TREE_INT_CST_LOW (c);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }

  var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
  gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
  gimple_move_vops (store, stmt);
  gsi_insert_before (gsi, store, GSI_SAME_STMT);
  if (gimple_call_lhs (stmt))
    {
      gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
      gsi_replace (gsi, asgn, false);
    }
  else
    {
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (gsi);
      gsi_remove (&gsi2, true);
    }

  return true;
}
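
/* Illustration of the byte replication above (assuming 8-bit units and a
   32-bit int): for

     int i;
     memset (&i, 1, sizeof i);

   ETYPE is int, CVAL becomes 0x01010101, and the call is folded to the
   single store i = 16843009, followed by an assignment of DEST to the
   call's lhs if one exists.  */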

/* Helper of get_range_strlen for ARG that is not an SSA_NAME.  */

static bool
get_range_strlen_tree (tree arg, bitmap *visited, strlen_range_kind rkind,
                       c_strlen_data *pdata, unsigned eltsize)
{
  gcc_assert (TREE_CODE (arg) != SSA_NAME);

  /* The length computed by this invocation of the function.  */
  tree val = NULL_TREE;

  /* True if VAL is an optimistic (tight) bound determined from
     the size of the character array in which the string may be
     stored.  In that case, the computed VAL is used to set
     PDATA->MAXBOUND.  */
  bool tight_bound = false;

  /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
    {
      tree op = TREE_OPERAND (arg, 0);
      if (integer_zerop (TREE_OPERAND (op, 1)))
        {
          tree aop0 = TREE_OPERAND (op, 0);
          if (TREE_CODE (aop0) == INDIRECT_REF
              && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
            return get_range_strlen (TREE_OPERAND (aop0, 0), visited, rkind,
                                     pdata, eltsize);
        }
      else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF
               && rkind == SRK_LENRANGE)
        {
          /* Fail if an array is the last member of a struct object
             since it could be treated as a (fake) flexible array
             member.  */
          tree idx = TREE_OPERAND (op, 1);

          arg = TREE_OPERAND (op, 0);
          tree optype = TREE_TYPE (arg);
          if (tree dom = TYPE_DOMAIN (optype))
            if (tree bound = TYPE_MAX_VALUE (dom))
              if (TREE_CODE (bound) == INTEGER_CST
                  && TREE_CODE (idx) == INTEGER_CST
                  && tree_int_cst_lt (bound, idx))
                return false;
        }
    }

  if (rkind == SRK_INT_VALUE)
    {
      /* We are computing the maximum value (not string length).  */
      val = arg;
      if (TREE_CODE (val) != INTEGER_CST
          || tree_int_cst_sgn (val) < 0)
        return false;
    }
  else
    {
      c_strlen_data lendata = { };
      val = c_strlen (arg, 1, &lendata, eltsize);

      if (!val && lendata.decl)
        {
          /* ARG refers to an unterminated const character array.
             DATA.DECL with size DATA.LEN.  */
          val = lendata.minlen;
          pdata->decl = lendata.decl;
        }
    }

  /* Set if VAL represents the maximum length based on array size (set
     when exact length cannot be determined).  */
  bool maxbound = false;

  if (!val && rkind == SRK_LENRANGE)
    {
      if (TREE_CODE (arg) == ADDR_EXPR)
        return get_range_strlen (TREE_OPERAND (arg, 0), visited, rkind,
                                 pdata, eltsize);

      if (TREE_CODE (arg) == ARRAY_REF)
        {
          tree optype = TREE_TYPE (TREE_OPERAND (arg, 0));

          /* Determine the "innermost" array type.  */
          while (TREE_CODE (optype) == ARRAY_TYPE
                 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
            optype = TREE_TYPE (optype);

          /* Avoid arrays of pointers.  */
          tree eltype = TREE_TYPE (optype);
          if (TREE_CODE (optype) != ARRAY_TYPE
              || !INTEGRAL_TYPE_P (eltype))
            return false;

          /* Fail when the array bound is unknown or zero.  */
          val = TYPE_SIZE_UNIT (optype);
          if (!val || integer_zerop (val))
            return false;

          val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
                             integer_one_node);

          /* Set the minimum size to zero since the string in
             the array could have zero length.  */
          pdata->minlen = ssize_int (0);

          tight_bound = true;
        }
      else if (TREE_CODE (arg) == COMPONENT_REF
               && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
                   == ARRAY_TYPE))
        {
          /* Use the type of the member array to determine the upper
             bound on the length of the array.  This may be overly
             optimistic if the array itself isn't NUL-terminated and
             the caller relies on the subsequent member to contain
             the NUL but that would only be considered valid if
             the array were the last member of a struct.  */

          tree fld = TREE_OPERAND (arg, 1);

          tree optype = TREE_TYPE (fld);

          /* Determine the "innermost" array type.  */
          while (TREE_CODE (optype) == ARRAY_TYPE
                 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
            optype = TREE_TYPE (optype);

          /* Fail when the array bound is unknown or zero.  */
          val = TYPE_SIZE_UNIT (optype);
          if (!val || integer_zerop (val))
            return false;
          val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
                             integer_one_node);

          /* Set the minimum size to zero since the string in
             the array could have zero length.  */
          pdata->minlen = ssize_int (0);

          /* The array size determined above is an optimistic bound
             on the length.  If the array isn't nul-terminated the
             length computed by the library function would be greater.
             Even though using strlen to cross the subobject boundary
             is undefined, avoid drawing conclusions from the member
             type about the length here.  */
          tight_bound = true;
        }
      else if (VAR_P (arg))
        {
          /* Avoid handling pointers to arrays.  GCC might misuse
             a pointer to an array of one bound to point to an array
             object of a greater bound.  */
          tree argtype = TREE_TYPE (arg);
          if (TREE_CODE (argtype) == ARRAY_TYPE)
            {
              val = TYPE_SIZE_UNIT (argtype);
              if (!val
                  || TREE_CODE (val) != INTEGER_CST
                  || integer_zerop (val))
                return false;
              val = wide_int_to_tree (TREE_TYPE (val),
                                      wi::sub (wi::to_wide (val), 1));

              /* Set the minimum size to zero since the string in
                 the array could have zero length.  */
              pdata->minlen = ssize_int (0);
            }
        }
      maxbound = true;
    }

  if (!val)
    return false;

  /* Adjust the lower bound on the string length as necessary.  */
  if (!pdata->minlen
      || (rkind != SRK_STRLEN
          && TREE_CODE (pdata->minlen) == INTEGER_CST
          && TREE_CODE (val) == INTEGER_CST
          && tree_int_cst_lt (val, pdata->minlen)))
    pdata->minlen = val;

  if (pdata->maxbound && TREE_CODE (pdata->maxbound) == INTEGER_CST)
    {
      /* Adjust the tighter (more optimistic) string length bound
         if necessary and proceed to adjust the more conservative
         bound.  */
      if (TREE_CODE (val) == INTEGER_CST)
        {
          if (tree_int_cst_lt (pdata->maxbound, val))
            pdata->maxbound = val;
        }
      else
        pdata->maxbound = val;
    }
  else if (pdata->maxbound || maxbound)
    /* Set PDATA->MAXBOUND only if it either isn't INTEGER_CST or
       if VAL corresponds to the maximum length determined based
       on the type of the object.  */
    pdata->maxbound = val;

  if (tight_bound)
    {
      /* VAL computed above represents an optimistically tight bound
         on the length of the string based on the referenced object's
         or subobject's type.  Determine the conservative upper bound
         based on the enclosing object's size if possible.  */
      if (rkind == SRK_LENRANGE)
        {
          poly_int64 offset;
          tree base = get_addr_base_and_unit_offset (arg, &offset);
          if (!base)
            {
              /* When the call above fails due to a non-constant offset
                 assume the offset is zero and use the size of the whole
                 enclosing object instead.  */
              base = get_base_address (arg);
              offset = 0;
            }
          /* If the base object is a pointer no upper bound on the length
             can be determined.  Otherwise the maximum length is equal to
             the size of the enclosing object minus the offset of
             the referenced subobject minus 1 (for the terminating nul).  */
          tree type = TREE_TYPE (base);
          if (TREE_CODE (type) == POINTER_TYPE
              || !VAR_P (base) || !(val = DECL_SIZE_UNIT (base)))
            val = build_all_ones_cst (size_type_node);
          else
            {
              val = DECL_SIZE_UNIT (base);
              val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
                                 size_int (offset + 1));
            }
        }
      else
        return false;
    }

  if (pdata->maxlen)
    {
      /* Adjust the more conservative bound if possible/necessary
         and fail otherwise.  */
      if (rkind != SRK_STRLEN)
        {
          if (TREE_CODE (pdata->maxlen) != INTEGER_CST
              || TREE_CODE (val) != INTEGER_CST)
            return false;

          if (tree_int_cst_lt (pdata->maxlen, val))
            pdata->maxlen = val;
          return true;
        }
      else if (simple_cst_equal (val, pdata->maxlen) != 1)
        {
          /* Fail if the length of this ARG is different from that
             previously determined from another ARG.  */
          return false;
        }
    }

  pdata->maxlen = val;
  return rkind == SRK_LENRANGE || !integer_all_onesp (val);
}
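
/* A small example of the bookkeeping above: for a plain declaration
   char buf[8] with unknown contents, an SRK_LENRANGE query yields
   VAL = 7 (the array size minus 1, reserving room for the nul),
   PDATA->MINLEN = 0 and PDATA->MAXLEN = PDATA->MAXBOUND = 7.  For a
   member array the same value is only a tight (optimistic) bound and
   the conservative maximum is recomputed from the size of the
   enclosing object.  */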
1541
5d6655eb
MS
1542/* For an ARG referencing one or more strings, try to obtain the range
1543 of their lengths, or the size of the largest array ARG referes to if
1544 the range of lengths cannot be determined, and store all in *PDATA.
1545 For an integer ARG (when RKIND == SRK_INT_VALUE), try to determine
1546 the maximum constant value.
1547 If ARG is an SSA_NAME, follow its use-def chains. When RKIND ==
1548 SRK_STRLEN, then if PDATA->MAXLEN is not equal to the determined
1549 length or if we are unable to determine the length, return false.
fb471a13 1550 VISITED is a bitmap of visited variables.
598f7235
MS
1551 RKIND determines the kind of value or range to obtain (see
1552 strlen_range_kind).
1553 Set PDATA->DECL if ARG refers to an unterminated constant array.
1554 On input, set ELTSIZE to 1 for normal single byte character strings,
1555 and either 2 or 4 for wide characer strings (the size of wchar_t).
1556 Return true if *PDATA was successfully populated and false otherwise. */
fb471a13
MS
1557
1558static bool
03c4a945
MS
1559get_range_strlen (tree arg, bitmap *visited,
1560 strlen_range_kind rkind,
1561 c_strlen_data *pdata, unsigned eltsize)
fb471a13
MS
1562{
1563
1564 if (TREE_CODE (arg) != SSA_NAME)
03c4a945 1565 return get_range_strlen_tree (arg, visited, rkind, pdata, eltsize);
fb471a13 1566
fef5a0d9
RB
1567 /* If ARG is registered for SSA update we cannot look at its defining
1568 statement. */
1569 if (name_registered_for_update_p (arg))
1570 return false;
1571
1572 /* If we were already here, break the infinite cycle. */
dcb7fae2
RB
1573 if (!*visited)
1574 *visited = BITMAP_ALLOC (NULL);
1575 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
fef5a0d9
RB
1576 return true;
1577
fb471a13
MS
1578 tree var = arg;
1579 gimple *def_stmt = SSA_NAME_DEF_STMT (var);
1580
fef5a0d9
RB
1581 switch (gimple_code (def_stmt))
1582 {
1583 case GIMPLE_ASSIGN:
598f7235
MS
1584 /* The RHS of the statement defining VAR must either have a
1585 constant length or come from another SSA_NAME with a constant
1586 length. */
fef5a0d9
RB
1587 if (gimple_assign_single_p (def_stmt)
1588 || gimple_assign_unary_nop_p (def_stmt))
1589 {
598f7235 1590 tree rhs = gimple_assign_rhs1 (def_stmt);
03c4a945 1591 return get_range_strlen (rhs, visited, rkind, pdata, eltsize);
fef5a0d9
RB
1592 }
1593 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1594 {
c8602fe6
JJ
1595 tree ops[2] = { gimple_assign_rhs2 (def_stmt),
1596 gimple_assign_rhs3 (def_stmt) };
1597
1598 for (unsigned int i = 0; i < 2; i++)
03c4a945 1599 if (!get_range_strlen (ops[i], visited, rkind, pdata, eltsize))
c8602fe6 1600 {
84de9426 1601 if (rkind != SRK_LENRANGE)
c8602fe6 1602 return false;
80c2bad6
MS
1603 /* Set the upper bound to the maximum to prevent
1604 it from being adjusted in the next iteration but
1605 leave MINLEN and the more conservative MAXBOUND
1606 determined so far alone (or leave them null if
1607 they haven't been set yet). That the MINLEN is
1608 in fact zero can be determined from MAXLEN being
1609 unbounded but the discovered minimum is used for
1610 diagnostics. */
730832cd 1611 pdata->maxlen = build_all_ones_cst (size_type_node);
c8602fe6
JJ
1612 }
1613 return true;
cc8bea0a 1614 }
fef5a0d9
RB
1615 return false;
1616
1617 case GIMPLE_PHI:
598f7235
MS
1618 /* Unless RKIND == SRK_LENRANGE, all arguments of the PHI node
1619 must have a constant length. */
c8602fe6 1620 for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
fef5a0d9
RB
1621 {
1622 tree arg = gimple_phi_arg (def_stmt, i)->def;
1623
1624 /* If this PHI has itself as an argument, we cannot
1625 determine the string length of this argument. However,
1626 if we can find a constant string length for the other
1627 PHI args then we can still be sure that this is a
1628 constant string length. So be optimistic and just
1629 continue with the next argument. */
1630 if (arg == gimple_phi_result (def_stmt))
1631 continue;
1632
03c4a945 1633 if (!get_range_strlen (arg, visited, rkind, pdata, eltsize))
88d0c3f0 1634 {
84de9426 1635 if (rkind != SRK_LENRANGE)
88d0c3f0 1636 return false;
80c2bad6
MS
1637 /* Set the upper bound to the maximum to prevent
1638 it from being adjusted in the next iteration but
1639 leave MINLEN and the more conservative MAXBOUND
1640 determined so far alone (or leave them null if
1641 they haven't been set yet). That the MINLEN is
1642 in fact zero can be determined from MAXLEN being
1643 unbounded but the discovered minimum is used for
1644 diagnostics. */
730832cd 1645 pdata->maxlen = build_all_ones_cst (size_type_node);
88d0c3f0 1646 }
fef5a0d9 1647 }
fef5a0d9
RB
1648 return true;
1649
1650 default:
1651 return false;
1652 }
1653}
5d6655eb 1654
97623b52
MS
1655/* Try to obtain the range of the lengths of the string(s) referenced
1656 by ARG, or the size of the largest array ARG refers to if the range
a7160771
MS
1657 of lengths cannot be determined, and store all in *PDATA which must
1658 be zero-initialized on input except PDATA->MAXBOUND may be set to
1659 a non-null tree node other than INTEGER_CST to request that it be
1660 set to the length of the longest string in a PHI. ELTSIZE is
1661 the expected size of the string element in bytes: 1 for char and
97623b52
MS
1662 some power of 2 for wide characters.
1663 Return true if the range [PDATA->MINLEN, PDATA->MAXLEN] is suitable
1664 for optimization. Returning false means that a nonzero PDATA->MINLEN
1665 doesn't reflect the true lower bound of the range when PDATA->MAXLEN
1666 is -1 (in that case, the actual range is indeterminate, i.e.,
1667 [0, PTRDIFF_MAX - 2]). */
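/* A minimal, illustrative use (ARG and LENDATA are placeholder names,
   not code taken from this file):

     c_strlen_data lendata = { };
     if (get_range_strlen (arg, &lendata, 1))
       ... lendata.minlen and lendata.maxlen bound the string lengths ...
     else
       ... the range is indeterminate: lendata.minlen is zero and
           lendata.maxlen is all-ones ...  */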
88d0c3f0 1668
3f343040 1669bool
84de9426 1670get_range_strlen (tree arg, c_strlen_data *pdata, unsigned eltsize)
88d0c3f0
MS
1671{
1672 bitmap visited = NULL;
a7160771 1673 tree maxbound = pdata->maxbound;
88d0c3f0 1674
84de9426 1675 if (!get_range_strlen (arg, &visited, SRK_LENRANGE, pdata, eltsize))
730832cd 1676 {
5d6655eb
MS
1677 /* On failure extend the length range to an impossible maximum
1678 (a valid MAXLEN must be less than PTRDIFF_MAX - 1). Other
1679 members can stay unchanged regardless. */
1680 pdata->minlen = ssize_int (0);
1681 pdata->maxlen = build_all_ones_cst (size_type_node);
730832cd 1682 }
5d6655eb
MS
1683 else if (!pdata->minlen)
1684 pdata->minlen = ssize_int (0);
1685
a7160771
MS
1686 /* If it's unchanged from its initial non-null value, set the conservative
1687 MAXBOUND to SIZE_MAX. Otherwise leave it null (if it is null). */
1688 if (maxbound && pdata->maxbound == maxbound)
1689 pdata->maxbound = build_all_ones_cst (size_type_node);
88d0c3f0
MS
1690
1691 if (visited)
1692 BITMAP_FREE (visited);
3f343040 1693
03c4a945 1694 return !integer_all_onesp (pdata->maxlen);
88d0c3f0
MS
1695}
1696
5d6655eb
MS
1697/* Return the maximum value for ARG given RKIND (see strlen_range_kind).
1698 For ARG of pointer types, NONSTR indicates if the caller is prepared
1699 to handle unterminated strings. For integer ARG and when RKIND ==
1700 SRK_INT_VALUE, NONSTR must be null.
e08341bb 1701
5d6655eb
MS
1702 If an unterminated array is discovered and our caller handles
1703 unterminated arrays, then bubble up the offending DECL and
e08341bb
MS
1704 return the maximum size. Otherwise return NULL. */
1705
598f7235
MS
1706static tree
1707get_maxval_strlen (tree arg, strlen_range_kind rkind, tree *nonstr = NULL)
dcb7fae2 1708{
598f7235
MS
1709 /* A non-null NONSTR is meaningless when determining the maximum
1710 value of an integer ARG. */
1711 gcc_assert (rkind != SRK_INT_VALUE || nonstr == NULL);
1712 /* ARG must have an integral type when RKIND says so. */
1713 gcc_assert (rkind != SRK_INT_VALUE || INTEGRAL_TYPE_P (TREE_TYPE (arg)));
1714
dcb7fae2 1715 bitmap visited = NULL;
3f343040 1716
5d6655eb
MS
1717 /* Reset LENDATA.MAXLEN if the call fails or when LENDATA.MAXLEN
1718 is unbounded. */
730832cd 1719 c_strlen_data lendata = { };
03c4a945 1720 if (!get_range_strlen (arg, &visited, rkind, &lendata, /* eltsize = */1))
730832cd 1721 lendata.maxlen = NULL_TREE;
5d6655eb
MS
1722 else if (lendata.maxlen && integer_all_onesp (lendata.maxlen))
1723 lendata.maxlen = NULL_TREE;
1724
dcb7fae2
RB
1725 if (visited)
1726 BITMAP_FREE (visited);
1727
e08341bb
MS
1728 if (nonstr)
1729 {
1730 /* For callers prepared to handle unterminated arrays set
1731 *NONSTR to point to the declaration of the array and return
1732 the maximum length/size. */
730832cd
MS
1733 *nonstr = lendata.decl;
1734 return lendata.maxlen;
e08341bb
MS
1735 }
1736
1737 /* Fail if the constant array isn't nul-terminated. */
730832cd 1738 return lendata.decl ? NULL_TREE : lendata.maxlen;
dcb7fae2
RB
1739}
1740
fef5a0d9
RB
1741
1742/* Fold function call to builtin strcpy with arguments DEST and SRC.
1743 When the length of SRC is known, the call is folded into a memcpy.
1744 Return false if no simplification can be made. */
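/* For illustration (D is a placeholder destination and the source is
   assumed to be a constant string), the folding below turns

     strcpy (d, "abc")

   into

     memcpy (d, "abc", 4)

   where the length passed to memcpy includes the terminating nul.  */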
1745
1746static bool
1747gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
dcb7fae2 1748 tree dest, tree src)
fef5a0d9 1749{
cc8bea0a
MS
1750 gimple *stmt = gsi_stmt (*gsi);
1751 location_t loc = gimple_location (stmt);
fef5a0d9
RB
1752 tree fn;
1753
1754 /* If SRC and DEST are the same (and not volatile), return DEST. */
1755 if (operand_equal_p (src, dest, 0))
1756 {
8cd95cec
MS
1757 /* Issue -Wrestrict unless the pointers are null (those do
1758 not point to objects and so do not indicate an overlap;
1759 such calls could be the result of sanitization and jump
1760 threading). */
1761 if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
e9b9fa4c
MS
1762 {
1763 tree func = gimple_call_fndecl (stmt);
cc8bea0a 1764
e9b9fa4c
MS
1765 warning_at (loc, OPT_Wrestrict,
1766 "%qD source argument is the same as destination",
1767 func);
1768 }
cc8bea0a 1769
fef5a0d9
RB
1770 replace_call_with_value (gsi, dest);
1771 return true;
1772 }
1773
1774 if (optimize_function_for_size_p (cfun))
1775 return false;
1776
1777 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1778 if (!fn)
1779 return false;
1780
e08341bb
MS
1781 /* Set to non-null if ARG refers to an unterminated array. */
1782 tree nonstr = NULL;
598f7235 1783 tree len = get_maxval_strlen (src, SRK_STRLEN, &nonstr);
e08341bb
MS
1784
1785 if (nonstr)
1786 {
1787 /* Avoid folding calls with unterminated arrays. */
1788 if (!gimple_no_warning_p (stmt))
1789 warn_string_no_nul (loc, "strcpy", src, nonstr);
1790 gimple_set_no_warning (stmt, true);
1791 return false;
1792 }
1793
fef5a0d9 1794 if (!len)
dcb7fae2 1795 return false;
fef5a0d9
RB
1796
1797 len = fold_convert_loc (loc, size_type_node, len);
1798 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1799 len = force_gimple_operand_gsi (gsi, len, true,
1800 NULL_TREE, true, GSI_SAME_STMT);
355fe088 1801 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
fef5a0d9
RB
1802 replace_call_with_call_and_fold (gsi, repl);
1803 return true;
1804}
1805
1806/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1807 The length of the source string is computed internally when needed.
1808 Return false if no simplification can be made. */
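/* For illustration (D and S are placeholders; source and bound assumed
   constant), the folding below turns

     strncpy (d, "abc", 3)   into   memcpy (d, "abc", 3)
     strncpy (d, s, 0)       into   d

   and is skipped when the bound exceeds strlen (SRC) + 1, since memcpy
   would not provide the zero padding strncpy requires.  */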
1809
1810static bool
dcb7fae2
RB
1811gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1812 tree dest, tree src, tree len)
fef5a0d9 1813{
025d57f0
MS
1814 gimple *stmt = gsi_stmt (*gsi);
1815 location_t loc = gimple_location (stmt);
6a33d0ff 1816 bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;
fef5a0d9
RB
1817
1818 /* If the LEN parameter is zero, return DEST. */
1819 if (integer_zerop (len))
1820 {
6a33d0ff
MS
1821 /* Avoid warning if the destination refers to an array/pointer
1822 decorated with attribute nonstring. */
1823 if (!nonstring)
1824 {
1825 tree fndecl = gimple_call_fndecl (stmt);
6a33d0ff
MS
1826
1827 /* Warn about the lack of nul termination: the result is not
1828 a (nul-terminated) string. */
598f7235 1829 tree slen = get_maxval_strlen (src, SRK_STRLEN);
6a33d0ff
MS
1830 if (slen && !integer_zerop (slen))
1831 warning_at (loc, OPT_Wstringop_truncation,
1832 "%G%qD destination unchanged after copying no bytes "
1833 "from a string of length %E",
8a45b051 1834 stmt, fndecl, slen);
6a33d0ff
MS
1835 else
1836 warning_at (loc, OPT_Wstringop_truncation,
1837 "%G%qD destination unchanged after copying no bytes",
8a45b051 1838 stmt, fndecl);
6a33d0ff 1839 }
025d57f0 1840
fef5a0d9
RB
1841 replace_call_with_value (gsi, dest);
1842 return true;
1843 }
1844
1845 /* We can't compare slen with len as constants below if len is not a
1846 constant. */
dcb7fae2 1847 if (TREE_CODE (len) != INTEGER_CST)
fef5a0d9
RB
1848 return false;
1849
fef5a0d9 1850 /* Now, we must be passed a constant src ptr parameter. */
598f7235 1851 tree slen = get_maxval_strlen (src, SRK_STRLEN);
dcb7fae2 1852 if (!slen || TREE_CODE (slen) != INTEGER_CST)
fef5a0d9
RB
1853 return false;
1854
025d57f0
MS
1855 /* The size of the source string including the terminating nul. */
1856 tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
fef5a0d9
RB
1857
1858 /* We do not support simplification of this case, though we do
1859 support it when expanding trees into RTL. */
1860 /* FIXME: generate a call to __builtin_memset. */
025d57f0 1861 if (tree_int_cst_lt (ssize, len))
fef5a0d9
RB
1862 return false;
1863
5d0d5d68
MS
1864 /* Diagnose truncation that leaves the copy unterminated. */
1865 maybe_diag_stxncpy_trunc (*gsi, src, len);
025d57f0 1866
fef5a0d9 1867 /* OK transform into builtin memcpy. */
025d57f0 1868 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
fef5a0d9
RB
1869 if (!fn)
1870 return false;
1871
1872 len = fold_convert_loc (loc, size_type_node, len);
1873 len = force_gimple_operand_gsi (gsi, len, true,
1874 NULL_TREE, true, GSI_SAME_STMT);
355fe088 1875 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
fef5a0d9 1876 replace_call_with_call_and_fold (gsi, repl);
025d57f0 1877
fef5a0d9
RB
1878 return true;
1879}
1880
71dea1dd
WD
1881/* Fold function call to builtin strchr or strrchr.
1882 If both arguments are constant, evaluate and fold the result,
1883 otherwise simplify str(r)chr (str, 0) into str + strlen (str).
912d9ec3
WD
1884 In general strlen is significantly faster than strchr
1885 due to being a simpler operation. */
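/* For illustration (assuming constant arguments where shown, S being a
   placeholder otherwise), the folding below turns

     strchr ("hello", 'l')    into   "hello" + 2
     strrchr ("hello", 'l')   into   "hello" + 3
     strchr (s, 0)            into   s + strlen (s).  */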
1886static bool
71dea1dd 1887gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
912d9ec3
WD
1888{
1889 gimple *stmt = gsi_stmt (*gsi);
1890 tree str = gimple_call_arg (stmt, 0);
1891 tree c = gimple_call_arg (stmt, 1);
1892 location_t loc = gimple_location (stmt);
71dea1dd
WD
1893 const char *p;
1894 char ch;
912d9ec3 1895
71dea1dd 1896 if (!gimple_call_lhs (stmt))
912d9ec3
WD
1897 return false;
1898
b5338fb3
MS
1899 /* Avoid folding if the first argument is not a nul-terminated array.
1900 Defer warning until later. */
1901 if (!check_nul_terminated_array (NULL_TREE, str))
1902 return false;
1903
71dea1dd
WD
1904 if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
1905 {
1906 const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);
1907
1908 if (p1 == NULL)
1909 {
1910 replace_call_with_value (gsi, integer_zero_node);
1911 return true;
1912 }
1913
1914 tree len = build_int_cst (size_type_node, p1 - p);
1915 gimple_seq stmts = NULL;
1916 gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1917 POINTER_PLUS_EXPR, str, len);
1918 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1919 gsi_replace_with_seq_vops (gsi, stmts);
1920 return true;
1921 }
1922
1923 if (!integer_zerop (c))
912d9ec3
WD
1924 return false;
1925
71dea1dd 1926 /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size. */
c8952930 1927 if (is_strrchr && optimize_function_for_size_p (cfun))
71dea1dd
WD
1928 {
1929 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1930
c8952930 1931 if (strchr_fn)
71dea1dd
WD
1932 {
1933 gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
1934 replace_call_with_call_and_fold (gsi, repl);
1935 return true;
1936 }
1937
1938 return false;
1939 }
1940
912d9ec3
WD
1941 tree len;
1942 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1943
1944 if (!strlen_fn)
1945 return false;
1946
1947 /* Create newstr = strlen (str). */
1948 gimple_seq stmts = NULL;
1949 gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
1950 gimple_set_location (new_stmt, loc);
a15ebbcd 1951 len = create_tmp_reg_or_ssa_name (size_type_node);
912d9ec3
WD
1952 gimple_call_set_lhs (new_stmt, len);
1953 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1954
1955 /* Create (str p+ strlen (str)). */
1956 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1957 POINTER_PLUS_EXPR, str, len);
1958 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1959 gsi_replace_with_seq_vops (gsi, stmts);
1960 /* gsi now points at the assignment to the lhs, get a
1961 stmt iterator to the strlen.
1962 ??? We can't use gsi_for_stmt as that doesn't work when the
1963 CFG isn't built yet. */
1964 gimple_stmt_iterator gsi2 = *gsi;
1965 gsi_prev (&gsi2);
1966 fold_stmt (&gsi2);
1967 return true;
1968}
1969
c8952930
JJ
1970/* Fold function call to builtin strstr.
1971 If both arguments are constant, evaluate and fold the result,
1972 additionally fold strstr (x, "") into x and strstr (x, "c")
1973 into strchr (x, 'c'). */
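/* For illustration (X is a placeholder; constant arguments assumed
   where shown), the folding below turns

     strstr ("haystack", "stack")   into   "haystack" + 3
     strstr (x, "")                 into   x
     strstr (x, "c")                into   strchr (x, 'c').  */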
1974static bool
1975gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
1976{
1977 gimple *stmt = gsi_stmt (*gsi);
b5338fb3
MS
1978 if (!gimple_call_lhs (stmt))
1979 return false;
1980
c8952930
JJ
1981 tree haystack = gimple_call_arg (stmt, 0);
1982 tree needle = gimple_call_arg (stmt, 1);
c8952930 1983
b5338fb3
MS
1984 /* Avoid folding if either argument is not a nul-terminated array.
1985 Defer warning until later. */
1986 if (!check_nul_terminated_array (NULL_TREE, haystack)
1987 || !check_nul_terminated_array (NULL_TREE, needle))
c8952930
JJ
1988 return false;
1989
b5338fb3 1990 const char *q = c_getstr (needle);
c8952930
JJ
1991 if (q == NULL)
1992 return false;
1993
b5338fb3 1994 if (const char *p = c_getstr (haystack))
c8952930
JJ
1995 {
1996 const char *r = strstr (p, q);
1997
1998 if (r == NULL)
1999 {
2000 replace_call_with_value (gsi, integer_zero_node);
2001 return true;
2002 }
2003
2004 tree len = build_int_cst (size_type_node, r - p);
2005 gimple_seq stmts = NULL;
2006 gimple *new_stmt
2007 = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
2008 haystack, len);
2009 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2010 gsi_replace_with_seq_vops (gsi, stmts);
2011 return true;
2012 }
2013
2014 /* For strstr (x, "") return x. */
2015 if (q[0] == '\0')
2016 {
2017 replace_call_with_value (gsi, haystack);
2018 return true;
2019 }
2020
2021 /* Transform strstr (x, "c") into strchr (x, 'c'). */
2022 if (q[1] == '\0')
2023 {
2024 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
2025 if (strchr_fn)
2026 {
2027 tree c = build_int_cst (integer_type_node, q[0]);
2028 gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
2029 replace_call_with_call_and_fold (gsi, repl);
2030 return true;
2031 }
2032 }
2033
2034 return false;
2035}
2036
fef5a0d9
RB
2037/* Simplify a call to the strcat builtin. DST and SRC are the arguments
2038 to the call.
2039
2040 Return true if a simplification was made (the call is replaced in
2041 place through the statement iterator), otherwise return false.
2042
2043 When the length of SRC is known and the block is optimized for
2044 speed, the strcat is split into a strlen of DST followed by a
2045 memcpy of SRC, including its terminating nul, to DST plus that
2046 length. */
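/* For illustration (D is a placeholder and the source is assumed to be
   a constant string), the folding below effectively rewrites

     strcat (d, "abc");

   as

     tmp = strlen (d);
     memcpy (d + tmp, "abc", 4);

   with the original value of D used for any result of the call.  */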
2054
2055static bool
dcb7fae2 2056gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
fef5a0d9 2057{
355fe088 2058 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2 2059 location_t loc = gimple_location (stmt);
fef5a0d9
RB
2060
2061 const char *p = c_getstr (src);
2062
2063 /* If the string length is zero, return the dst parameter. */
2064 if (p && *p == '\0')
2065 {
2066 replace_call_with_value (gsi, dst);
2067 return true;
2068 }
2069
2070 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
2071 return false;
2072
2073 /* See if we can store by pieces into (dst + strlen(dst)). */
2074 tree newdst;
2075 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
2076 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2077
2078 if (!strlen_fn || !memcpy_fn)
2079 return false;
2080
2081 /* If the length of the source string isn't computable don't
2082 split strcat into strlen and memcpy. */
598f7235 2083 tree len = get_maxval_strlen (src, SRK_STRLEN);
fef5a0d9 2084 if (! len)
fef5a0d9
RB
2085 return false;
2086
2087 /* Create strlen (dst). */
2088 gimple_seq stmts = NULL, stmts2;
355fe088 2089 gimple *repl = gimple_build_call (strlen_fn, 1, dst);
fef5a0d9 2090 gimple_set_location (repl, loc);
a15ebbcd 2091 newdst = create_tmp_reg_or_ssa_name (size_type_node);
fef5a0d9
RB
2092 gimple_call_set_lhs (repl, newdst);
2093 gimple_seq_add_stmt_without_update (&stmts, repl);
2094
2095 /* Create (dst p+ strlen (dst)). */
2096 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
2097 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
2098 gimple_seq_add_seq_without_update (&stmts, stmts2);
2099
2100 len = fold_convert_loc (loc, size_type_node, len);
2101 len = size_binop_loc (loc, PLUS_EXPR, len,
2102 build_int_cst (size_type_node, 1));
2103 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
2104 gimple_seq_add_seq_without_update (&stmts, stmts2);
2105
2106 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
2107 gimple_seq_add_stmt_without_update (&stmts, repl);
2108 if (gimple_call_lhs (stmt))
2109 {
2110 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
2111 gimple_seq_add_stmt_without_update (&stmts, repl);
2112 gsi_replace_with_seq_vops (gsi, stmts);
2113 /* gsi now points at the assignment to the lhs, get a
2114 stmt iterator to the memcpy call.
2115 ??? We can't use gsi_for_stmt as that doesn't work when the
2116 CFG isn't built yet. */
2117 gimple_stmt_iterator gsi2 = *gsi;
2118 gsi_prev (&gsi2);
2119 fold_stmt (&gsi2);
2120 }
2121 else
2122 {
2123 gsi_replace_with_seq_vops (gsi, stmts);
2124 fold_stmt (gsi);
2125 }
2126 return true;
2127}
2128
07f1cf56
RB
2129/* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
2130 are the arguments to the call. */
2131
2132static bool
2133gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
2134{
355fe088 2135 gimple *stmt = gsi_stmt (*gsi);
07f1cf56
RB
2136 tree dest = gimple_call_arg (stmt, 0);
2137 tree src = gimple_call_arg (stmt, 1);
2138 tree size = gimple_call_arg (stmt, 2);
2139 tree fn;
2140 const char *p;
2141
2142
2143 p = c_getstr (src);
2144 /* If the SRC parameter is "", return DEST. */
2145 if (p && *p == '\0')
2146 {
2147 replace_call_with_value (gsi, dest);
2148 return true;
2149 }
2150
2151 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
2152 return false;
2153
2154 /* If __builtin_strcat_chk is used, assume strcat is available. */
2155 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
2156 if (!fn)
2157 return false;
2158
355fe088 2159 gimple *repl = gimple_build_call (fn, 2, dest, src);
07f1cf56
RB
2160 replace_call_with_call_and_fold (gsi, repl);
2161 return true;
2162}
2163
ad03a744
RB
2164/* Simplify a call to the strncat builtin. */
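/* For illustration (D is a placeholder; source and bound assumed
   constant), the folding below turns

     strncat (d, "abc", 5)   into   strcat (d, "abc")
     strncat (d, s, 0)       into   d

   since a bound of at least strlen (SRC) copies the whole source.  */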
2165
2166static bool
2167gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
2168{
8a45b051 2169 gimple *stmt = gsi_stmt (*gsi);
ad03a744
RB
2170 tree dst = gimple_call_arg (stmt, 0);
2171 tree src = gimple_call_arg (stmt, 1);
2172 tree len = gimple_call_arg (stmt, 2);
2173
2174 const char *p = c_getstr (src);
2175
2176 /* If the requested length is zero, or the src parameter string
2177 length is zero, return the dst parameter. */
2178 if (integer_zerop (len) || (p && *p == '\0'))
2179 {
2180 replace_call_with_value (gsi, dst);
2181 return true;
2182 }
2183
025d57f0
MS
2184 if (TREE_CODE (len) != INTEGER_CST || !p)
2185 return false;
2186
2187 unsigned srclen = strlen (p);
2188
2189 int cmpsrc = compare_tree_int (len, srclen);
2190
2191 /* Return early if the requested len is less than the string length.
2192 Warnings will be issued elsewhere later. */
2193 if (cmpsrc < 0)
2194 return false;
2195
2196 unsigned HOST_WIDE_INT dstsize;
2197
2198 bool nowarn = gimple_no_warning_p (stmt);
2199
2200 if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
ad03a744 2201 {
025d57f0 2202 int cmpdst = compare_tree_int (len, dstsize);
ad03a744 2203
025d57f0
MS
2204 if (cmpdst >= 0)
2205 {
2206 tree fndecl = gimple_call_fndecl (stmt);
2207
2208 /* Strncat copies (at most) LEN bytes and always appends
2209 the terminating NUL so the specified bound should never
2210 be equal to (or greater than) the size of the destination.
2211 If it is, the copy could overflow. */
2212 location_t loc = gimple_location (stmt);
2213 nowarn = warning_at (loc, OPT_Wstringop_overflow_,
2214 cmpdst == 0
2215 ? G_("%G%qD specified bound %E equals "
2216 "destination size")
2217 : G_("%G%qD specified bound %E exceeds "
2218 "destination size %wu"),
2219 stmt, fndecl, len, dstsize);
2220 if (nowarn)
2221 gimple_set_no_warning (stmt, true);
2222 }
2223 }
ad03a744 2224
025d57f0
MS
2225 if (!nowarn && cmpsrc == 0)
2226 {
2227 tree fndecl = gimple_call_fndecl (stmt);
025d57f0 2228 location_t loc = gimple_location (stmt);
eec5f615
MS
2229
2230 /* To avoid possible overflow the specified bound should also
2231 not be equal to the length of the source, even when the size
2232 of the destination is unknown (it's not an uncommon mistake
2233 to specify the length of the source as the bound to strncpy). */
025d57f0
MS
2234 if (warning_at (loc, OPT_Wstringop_overflow_,
2235 "%G%qD specified bound %E equals source length",
2236 stmt, fndecl, len))
2237 gimple_set_no_warning (stmt, true);
ad03a744
RB
2238 }
2239
025d57f0
MS
2240 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
2241
2242 /* If the replacement _DECL isn't initialized, don't do the
2243 transformation. */
2244 if (!fn)
2245 return false;
2246
2247 /* Otherwise, emit a call to strcat. */
2248 gcall *repl = gimple_build_call (fn, 2, dst, src);
2249 replace_call_with_call_and_fold (gsi, repl);
2250 return true;
ad03a744
RB
2251}
2252
745583f9
RB
2253/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2254 LEN, and SIZE. */
2255
2256static bool
2257gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
2258{
355fe088 2259 gimple *stmt = gsi_stmt (*gsi);
745583f9
RB
2260 tree dest = gimple_call_arg (stmt, 0);
2261 tree src = gimple_call_arg (stmt, 1);
2262 tree len = gimple_call_arg (stmt, 2);
2263 tree size = gimple_call_arg (stmt, 3);
2264 tree fn;
2265 const char *p;
2266
2267 p = c_getstr (src);
2268 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2269 if ((p && *p == '\0')
2270 || integer_zerop (len))
2271 {
2272 replace_call_with_value (gsi, dest);
2273 return true;
2274 }
2275
2276 if (! tree_fits_uhwi_p (size))
2277 return false;
2278
2279 if (! integer_all_onesp (size))
2280 {
2281 tree src_len = c_strlen (src, 1);
2282 if (src_len
2283 && tree_fits_uhwi_p (src_len)
2284 && tree_fits_uhwi_p (len)
2285 && ! tree_int_cst_lt (len, src_len))
2286 {
2287 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2288 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
2289 if (!fn)
2290 return false;
2291
355fe088 2292 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
745583f9
RB
2293 replace_call_with_call_and_fold (gsi, repl);
2294 return true;
2295 }
2296 return false;
2297 }
2298
2299 /* If __builtin_strncat_chk is used, assume strncat is available. */
2300 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2301 if (!fn)
2302 return false;
2303
355fe088 2304 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
745583f9
RB
2305 replace_call_with_call_and_fold (gsi, repl);
2306 return true;
2307}
2308
a918bfbf
ML
2309/* Build and append gimple statements to STMTS that would load the first
2310 character of the memory location identified by STR. LOC is the location
2311 of the statement. */
2312
2313static tree
2314gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2315{
2316 tree var;
2317
2318 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2319 tree cst_uchar_ptr_node
2320 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2321 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2322
2323 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2324 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2325 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2326
2327 gimple_assign_set_lhs (stmt, var);
2328 gimple_seq_add_stmt_without_update (stmts, stmt);
2329
2330 return var;
2331}
2332
d2f8402a 2333/* Fold a call to the str{n}{case}cmp builtin pointed to by the GSI iterator. */
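/* For illustration (S, S1 and S2 are placeholders; constant operands
   assumed where shown), the folding below produces

     strcmp (s, s)          ->  0
     strncmp (s1, s2, 0)    ->  0
     strcmp ("abc", "abd")  ->  a negative constant
     strcmp (s, "")         ->  *(const unsigned char *) s.  */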
a918bfbf
ML
2334
2335static bool
2336gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
2337{
2338 gimple *stmt = gsi_stmt (*gsi);
2339 tree callee = gimple_call_fndecl (stmt);
2340 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2341
2342 tree type = integer_type_node;
2343 tree str1 = gimple_call_arg (stmt, 0);
2344 tree str2 = gimple_call_arg (stmt, 1);
2345 tree lhs = gimple_call_lhs (stmt);
d86d8b35
MS
2346
2347 tree bound_node = NULL_TREE;
d2f8402a 2348 unsigned HOST_WIDE_INT bound = HOST_WIDE_INT_M1U;
a918bfbf
ML
2349
2350 /* Handle strncmp and strncasecmp functions. */
2351 if (gimple_call_num_args (stmt) == 3)
2352 {
d86d8b35
MS
2353 bound_node = gimple_call_arg (stmt, 2);
2354 if (tree_fits_uhwi_p (bound_node))
2355 bound = tree_to_uhwi (bound_node);
a918bfbf
ML
2356 }
2357
d86d8b35 2358 /* If the BOUND parameter is zero, return zero. */
d2f8402a 2359 if (bound == 0)
a918bfbf
ML
2360 {
2361 replace_call_with_value (gsi, integer_zero_node);
2362 return true;
2363 }
2364
2365 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
2366 if (operand_equal_p (str1, str2, 0))
2367 {
2368 replace_call_with_value (gsi, integer_zero_node);
2369 return true;
2370 }
2371
d2f8402a
MS
2372 /* Initially set to the number of characters, including the terminating
2373 nul if each array has one. LENx == strnlen (Sx, LENx) implies that
2374 the array Sx is not terminated by a nul.
2375 For nul-terminated strings LENx is then adjusted to their length so that
2376 LENx == NULPOSx holds. */
2377 unsigned HOST_WIDE_INT len1 = HOST_WIDE_INT_MAX, len2 = len1;
2378 const char *p1 = c_getstr (str1, &len1);
2379 const char *p2 = c_getstr (str2, &len2);
2380
2381 /* The position of the terminating nul character if one exists, otherwise
2382 a value greater than LENx. */
2383 unsigned HOST_WIDE_INT nulpos1 = HOST_WIDE_INT_MAX, nulpos2 = nulpos1;
2384
2385 if (p1)
2386 {
2387 size_t n = strnlen (p1, len1);
2388 if (n < len1)
2389 len1 = nulpos1 = n;
2390 }
2391
2392 if (p2)
2393 {
2394 size_t n = strnlen (p2, len2);
2395 if (n < len2)
2396 len2 = nulpos2 = n;
2397 }
a918bfbf
ML
2398
2399 /* For known strings, return an immediate value. */
2400 if (p1 && p2)
2401 {
2402 int r = 0;
2403 bool known_result = false;
2404
2405 switch (fcode)
2406 {
2407 case BUILT_IN_STRCMP:
8b0b334a 2408 case BUILT_IN_STRCMP_EQ:
d2f8402a 2409 if (len1 != nulpos1 || len2 != nulpos2)
a918bfbf 2410 break;
d2f8402a
MS
2411
2412 r = strcmp (p1, p2);
2413 known_result = true;
2414 break;
2415
a918bfbf 2416 case BUILT_IN_STRNCMP:
8b0b334a 2417 case BUILT_IN_STRNCMP_EQ:
a918bfbf 2418 {
d86d8b35
MS
2419 if (bound == HOST_WIDE_INT_M1U)
2420 break;
2421
d2f8402a
MS
2422 /* Reduce the bound to be no more than the length
2423 of the shorter of the two strings, or the sizes
2424 of the unterminated arrays. */
2425 unsigned HOST_WIDE_INT n = bound;
2426
2427 if (len1 == nulpos1 && len1 < n)
2428 n = len1 + 1;
2429 if (len2 == nulpos2 && len2 < n)
2430 n = len2 + 1;
2431
2432 if (MIN (nulpos1, nulpos2) + 1 < n)
a918bfbf 2433 break;
d2f8402a
MS
2434
2435 r = strncmp (p1, p2, n);
a918bfbf
ML
2436 known_result = true;
2437 break;
2438 }
2439 /* The only handleable situation is where the strings are equal (result 0),
2440 which is already handled by the operand_equal_p case above. */
2441 case BUILT_IN_STRCASECMP:
2442 break;
2443 case BUILT_IN_STRNCASECMP:
2444 {
d2f8402a 2445 if (bound == HOST_WIDE_INT_M1U)
a918bfbf 2446 break;
d2f8402a 2447 r = strncmp (p1, p2, bound);
a918bfbf
ML
2448 if (r == 0)
2449 known_result = true;
5de73c05 2450 break;
a918bfbf
ML
2451 }
2452 default:
2453 gcc_unreachable ();
2454 }
2455
2456 if (known_result)
2457 {
2458 replace_call_with_value (gsi, build_cmp_result (type, r));
2459 return true;
2460 }
2461 }
2462
d2f8402a 2463 bool nonzero_bound = (bound >= 1 && bound < HOST_WIDE_INT_M1U)
a918bfbf 2464 || fcode == BUILT_IN_STRCMP
8b0b334a 2465 || fcode == BUILT_IN_STRCMP_EQ
a918bfbf
ML
2466 || fcode == BUILT_IN_STRCASECMP;
2467
2468 location_t loc = gimple_location (stmt);
2469
2470 /* If the second arg is "", return *(const unsigned char*)arg1. */
d2f8402a 2471 if (p2 && *p2 == '\0' && nonzero_bound)
a918bfbf
ML
2472 {
2473 gimple_seq stmts = NULL;
2474 tree var = gimple_load_first_char (loc, str1, &stmts);
2475 if (lhs)
2476 {
2477 stmt = gimple_build_assign (lhs, NOP_EXPR, var);
2478 gimple_seq_add_stmt_without_update (&stmts, stmt);
2479 }
2480
2481 gsi_replace_with_seq_vops (gsi, stmts);
2482 return true;
2483 }
2484
2485 /* If the first arg is "", return -*(const unsigned char*)arg2. */
d2f8402a 2486 if (p1 && *p1 == '\0' && nonzero_bound)
a918bfbf
ML
2487 {
2488 gimple_seq stmts = NULL;
2489 tree var = gimple_load_first_char (loc, str2, &stmts);
2490
2491 if (lhs)
2492 {
2493 tree c = create_tmp_reg_or_ssa_name (integer_type_node);
2494 stmt = gimple_build_assign (c, NOP_EXPR, var);
2495 gimple_seq_add_stmt_without_update (&stmts, stmt);
2496
2497 stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
2498 gimple_seq_add_stmt_without_update (&stmts, stmt);
2499 }
2500
2501 gsi_replace_with_seq_vops (gsi, stmts);
2502 return true;
2503 }
2504
d2f8402a 2505 /* If BOUND is one, return an expression corresponding to
a918bfbf 2506 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
d2f8402a 2507 if (fcode == BUILT_IN_STRNCMP && bound == 1)
a918bfbf
ML
2508 {
2509 gimple_seq stmts = NULL;
2510 tree temp1 = gimple_load_first_char (loc, str1, &stmts);
2511 tree temp2 = gimple_load_first_char (loc, str2, &stmts);
2512
2513 if (lhs)
2514 {
2515 tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
2516 gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
2517 gimple_seq_add_stmt_without_update (&stmts, convert1);
2518
2519 tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
2520 gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
2521 gimple_seq_add_stmt_without_update (&stmts, convert2);
2522
2523 stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
2524 gimple_seq_add_stmt_without_update (&stmts, stmt);
2525 }
2526
2527 gsi_replace_with_seq_vops (gsi, stmts);
2528 return true;
2529 }
2530
d2f8402a
MS
2531 /* If BOUND is greater than the length of one constant string,
2532 and the other argument is also a nul-terminated string, replace
2533 strncmp with strcmp. */
2534 if (fcode == BUILT_IN_STRNCMP
2535 && bound > 0 && bound < HOST_WIDE_INT_M1U
2536 && ((p2 && len2 < bound && len2 == nulpos2)
2537 || (p1 && len1 < bound && len1 == nulpos1)))
caed5c92
QZ
2538 {
2539 tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
2540 if (!fn)
2541 return false;
2542 gimple *repl = gimple_build_call (fn, 2, str1, str2);
2543 replace_call_with_call_and_fold (gsi, repl);
2544 return true;
2545 }
2546
a918bfbf
ML
2547 return false;
2548}
2549
488c6247
ML
2550/* Fold a call to memchr pointed to by the GSI iterator. */
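/* For illustration (S and C are placeholders; constant arguments
   assumed where shown), the folding below turns

     memchr ("hello", 'l', 5)   into   "hello" + 2
     memchr (s, c, 0)           into   a null pointer

   and a search of a constant string that cannot find C also folds to
   a null pointer when the bound does not exceed the underlying array.  */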
2551
2552static bool
2553gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
2554{
2555 gimple *stmt = gsi_stmt (*gsi);
2556 tree lhs = gimple_call_lhs (stmt);
2557 tree arg1 = gimple_call_arg (stmt, 0);
2558 tree arg2 = gimple_call_arg (stmt, 1);
2559 tree len = gimple_call_arg (stmt, 2);
2560
2561 /* If the LEN parameter is zero, return zero. */
2562 if (integer_zerop (len))
2563 {
2564 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2565 return true;
2566 }
2567
2568 char c;
2569 if (TREE_CODE (arg2) != INTEGER_CST
2570 || !tree_fits_uhwi_p (len)
2571 || !target_char_cst_p (arg2, &c))
2572 return false;
2573
2574 unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
2575 unsigned HOST_WIDE_INT string_length;
2576 const char *p1 = c_getstr (arg1, &string_length);
2577
2578 if (p1)
2579 {
2580 const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
2581 if (r == NULL)
2582 {
5fd336bb
JM
2583 tree mem_size, offset_node;
2584 string_constant (arg1, &offset_node, &mem_size, NULL);
2585 unsigned HOST_WIDE_INT offset = (offset_node == NULL_TREE)
2586 ? 0 : tree_to_uhwi (offset_node);
2587 /* MEM_SIZE is the size of the array the string literal
2588 is stored in. */
2589 unsigned HOST_WIDE_INT string_size = tree_to_uhwi (mem_size) - offset;
2590 gcc_checking_assert (string_length <= string_size);
2591 if (length <= string_size)
488c6247
ML
2592 {
2593 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2594 return true;
2595 }
2596 }
2597 else
2598 {
2599 unsigned HOST_WIDE_INT offset = r - p1;
2600 gimple_seq stmts = NULL;
2601 if (lhs != NULL_TREE)
2602 {
2603 tree offset_cst = build_int_cst (TREE_TYPE (len), offset);
2604 gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
2605 arg1, offset_cst);
2606 gimple_seq_add_stmt_without_update (&stmts, stmt);
2607 }
2608 else
2609 gimple_seq_add_stmt_without_update (&stmts,
2610 gimple_build_nop ());
2611
2612 gsi_replace_with_seq_vops (gsi, stmts);
2613 return true;
2614 }
2615 }
2616
2617 return false;
2618}
a918bfbf 2619
fef5a0d9
RB
2620/* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
2621 to the call. IGNORE is true if the value returned
2622 by the builtin will be ignored. UNLOCKED is true if this is
2623 actually a call to fputs_unlocked. If LEN is non-NULL, it represents
2624 the known length of the string. Return false if no simplification
2625 was possible. */
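/* For illustration (F is a placeholder stream and the string length is
   assumed known), when the result of fputs is unused the folding below
   turns

     fputs ("", f)      into   nothing (the call is removed)
     fputs ("a", f)     into   fputc ('a', f)
     fputs ("abc", f)   into   fwrite ("abc", 1, 3, f)

   with the fwrite form skipped when optimizing for size.  */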
2626
2627static bool
2628gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
fef5a0d9 2629 tree arg0, tree arg1,
dcb7fae2 2630 bool unlocked)
fef5a0d9 2631{
355fe088 2632 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2 2633
fef5a0d9
RB
2634 /* If we're using an unlocked function, assume the other unlocked
2635 functions exist explicitly. */
2636 tree const fn_fputc = (unlocked
2637 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
2638 : builtin_decl_implicit (BUILT_IN_FPUTC));
2639 tree const fn_fwrite = (unlocked
2640 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
2641 : builtin_decl_implicit (BUILT_IN_FWRITE));
2642
2643 /* If the return value is used, don't do the transformation. */
dcb7fae2 2644 if (gimple_call_lhs (stmt))
fef5a0d9
RB
2645 return false;
2646
fef5a0d9
RB
2647 /* Get the length of the string passed to fputs. If the length
2648 can't be determined, punt. */
598f7235 2649 tree len = get_maxval_strlen (arg0, SRK_STRLEN);
fef5a0d9
RB
2650 if (!len
2651 || TREE_CODE (len) != INTEGER_CST)
2652 return false;
2653
2654 switch (compare_tree_int (len, 1))
2655 {
2656 case -1: /* length is 0, delete the call entirely. */
2657 replace_call_with_value (gsi, integer_zero_node);
2658 return true;
2659
2660 case 0: /* length is 1, call fputc. */
2661 {
2662 const char *p = c_getstr (arg0);
2663 if (p != NULL)
2664 {
2665 if (!fn_fputc)
2666 return false;
2667
355fe088 2668 gimple *repl = gimple_build_call (fn_fputc, 2,
fef5a0d9
RB
2669 build_int_cst
2670 (integer_type_node, p[0]), arg1);
2671 replace_call_with_call_and_fold (gsi, repl);
2672 return true;
2673 }
2674 }
2675 /* FALLTHROUGH */
2676 case 1: /* length is greater than 1, call fwrite. */
2677 {
2678 /* If optimizing for size keep fputs. */
2679 if (optimize_function_for_size_p (cfun))
2680 return false;
2681 /* New argument list transforming fputs(string, stream) to
2682 fwrite(string, 1, len, stream). */
2683 if (!fn_fwrite)
2684 return false;
2685
355fe088 2686 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
fef5a0d9
RB
2687 size_one_node, len, arg1);
2688 replace_call_with_call_and_fold (gsi, repl);
2689 return true;
2690 }
2691 default:
2692 gcc_unreachable ();
2693 }
2694 return false;
2695}
2696
2697/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
2698 DEST, SRC, LEN, and SIZE are the arguments to the call.
2699 IGNORE is true if the return value can be ignored. FCODE is the BUILT_IN_*
2700 code of the builtin. If MAXLEN is not NULL, it is the maximum length
2701 passed as third argument. */
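/* For illustration (D, S, N and SZ are placeholders), the folding below
   turns

     __memcpy_chk (d, s, n, sz)   into   memcpy (d, s, n)

   when SZ is unknown (all ones) or when N is known not to exceed SZ;
   the mempcpy, memmove and memset variants are handled the same way.  */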
2702
2703static bool
2704gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
fef5a0d9 2705 tree dest, tree src, tree len, tree size,
fef5a0d9
RB
2706 enum built_in_function fcode)
2707{
355fe088 2708 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2
RB
2709 location_t loc = gimple_location (stmt);
2710 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
fef5a0d9
RB
2711 tree fn;
2712
2713 /* If SRC and DEST are the same (and not volatile), return DEST
2714 (resp. DEST+LEN for __mempcpy_chk). */
2715 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
2716 {
2717 if (fcode != BUILT_IN_MEMPCPY_CHK)
2718 {
2719 replace_call_with_value (gsi, dest);
2720 return true;
2721 }
2722 else
2723 {
74e3c262
RB
2724 gimple_seq stmts = NULL;
2725 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
22518428
JJ
2726 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
2727 TREE_TYPE (dest), dest, len);
74e3c262 2728 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
fef5a0d9
RB
2729 replace_call_with_value (gsi, temp);
2730 return true;
2731 }
2732 }
2733
2734 if (! tree_fits_uhwi_p (size))
2735 return false;
2736
598f7235 2737 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
fef5a0d9
RB
2738 if (! integer_all_onesp (size))
2739 {
2740 if (! tree_fits_uhwi_p (len))
2741 {
2742 /* If LEN is not constant, try MAXLEN too.
2743 For MAXLEN only allow optimizing into non-_ocs function
2744 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2745 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2746 {
2747 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
2748 {
2749 /* (void) __mempcpy_chk () can be optimized into
2750 (void) __memcpy_chk (). */
2751 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2752 if (!fn)
2753 return false;
2754
355fe088 2755 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
fef5a0d9
RB
2756 replace_call_with_call_and_fold (gsi, repl);
2757 return true;
2758 }
2759 return false;
2760 }
2761 }
2762 else
2763 maxlen = len;
2764
2765 if (tree_int_cst_lt (size, maxlen))
2766 return false;
2767 }
2768
2769 fn = NULL_TREE;
2770 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
2771 mem{cpy,pcpy,move,set} is available. */
2772 switch (fcode)
2773 {
2774 case BUILT_IN_MEMCPY_CHK:
2775 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
2776 break;
2777 case BUILT_IN_MEMPCPY_CHK:
2778 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
2779 break;
2780 case BUILT_IN_MEMMOVE_CHK:
2781 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
2782 break;
2783 case BUILT_IN_MEMSET_CHK:
2784 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
2785 break;
2786 default:
2787 break;
2788 }
2789
2790 if (!fn)
2791 return false;
2792
355fe088 2793 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
fef5a0d9
RB
2794 replace_call_with_call_and_fold (gsi, repl);
2795 return true;
2796}
2797
2798/* Fold a call to the __st[rp]cpy_chk builtin.
2799 DEST, SRC, and SIZE are the arguments to the call.
2800 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
2801 code of the builtin. If MAXLEN is not NULL, it is the maximum length of
2802 strings passed as second argument. */
2803
2804static bool
2805gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
dcb7fae2 2806 tree dest,
fef5a0d9 2807 tree src, tree size,
fef5a0d9
RB
2808 enum built_in_function fcode)
2809{
355fe088 2810 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2
RB
2811 location_t loc = gimple_location (stmt);
2812 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
fef5a0d9
RB
2813 tree len, fn;
2814
2815 /* If SRC and DEST are the same (and not volatile), return DEST. */
2816 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
2817 {
8cd95cec
MS
2818 /* Issue -Wrestrict unless the pointers are null (those do
2819 not point to objects and so do not indicate an overlap;
2820 such calls could be the result of sanitization and jump
2821 threading). */
2822 if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
e9b9fa4c
MS
2823 {
2824 tree func = gimple_call_fndecl (stmt);
cc8bea0a 2825
e9b9fa4c
MS
2826 warning_at (loc, OPT_Wrestrict,
2827 "%qD source argument is the same as destination",
2828 func);
2829 }
cc8bea0a 2830
fef5a0d9
RB
2831 replace_call_with_value (gsi, dest);
2832 return true;
2833 }
2834
2835 if (! tree_fits_uhwi_p (size))
2836 return false;
2837
598f7235 2838 tree maxlen = get_maxval_strlen (src, SRK_STRLENMAX);
fef5a0d9
RB
2839 if (! integer_all_onesp (size))
2840 {
2841 len = c_strlen (src, 1);
2842 if (! len || ! tree_fits_uhwi_p (len))
2843 {
2844 /* If LEN is not constant, try MAXLEN too.
2845 For MAXLEN only allow optimizing into non-_ocs function
2846 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2847 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2848 {
2849 if (fcode == BUILT_IN_STPCPY_CHK)
2850 {
2851 if (! ignore)
2852 return false;
2853
2854 /* If return value of __stpcpy_chk is ignored,
2855 optimize into __strcpy_chk. */
2856 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
2857 if (!fn)
2858 return false;
2859
355fe088 2860 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
fef5a0d9
RB
2861 replace_call_with_call_and_fold (gsi, repl);
2862 return true;
2863 }
2864
2865 if (! len || TREE_SIDE_EFFECTS (len))
2866 return false;
2867
2868 /* If c_strlen returned something, but not a constant,
2869 transform __strcpy_chk into __memcpy_chk. */
2870 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2871 if (!fn)
2872 return false;
2873
74e3c262 2874 gimple_seq stmts = NULL;
770fe3a3 2875 len = force_gimple_operand (len, &stmts, true, NULL_TREE);
74e3c262
RB
2876 len = gimple_convert (&stmts, loc, size_type_node, len);
2877 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
2878 build_int_cst (size_type_node, 1));
2879 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
355fe088 2880 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
fef5a0d9
RB
2881 replace_call_with_call_and_fold (gsi, repl);
2882 return true;
2883 }
e256dfce 2884 }
fef5a0d9
RB
2885 else
2886 maxlen = len;
2887
2888 if (! tree_int_cst_lt (maxlen, size))
2889 return false;
e256dfce
RG
2890 }
2891
fef5a0d9
RB
2892 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
2893 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
2894 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
2895 if (!fn)
2896 return false;
2897
355fe088 2898 gimple *repl = gimple_build_call (fn, 2, dest, src);
fef5a0d9
RB
2899 replace_call_with_call_and_fold (gsi, repl);
2900 return true;
2901}
2902
2903/* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
2904 are the arguments to the call. If MAXLEN is not NULL, it is the maximum
2905 length passed as third argument. IGNORE is true if return value can be
2906 ignored. FCODE is the BUILT_IN_* code of the builtin. */
2907
2908static bool
2909gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
2910 tree dest, tree src,
dcb7fae2 2911 tree len, tree size,
fef5a0d9
RB
2912 enum built_in_function fcode)
2913{
355fe088 2914 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2 2915 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
fef5a0d9
RB
2916 tree fn;
2917
2918 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
cbdd87d4 2919 {
fef5a0d9
RB
2920 /* If return value of __stpncpy_chk is ignored,
2921 optimize into __strncpy_chk. */
2922 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
2923 if (fn)
2924 {
355fe088 2925 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
fef5a0d9
RB
2926 replace_call_with_call_and_fold (gsi, repl);
2927 return true;
2928 }
cbdd87d4
RG
2929 }
2930
fef5a0d9
RB
2931 if (! tree_fits_uhwi_p (size))
2932 return false;
2933
598f7235 2934 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
fef5a0d9 2935 if (! integer_all_onesp (size))
cbdd87d4 2936 {
fef5a0d9 2937 if (! tree_fits_uhwi_p (len))
fe2ef088 2938 {
fef5a0d9
RB
2939 /* If LEN is not constant, try MAXLEN too.
2940 For MAXLEN only allow optimizing into non-_ocs function
2941 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2942 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2943 return false;
8a1561bc 2944 }
fef5a0d9
RB
2945 else
2946 maxlen = len;
2947
2948 if (tree_int_cst_lt (size, maxlen))
2949 return false;
cbdd87d4
RG
2950 }
2951
fef5a0d9
RB
2952 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2953 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
2954 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
2955 if (!fn)
2956 return false;
2957
355fe088 2958 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
fef5a0d9
RB
2959 replace_call_with_call_and_fold (gsi, repl);
2960 return true;
cbdd87d4
RG
2961}
2962
2625bb5d
RB
2963/* Fold function call to builtin stpcpy with arguments DEST and SRC.
2964 Return false if no simplification can be made. */
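/* For illustration (D and P are placeholders and the source is assumed
   to be a constant string), the folding below turns

     p = stpcpy (d, "abc");

   into

     memcpy (d, "abc", 4);
     p = d + 3;

   and when the result is unused the call simply becomes strcpy.  */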
2965
2966static bool
2967gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
2968{
2969 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2970 location_t loc = gimple_location (stmt);
2971 tree dest = gimple_call_arg (stmt, 0);
2972 tree src = gimple_call_arg (stmt, 1);
01b0acb7 2973 tree fn, lenp1;
2625bb5d
RB
2974
2975 /* If the result is unused, replace stpcpy with strcpy. */
2976 if (gimple_call_lhs (stmt) == NULL_TREE)
2977 {
2978 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2979 if (!fn)
2980 return false;
2981 gimple_call_set_fndecl (stmt, fn);
2982 fold_stmt (gsi);
2983 return true;
2984 }
2985
01b0acb7 2986 /* Set to non-null if ARG refers to an unterminated array. */
3f46ef1f 2987 c_strlen_data data = { };
7d583f42 2988 tree len = c_strlen (src, 1, &data, 1);
2625bb5d
RB
2989 if (!len
2990 || TREE_CODE (len) != INTEGER_CST)
01b0acb7 2991 {
7d583f42
JL
2992 data.decl = unterminated_array (src);
2993 if (!data.decl)
01b0acb7
MS
2994 return false;
2995 }
2996
7d583f42 2997 if (data.decl)
01b0acb7
MS
2998 {
2999 /* Avoid folding calls with unterminated arrays. */
3000 if (!gimple_no_warning_p (stmt))
7d583f42 3001 warn_string_no_nul (loc, "stpcpy", src, data.decl);
01b0acb7
MS
3002 gimple_set_no_warning (stmt, true);
3003 return false;
3004 }
2625bb5d
RB
3005
3006 if (optimize_function_for_size_p (cfun)
3007 /* If length is zero it's small enough. */
3008 && !integer_zerop (len))
3009 return false;
3010
3011 /* If the source has a known length replace stpcpy with memcpy. */
3012 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3013 if (!fn)
3014 return false;
3015
3016 gimple_seq stmts = NULL;
3017 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
3018 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
3019 tem, build_int_cst (size_type_node, 1));
3020 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3021 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
779724a5 3022 gimple_move_vops (repl, stmt);
2625bb5d
RB
3023 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
3024 /* Replace the result with dest + len. */
3025 stmts = NULL;
3026 tem = gimple_convert (&stmts, loc, sizetype, len);
3027 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3028 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
3029 POINTER_PLUS_EXPR, dest, tem);
f6b4dc28 3030 gsi_replace (gsi, ret, false);
2625bb5d
RB
3031 /* Finally fold the memcpy call. */
3032 gimple_stmt_iterator gsi2 = *gsi;
3033 gsi_prev (&gsi2);
3034 fold_stmt (&gsi2);
3035 return true;
3036}
3037
fef5a0d9
RB
3038/* Fold a call to __{,v}snprintf_chk into {,v}snprintf. Return
3039 false if a normal call should be emitted rather than expanding
3040 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
3041 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is the maximum length
3042 passed as second argument. */
cbdd87d4
RG
3043
3044static bool
fef5a0d9 3045gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
dcb7fae2 3046 enum built_in_function fcode)
cbdd87d4 3047{
538dd0b7 3048 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
fef5a0d9
RB
3049 tree dest, size, len, fn, fmt, flag;
3050 const char *fmt_str;
cbdd87d4 3051
fef5a0d9
RB
3052 /* Verify the required arguments in the original call. */
3053 if (gimple_call_num_args (stmt) < 5)
3054 return false;
cbdd87d4 3055
fef5a0d9
RB
3056 dest = gimple_call_arg (stmt, 0);
3057 len = gimple_call_arg (stmt, 1);
3058 flag = gimple_call_arg (stmt, 2);
3059 size = gimple_call_arg (stmt, 3);
3060 fmt = gimple_call_arg (stmt, 4);
3061
3062 if (! tree_fits_uhwi_p (size))
3063 return false;
3064
3065 if (! integer_all_onesp (size))
3066 {
598f7235 3067 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
fef5a0d9 3068 if (! tree_fits_uhwi_p (len))
cbdd87d4 3069 {
fef5a0d9
RB
3070 /* If LEN is not constant, try MAXLEN too.
3071 For MAXLEN only allow optimizing into non-_ocs function
3072 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
3073 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
cbdd87d4
RG
3074 return false;
3075 }
3076 else
fef5a0d9 3077 maxlen = len;
cbdd87d4 3078
fef5a0d9
RB
3079 if (tree_int_cst_lt (size, maxlen))
3080 return false;
3081 }
cbdd87d4 3082
fef5a0d9
RB
3083 if (!init_target_chars ())
3084 return false;
cbdd87d4 3085
fef5a0d9
RB
3086 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
3087 or if format doesn't contain % chars or is "%s". */
3088 if (! integer_zerop (flag))
3089 {
3090 fmt_str = c_getstr (fmt);
3091 if (fmt_str == NULL)
3092 return false;
3093 if (strchr (fmt_str, target_percent) != NULL
3094 && strcmp (fmt_str, target_percent_s))
3095 return false;
cbdd87d4
RG
3096 }
3097
fef5a0d9
RB
3098 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
3099 available. */
3100 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
3101 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
3102 if (!fn)
491e0b9b
RG
3103 return false;
3104
fef5a0d9
RB
3105 /* Replace the called function and the first 5 argument by 3 retaining
3106 trailing varargs. */
3107 gimple_call_set_fndecl (stmt, fn);
3108 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
3109 gimple_call_set_arg (stmt, 0, dest);
3110 gimple_call_set_arg (stmt, 1, len);
3111 gimple_call_set_arg (stmt, 2, fmt);
3112 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
3113 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
3114 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
3115 fold_stmt (gsi);
3116 return true;
3117}
cbdd87d4 3118
fef5a0d9
RB
3119/* Fold a call to __{,v}sprintf_chk into {,v}sprintf.
3120 Return false if a normal call should be emitted rather than
3121 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
3122 or BUILT_IN_VSPRINTF_CHK. */
cbdd87d4 3123
fef5a0d9
RB
3124static bool
3125gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
3126 enum built_in_function fcode)
3127{
538dd0b7 3128 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
fef5a0d9
RB
3129 tree dest, size, len, fn, fmt, flag;
3130 const char *fmt_str;
3131 unsigned nargs = gimple_call_num_args (stmt);
cbdd87d4 3132
fef5a0d9
RB
3133 /* Verify the required arguments in the original call. */
3134 if (nargs < 4)
3135 return false;
3136 dest = gimple_call_arg (stmt, 0);
3137 flag = gimple_call_arg (stmt, 1);
3138 size = gimple_call_arg (stmt, 2);
3139 fmt = gimple_call_arg (stmt, 3);
3140
3141 if (! tree_fits_uhwi_p (size))
3142 return false;
3143
3144 len = NULL_TREE;
3145
3146 if (!init_target_chars ())
3147 return false;
3148
3149 /* Check whether the format is a literal string constant. */
3150 fmt_str = c_getstr (fmt);
3151 if (fmt_str != NULL)
3152 {
3153 /* If the format doesn't contain % args or %%, we know the size. */
3154 if (strchr (fmt_str, target_percent) == 0)
cbdd87d4 3155 {
fef5a0d9
RB
3156 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
3157 len = build_int_cstu (size_type_node, strlen (fmt_str));
3158 }
3159 /* If the format is "%s" and first ... argument is a string literal,
3160 we know the size too. */
3161 else if (fcode == BUILT_IN_SPRINTF_CHK
3162 && strcmp (fmt_str, target_percent_s) == 0)
3163 {
3164 tree arg;
cbdd87d4 3165
fef5a0d9
RB
3166 if (nargs == 5)
3167 {
3168 arg = gimple_call_arg (stmt, 4);
3169 if (POINTER_TYPE_P (TREE_TYPE (arg)))
3170 {
3171 len = c_strlen (arg, 1);
3172 if (! len || ! tree_fits_uhwi_p (len))
3173 len = NULL_TREE;
3174 }
3175 }
3176 }
3177 }
cbdd87d4 3178
fef5a0d9
RB
3179 if (! integer_all_onesp (size))
3180 {
3181 if (! len || ! tree_int_cst_lt (len, size))
3182 return false;
3183 }
cbdd87d4 3184
fef5a0d9
RB
3185 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
3186 or if format doesn't contain % chars or is "%s". */
3187 if (! integer_zerop (flag))
3188 {
3189 if (fmt_str == NULL)
3190 return false;
3191 if (strchr (fmt_str, target_percent) != NULL
3192 && strcmp (fmt_str, target_percent_s))
3193 return false;
3194 }
cbdd87d4 3195
fef5a0d9
RB
3196 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
3197 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
3198 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
3199 if (!fn)
3200 return false;
3201
3202 /* Replace the called function and the first 4 argument by 2 retaining
3203 trailing varargs. */
3204 gimple_call_set_fndecl (stmt, fn);
3205 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
3206 gimple_call_set_arg (stmt, 0, dest);
3207 gimple_call_set_arg (stmt, 1, fmt);
3208 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
3209 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
3210 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
3211 fold_stmt (gsi);
3212 return true;
3213}
3214
35770bb2
RB
3215/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
3216 ORIG may be null if this is a 2-argument call. We don't attempt to
3217 simplify calls with more than 3 arguments.
3218
a104bd88 3219 Return true if simplification was possible, otherwise false. */
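/* For illustration (D and S are placeholders and the format is assumed
   to be a constant string), the folding below turns

     sprintf (d, "hello")    into   strcpy (d, "hello")
     sprintf (d, "%s", s)    into   strcpy (d, s)

   with any used result replaced by the known length of the copied
   string.  */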
35770bb2 3220
a104bd88 3221bool
dcb7fae2 3222gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
35770bb2 3223{
355fe088 3224 gimple *stmt = gsi_stmt (*gsi);
35770bb2
RB
3225 tree dest = gimple_call_arg (stmt, 0);
3226 tree fmt = gimple_call_arg (stmt, 1);
3227 tree orig = NULL_TREE;
3228 const char *fmt_str = NULL;
3229
3230 /* Verify the required arguments in the original call. We deal with two
3231 types of sprintf() calls: 'sprintf (str, fmt)' and
3232 'sprintf (dest, "%s", orig)'. */
3233 if (gimple_call_num_args (stmt) > 3)
3234 return false;
3235
3236 if (gimple_call_num_args (stmt) == 3)
3237 orig = gimple_call_arg (stmt, 2);
3238
3239 /* Check whether the format is a literal string constant. */
3240 fmt_str = c_getstr (fmt);
3241 if (fmt_str == NULL)
3242 return false;
3243
3244 if (!init_target_chars ())
3245 return false;
3246
3247 /* If the format doesn't contain % args or %%, use strcpy. */
3248 if (strchr (fmt_str, target_percent) == NULL)
3249 {
3250 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3251
3252 if (!fn)
3253 return false;
3254
3255 /* Don't optimize sprintf (buf, "abc", ptr++). */
3256 if (orig)
3257 return false;
3258
3259 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
3260 'format' is known to contain no % formats. */
3261 gimple_seq stmts = NULL;
355fe088 3262 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
01b0acb7
MS
3263
3264 /* Propagate the NO_WARNING bit to avoid issuing the same
3265 warning more than once. */
3266 if (gimple_no_warning_p (stmt))
3267 gimple_set_no_warning (repl, true);
3268
35770bb2 3269 gimple_seq_add_stmt_without_update (&stmts, repl);
a73468e8 3270 if (tree lhs = gimple_call_lhs (stmt))
35770bb2 3271 {
a73468e8
JJ
3272 repl = gimple_build_assign (lhs, build_int_cst (TREE_TYPE (lhs),
3273 strlen (fmt_str)));
35770bb2
RB
3274 gimple_seq_add_stmt_without_update (&stmts, repl);
3275 gsi_replace_with_seq_vops (gsi, stmts);
3276 /* gsi now points at the assignment to the lhs, get a
 3277	     stmt iterator to the strcpy call.
3278 ??? We can't use gsi_for_stmt as that doesn't work when the
3279 CFG isn't built yet. */
3280 gimple_stmt_iterator gsi2 = *gsi;
3281 gsi_prev (&gsi2);
3282 fold_stmt (&gsi2);
3283 }
3284 else
3285 {
3286 gsi_replace_with_seq_vops (gsi, stmts);
3287 fold_stmt (gsi);
3288 }
3289 return true;
3290 }
3291
 3292   /* If the format is "%s", use strcpy; if the result is used, the length of the source string must also be known.  */
3293 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3294 {
3295 tree fn;
3296 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3297
3298 if (!fn)
3299 return false;
3300
3301 /* Don't crash on sprintf (str1, "%s"). */
3302 if (!orig)
3303 return false;
3304
dcb7fae2
RB
3305 tree orig_len = NULL_TREE;
3306 if (gimple_call_lhs (stmt))
35770bb2 3307 {
598f7235 3308 orig_len = get_maxval_strlen (orig, SRK_STRLEN);
d7e78447 3309 if (!orig_len)
35770bb2
RB
3310 return false;
3311 }
3312
3313 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
3314 gimple_seq stmts = NULL;
355fe088 3315 gimple *repl = gimple_build_call (fn, 2, dest, orig);
01b0acb7
MS
3316
3317 /* Propagate the NO_WARNING bit to avoid issuing the same
3318 warning more than once. */
3319 if (gimple_no_warning_p (stmt))
3320 gimple_set_no_warning (repl, true);
3321
35770bb2 3322 gimple_seq_add_stmt_without_update (&stmts, repl);
a73468e8 3323 if (tree lhs = gimple_call_lhs (stmt))
35770bb2 3324 {
a73468e8 3325 if (!useless_type_conversion_p (TREE_TYPE (lhs),
d7e78447 3326 TREE_TYPE (orig_len)))
a73468e8
JJ
3327 orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
3328 repl = gimple_build_assign (lhs, orig_len);
35770bb2
RB
3329 gimple_seq_add_stmt_without_update (&stmts, repl);
3330 gsi_replace_with_seq_vops (gsi, stmts);
3331 /* gsi now points at the assignment to the lhs, get a
 3332	     stmt iterator to the strcpy call.
3333 ??? We can't use gsi_for_stmt as that doesn't work when the
3334 CFG isn't built yet. */
3335 gimple_stmt_iterator gsi2 = *gsi;
3336 gsi_prev (&gsi2);
3337 fold_stmt (&gsi2);
3338 }
3339 else
3340 {
3341 gsi_replace_with_seq_vops (gsi, stmts);
3342 fold_stmt (gsi);
3343 }
3344 return true;
3345 }
3346 return false;
3347}
3348
d7e78447
RB
3349/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
3350 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
3351 attempt to simplify calls with more than 4 arguments.
35770bb2 3352
a104bd88 3353 Return true if simplification was possible, otherwise false. */
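/* A rough sketch of the foldings performed below:

     snprintf (buf, 32, "hello");      =>  strcpy (buf, "hello");
     snprintf (buf, 32, "%s", str);    =>  strcpy (buf, str);

   The second form requires strlen (str) to be a known constant smaller
   than the destination size.  */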
d7e78447 3354
a104bd88 3355bool
dcb7fae2 3356gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
d7e78447 3357{
538dd0b7 3358 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
d7e78447
RB
3359 tree dest = gimple_call_arg (stmt, 0);
3360 tree destsize = gimple_call_arg (stmt, 1);
3361 tree fmt = gimple_call_arg (stmt, 2);
3362 tree orig = NULL_TREE;
3363 const char *fmt_str = NULL;
3364
3365 if (gimple_call_num_args (stmt) > 4)
3366 return false;
3367
3368 if (gimple_call_num_args (stmt) == 4)
3369 orig = gimple_call_arg (stmt, 3);
3370
3371 if (!tree_fits_uhwi_p (destsize))
3372 return false;
3373 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
3374
3375 /* Check whether the format is a literal string constant. */
3376 fmt_str = c_getstr (fmt);
3377 if (fmt_str == NULL)
3378 return false;
3379
3380 if (!init_target_chars ())
3381 return false;
3382
3383 /* If the format doesn't contain % args or %%, use strcpy. */
3384 if (strchr (fmt_str, target_percent) == NULL)
3385 {
3386 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3387 if (!fn)
3388 return false;
3389
3390 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
3391 if (orig)
3392 return false;
3393
3394 /* We could expand this as
3395 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
3396 or to
3397 memcpy (str, fmt_with_nul_at_cstm1, cst);
3398 but in the former case that might increase code size
3399 and in the latter case grow .rodata section too much.
3400 So punt for now. */
3401 size_t len = strlen (fmt_str);
3402 if (len >= destlen)
3403 return false;
3404
3405 gimple_seq stmts = NULL;
355fe088 3406 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
d7e78447 3407 gimple_seq_add_stmt_without_update (&stmts, repl);
a73468e8 3408 if (tree lhs = gimple_call_lhs (stmt))
d7e78447 3409 {
a73468e8
JJ
3410 repl = gimple_build_assign (lhs,
3411 build_int_cst (TREE_TYPE (lhs), len));
d7e78447
RB
3412 gimple_seq_add_stmt_without_update (&stmts, repl);
3413 gsi_replace_with_seq_vops (gsi, stmts);
3414 /* gsi now points at the assignment to the lhs, get a
 3415	     stmt iterator to the strcpy call.
3416 ??? We can't use gsi_for_stmt as that doesn't work when the
3417 CFG isn't built yet. */
3418 gimple_stmt_iterator gsi2 = *gsi;
3419 gsi_prev (&gsi2);
3420 fold_stmt (&gsi2);
3421 }
3422 else
3423 {
3424 gsi_replace_with_seq_vops (gsi, stmts);
3425 fold_stmt (gsi);
3426 }
3427 return true;
3428 }
3429
 3430   /* If the format is "%s", use strcpy when the length of the source string is known to be smaller than the destination size.  */
3431 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3432 {
3433 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3434 if (!fn)
3435 return false;
3436
3437 /* Don't crash on snprintf (str1, cst, "%s"). */
3438 if (!orig)
3439 return false;
3440
598f7235 3441 tree orig_len = get_maxval_strlen (orig, SRK_STRLEN);
af9db3a7 3442 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
dcb7fae2 3443 return false;
d7e78447
RB
3444
3445 /* We could expand this as
3446 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
3447 or to
3448 memcpy (str1, str2_with_nul_at_cstm1, cst);
3449 but in the former case that might increase code size
3450 and in the latter case grow .rodata section too much.
3451 So punt for now. */
3452 if (compare_tree_int (orig_len, destlen) >= 0)
3453 return false;
3454
3455 /* Convert snprintf (str1, cst, "%s", str2) into
3456 strcpy (str1, str2) if strlen (str2) < cst. */
3457 gimple_seq stmts = NULL;
355fe088 3458 gimple *repl = gimple_build_call (fn, 2, dest, orig);
d7e78447 3459 gimple_seq_add_stmt_without_update (&stmts, repl);
a73468e8 3460 if (tree lhs = gimple_call_lhs (stmt))
d7e78447 3461 {
a73468e8 3462 if (!useless_type_conversion_p (TREE_TYPE (lhs),
d7e78447 3463 TREE_TYPE (orig_len)))
a73468e8
JJ
3464 orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
3465 repl = gimple_build_assign (lhs, orig_len);
d7e78447
RB
3466 gimple_seq_add_stmt_without_update (&stmts, repl);
3467 gsi_replace_with_seq_vops (gsi, stmts);
3468 /* gsi now points at the assignment to the lhs, get a
 3469	     stmt iterator to the strcpy call.
3470 ??? We can't use gsi_for_stmt as that doesn't work when the
3471 CFG isn't built yet. */
3472 gimple_stmt_iterator gsi2 = *gsi;
3473 gsi_prev (&gsi2);
3474 fold_stmt (&gsi2);
3475 }
3476 else
3477 {
3478 gsi_replace_with_seq_vops (gsi, stmts);
3479 fold_stmt (gsi);
3480 }
3481 return true;
3482 }
3483 return false;
3484}
35770bb2 3485
edd7ae68
RB
 3486/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
 3487   FP, FMT, and ARG are the arguments to the call.  We don't fold calls with
 3488   more than 3 arguments, and ARG may be null in the 2-argument case.
 3489
 3490   Return false if no simplification was possible, otherwise return true
 3491   after replacing the call.  FCODE is the BUILT_IN_* code of the
 3492   function to be simplified.  */
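/* Sketch of the foldings done below, all only when the result of the call
   is unused and the corresponding fputs/fputc declaration is available:

     fprintf (fp, "hello");   =>  fputs ("hello", fp);
     fprintf (fp, "%s", s);   =>  fputs (s, fp);
     fprintf (fp, "%c", c);   =>  fputc (c, fp);
     fprintf (fp, "");        =>  (call removed)  */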
3493
3494static bool
3495gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
3496 tree fp, tree fmt, tree arg,
3497 enum built_in_function fcode)
3498{
3499 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3500 tree fn_fputc, fn_fputs;
3501 const char *fmt_str = NULL;
3502
3503 /* If the return value is used, don't do the transformation. */
3504 if (gimple_call_lhs (stmt) != NULL_TREE)
3505 return false;
3506
3507 /* Check whether the format is a literal string constant. */
3508 fmt_str = c_getstr (fmt);
3509 if (fmt_str == NULL)
3510 return false;
3511
3512 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
3513 {
3514 /* If we're using an unlocked function, assume the other
3515 unlocked functions exist explicitly. */
3516 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
3517 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
3518 }
3519 else
3520 {
3521 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
3522 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
3523 }
3524
3525 if (!init_target_chars ())
3526 return false;
3527
 3528   /* If the format doesn't contain % args or %%, use fputs.  */
3529 if (strchr (fmt_str, target_percent) == NULL)
3530 {
3531 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
3532 && arg)
3533 return false;
3534
3535 /* If the format specifier was "", fprintf does nothing. */
3536 if (fmt_str[0] == '\0')
3537 {
3538 replace_call_with_value (gsi, NULL_TREE);
3539 return true;
3540 }
3541
3542 /* When "string" doesn't contain %, replace all cases of
3543 fprintf (fp, string) with fputs (string, fp). The fputs
3544 builtin will take care of special cases like length == 1. */
3545 if (fn_fputs)
3546 {
3547 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
3548 replace_call_with_call_and_fold (gsi, repl);
3549 return true;
3550 }
3551 }
3552
3553 /* The other optimizations can be done only on the non-va_list variants. */
3554 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
3555 return false;
3556
3557 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3558 else if (strcmp (fmt_str, target_percent_s) == 0)
3559 {
3560 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3561 return false;
3562 if (fn_fputs)
3563 {
3564 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
3565 replace_call_with_call_and_fold (gsi, repl);
3566 return true;
3567 }
3568 }
3569
3570 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3571 else if (strcmp (fmt_str, target_percent_c) == 0)
3572 {
3573 if (!arg
3574 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
3575 return false;
3576 if (fn_fputc)
3577 {
3578 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
3579 replace_call_with_call_and_fold (gsi, repl);
3580 return true;
3581 }
3582 }
3583
3584 return false;
3585}
3586
ad03a744
RB
3587/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3588 FMT and ARG are the arguments to the call; we don't fold cases with
3589 more than 2 arguments, and ARG may be null if this is a 1-argument case.
3590
 3591   Return false if no simplification was possible, otherwise return true
 3592   after replacing the call.  FCODE is the BUILT_IN_* code of the
 3593   function to be simplified.  */
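/* Sketch of the foldings done below, again only when the result of the
   call is unused:

     printf ("hello\n");    =>  puts ("hello");
     printf ("x");          =>  putchar ('x');
     printf ("%s\n", s);    =>  puts (s);
     printf ("%c", c);      =>  putchar (c);
     printf ("");           =>  (call removed)  */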
3594
3595static bool
3596gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
3597 tree arg, enum built_in_function fcode)
3598{
3599 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3600 tree fn_putchar, fn_puts, newarg;
3601 const char *fmt_str = NULL;
3602
3603 /* If the return value is used, don't do the transformation. */
3604 if (gimple_call_lhs (stmt) != NULL_TREE)
3605 return false;
3606
3607 /* Check whether the format is a literal string constant. */
3608 fmt_str = c_getstr (fmt);
3609 if (fmt_str == NULL)
3610 return false;
3611
3612 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
3613 {
3614 /* If we're using an unlocked function, assume the other
3615 unlocked functions exist explicitly. */
3616 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
3617 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
3618 }
3619 else
3620 {
3621 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
3622 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
3623 }
3624
3625 if (!init_target_chars ())
3626 return false;
3627
3628 if (strcmp (fmt_str, target_percent_s) == 0
3629 || strchr (fmt_str, target_percent) == NULL)
3630 {
3631 const char *str;
3632
3633 if (strcmp (fmt_str, target_percent_s) == 0)
3634 {
3635 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3636 return false;
3637
3638 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3639 return false;
3640
3641 str = c_getstr (arg);
3642 if (str == NULL)
3643 return false;
3644 }
3645 else
3646 {
3647 /* The format specifier doesn't contain any '%' characters. */
3648 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
3649 && arg)
3650 return false;
3651 str = fmt_str;
3652 }
3653
3654 /* If the string was "", printf does nothing. */
3655 if (str[0] == '\0')
3656 {
3657 replace_call_with_value (gsi, NULL_TREE);
3658 return true;
3659 }
3660
3661 /* If the string has length of 1, call putchar. */
3662 if (str[1] == '\0')
3663 {
 3664	      /* Given printf ("c"), where c is any single character,
 3665		 convert "c"[0] to an int and pass that to the replacement
 3666		 function.  */
3667 newarg = build_int_cst (integer_type_node, str[0]);
3668 if (fn_putchar)
3669 {
3670 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
3671 replace_call_with_call_and_fold (gsi, repl);
3672 return true;
3673 }
3674 }
3675 else
3676 {
3677 /* If the string was "string\n", call puts("string"). */
3678 size_t len = strlen (str);
3679 if ((unsigned char)str[len - 1] == target_newline
3680 && (size_t) (int) len == len
3681 && (int) len > 0)
3682 {
3683 char *newstr;
ad03a744
RB
3684
3685 /* Create a NUL-terminated string that's one char shorter
3686 than the original, stripping off the trailing '\n'. */
a353fec4 3687 newstr = xstrdup (str);
ad03a744 3688 newstr[len - 1] = '\0';
a353fec4
BE
3689 newarg = build_string_literal (len, newstr);
3690 free (newstr);
ad03a744
RB
3691 if (fn_puts)
3692 {
3693 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
3694 replace_call_with_call_and_fold (gsi, repl);
3695 return true;
3696 }
3697 }
3698 else
3699 /* We'd like to arrange to call fputs(string,stdout) here,
3700 but we need stdout and don't have a way to get it yet. */
3701 return false;
3702 }
3703 }
3704
3705 /* The other optimizations can be done only on the non-va_list variants. */
3706 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3707 return false;
3708
3709 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
3710 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
3711 {
3712 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3713 return false;
3714 if (fn_puts)
3715 {
3716 gcall *repl = gimple_build_call (fn_puts, 1, arg);
3717 replace_call_with_call_and_fold (gsi, repl);
3718 return true;
3719 }
3720 }
3721
3722 /* If the format specifier was "%c", call __builtin_putchar(arg). */
3723 else if (strcmp (fmt_str, target_percent_c) == 0)
3724 {
3725 if (!arg || ! useless_type_conversion_p (integer_type_node,
3726 TREE_TYPE (arg)))
3727 return false;
3728 if (fn_putchar)
3729 {
3730 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
3731 replace_call_with_call_and_fold (gsi, repl);
3732 return true;
3733 }
3734 }
3735
3736 return false;
3737}
3738
edd7ae68 3739
fef5a0d9
RB
3740
 3741/* Fold a call to __builtin_strlen; replace it with a constant when the length of the argument is known, otherwise set the range of the result.  */
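/* For example, given char buf[8] holding the constant string "abcde", a
   use such as n = strlen (buf) is replaced by n = 5; when only the size
   of the array is known the call is kept and the result merely gets a
   range such as [0, 7] (sketch only; the precise bounds come from
   get_range_strlen below).  */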
3742
3743static bool
dcb7fae2 3744gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
fef5a0d9 3745{
355fe088 3746 gimple *stmt = gsi_stmt (*gsi);
e08341bb 3747 tree arg = gimple_call_arg (stmt, 0);
c42d0aa0
MS
3748
3749 wide_int minlen;
3750 wide_int maxlen;
3751
5d6655eb 3752 c_strlen_data lendata = { };
03c4a945 3753 if (get_range_strlen (arg, &lendata, /* eltsize = */ 1)
5d6655eb
MS
3754 && !lendata.decl
3755 && lendata.minlen && TREE_CODE (lendata.minlen) == INTEGER_CST
3756 && lendata.maxlen && TREE_CODE (lendata.maxlen) == INTEGER_CST)
c42d0aa0
MS
3757 {
3758 /* The range of lengths refers to either a single constant
3759 string or to the longest and shortest constant string
3760 referenced by the argument of the strlen() call, or to
3761 the strings that can possibly be stored in the arrays
3762 the argument refers to. */
5d6655eb
MS
3763 minlen = wi::to_wide (lendata.minlen);
3764 maxlen = wi::to_wide (lendata.maxlen);
c42d0aa0
MS
3765 }
3766 else
3767 {
3768 unsigned prec = TYPE_PRECISION (sizetype);
3769
3770 minlen = wi::shwi (0, prec);
3771 maxlen = wi::to_wide (max_object_size (), prec) - 2;
3772 }
3773
3774 if (minlen == maxlen)
3775 {
5d6655eb
MS
3776 /* Fold the strlen call to a constant. */
3777 tree type = TREE_TYPE (lendata.minlen);
3778 tree len = force_gimple_operand_gsi (gsi,
3779 wide_int_to_tree (type, minlen),
3780 true, NULL, true, GSI_SAME_STMT);
3781 replace_call_with_value (gsi, len);
c42d0aa0
MS
3782 return true;
3783 }
3784
d4bf6975 3785 /* Set the strlen() range to [0, MAXLEN]. */
a7bf6c08 3786 if (tree lhs = gimple_call_lhs (stmt))
34fcf41e 3787 set_strlen_range (lhs, minlen, maxlen);
c42d0aa0
MS
3788
3789 return false;
cbdd87d4
RG
3790}
3791
48126138
NS
3792/* Fold a call to __builtin_acc_on_device. */
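/* Sketch: on the host compiler acc_on_device (dev) is folded into the
   boolean expression

     (dev == GOMP_DEVICE_HOST) | (dev == GOMP_DEVICE_NONE)

   while an accelerator compiler instead compares against
   GOMP_DEVICE_NOT_HOST and its own ACCEL_COMPILER_acc_device value.  */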
3793
3794static bool
3795gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
3796{
3797 /* Defer folding until we know which compiler we're in. */
3798 if (symtab->state != EXPANSION)
3799 return false;
3800
3801 unsigned val_host = GOMP_DEVICE_HOST;
3802 unsigned val_dev = GOMP_DEVICE_NONE;
3803
3804#ifdef ACCEL_COMPILER
3805 val_host = GOMP_DEVICE_NOT_HOST;
3806 val_dev = ACCEL_COMPILER_acc_device;
3807#endif
3808
3809 location_t loc = gimple_location (gsi_stmt (*gsi));
3810
3811 tree host_eq = make_ssa_name (boolean_type_node);
3812 gimple *host_ass = gimple_build_assign
3813 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
3814 gimple_set_location (host_ass, loc);
3815 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
3816
3817 tree dev_eq = make_ssa_name (boolean_type_node);
3818 gimple *dev_ass = gimple_build_assign
3819 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
3820 gimple_set_location (dev_ass, loc);
3821 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
3822
3823 tree result = make_ssa_name (boolean_type_node);
3824 gimple *result_ass = gimple_build_assign
3825 (result, BIT_IOR_EXPR, host_eq, dev_eq);
3826 gimple_set_location (result_ass, loc);
3827 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
3828
3829 replace_call_with_value (gsi, result);
3830
3831 return true;
3832}
cbdd87d4 3833
fe75f732
PK
3834/* Fold realloc (0, n) -> malloc (n). */
3835
3836static bool
3837gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
3838{
3839 gimple *stmt = gsi_stmt (*gsi);
3840 tree arg = gimple_call_arg (stmt, 0);
3841 tree size = gimple_call_arg (stmt, 1);
3842
3843 if (operand_equal_p (arg, null_pointer_node, 0))
3844 {
3845 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
3846 if (fn_malloc)
3847 {
3848 gcall *repl = gimple_build_call (fn_malloc, 1, size);
3849 replace_call_with_call_and_fold (gsi, repl);
3850 return true;
3851 }
3852 }
3853 return false;
3854}
3855
dcb7fae2
RB
3856/* Fold the non-target builtin at *GSI and return whether any simplification
3857 was made. */
cbdd87d4 3858
fef5a0d9 3859static bool
dcb7fae2 3860gimple_fold_builtin (gimple_stmt_iterator *gsi)
cbdd87d4 3861{
538dd0b7 3862 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
fef5a0d9 3863 tree callee = gimple_call_fndecl (stmt);
cbdd87d4 3864
dcb7fae2
RB
3865 /* Give up for always_inline inline builtins until they are
3866 inlined. */
3867 if (avoid_folding_inline_builtin (callee))
3868 return false;
cbdd87d4 3869
edd7ae68
RB
3870 unsigned n = gimple_call_num_args (stmt);
3871 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
3872 switch (fcode)
cbdd87d4 3873 {
b3d8d88e
MS
3874 case BUILT_IN_BCMP:
3875 return gimple_fold_builtin_bcmp (gsi);
3876 case BUILT_IN_BCOPY:
3877 return gimple_fold_builtin_bcopy (gsi);
dcb7fae2 3878 case BUILT_IN_BZERO:
b3d8d88e
MS
3879 return gimple_fold_builtin_bzero (gsi);
3880
dcb7fae2
RB
3881 case BUILT_IN_MEMSET:
3882 return gimple_fold_builtin_memset (gsi,
3883 gimple_call_arg (stmt, 1),
3884 gimple_call_arg (stmt, 2));
dcb7fae2 3885 case BUILT_IN_MEMCPY:
dcb7fae2 3886 case BUILT_IN_MEMPCPY:
dcb7fae2
RB
3887 case BUILT_IN_MEMMOVE:
3888 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
0d67a510 3889 gimple_call_arg (stmt, 1), fcode);
dcb7fae2
RB
3890 case BUILT_IN_SPRINTF_CHK:
3891 case BUILT_IN_VSPRINTF_CHK:
edd7ae68 3892 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
dcb7fae2
RB
3893 case BUILT_IN_STRCAT_CHK:
3894 return gimple_fold_builtin_strcat_chk (gsi);
745583f9
RB
3895 case BUILT_IN_STRNCAT_CHK:
3896 return gimple_fold_builtin_strncat_chk (gsi);
cbdd87d4 3897 case BUILT_IN_STRLEN:
dcb7fae2 3898 return gimple_fold_builtin_strlen (gsi);
cbdd87d4 3899 case BUILT_IN_STRCPY:
dcb7fae2 3900 return gimple_fold_builtin_strcpy (gsi,
fef5a0d9 3901 gimple_call_arg (stmt, 0),
dcb7fae2 3902 gimple_call_arg (stmt, 1));
cbdd87d4 3903 case BUILT_IN_STRNCPY:
dcb7fae2 3904 return gimple_fold_builtin_strncpy (gsi,
fef5a0d9
RB
3905 gimple_call_arg (stmt, 0),
3906 gimple_call_arg (stmt, 1),
dcb7fae2 3907 gimple_call_arg (stmt, 2));
9a7eefec 3908 case BUILT_IN_STRCAT:
dcb7fae2
RB
3909 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
3910 gimple_call_arg (stmt, 1));
ad03a744
RB
3911 case BUILT_IN_STRNCAT:
3912 return gimple_fold_builtin_strncat (gsi);
71dea1dd 3913 case BUILT_IN_INDEX:
912d9ec3 3914 case BUILT_IN_STRCHR:
71dea1dd
WD
3915 return gimple_fold_builtin_strchr (gsi, false);
3916 case BUILT_IN_RINDEX:
3917 case BUILT_IN_STRRCHR:
3918 return gimple_fold_builtin_strchr (gsi, true);
c8952930
JJ
3919 case BUILT_IN_STRSTR:
3920 return gimple_fold_builtin_strstr (gsi);
a918bfbf 3921 case BUILT_IN_STRCMP:
8b0b334a 3922 case BUILT_IN_STRCMP_EQ:
a918bfbf
ML
3923 case BUILT_IN_STRCASECMP:
3924 case BUILT_IN_STRNCMP:
8b0b334a 3925 case BUILT_IN_STRNCMP_EQ:
a918bfbf
ML
3926 case BUILT_IN_STRNCASECMP:
3927 return gimple_fold_builtin_string_compare (gsi);
488c6247
ML
3928 case BUILT_IN_MEMCHR:
3929 return gimple_fold_builtin_memchr (gsi);
cbdd87d4 3930 case BUILT_IN_FPUTS:
dcb7fae2
RB
3931 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3932 gimple_call_arg (stmt, 1), false);
cbdd87d4 3933 case BUILT_IN_FPUTS_UNLOCKED:
dcb7fae2
RB
3934 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3935 gimple_call_arg (stmt, 1), true);
cbdd87d4
RG
3936 case BUILT_IN_MEMCPY_CHK:
3937 case BUILT_IN_MEMPCPY_CHK:
3938 case BUILT_IN_MEMMOVE_CHK:
3939 case BUILT_IN_MEMSET_CHK:
dcb7fae2 3940 return gimple_fold_builtin_memory_chk (gsi,
fef5a0d9
RB
3941 gimple_call_arg (stmt, 0),
3942 gimple_call_arg (stmt, 1),
3943 gimple_call_arg (stmt, 2),
3944 gimple_call_arg (stmt, 3),
edd7ae68 3945 fcode);
2625bb5d
RB
3946 case BUILT_IN_STPCPY:
3947 return gimple_fold_builtin_stpcpy (gsi);
cbdd87d4
RG
3948 case BUILT_IN_STRCPY_CHK:
3949 case BUILT_IN_STPCPY_CHK:
dcb7fae2 3950 return gimple_fold_builtin_stxcpy_chk (gsi,
fef5a0d9
RB
3951 gimple_call_arg (stmt, 0),
3952 gimple_call_arg (stmt, 1),
3953 gimple_call_arg (stmt, 2),
edd7ae68 3954 fcode);
cbdd87d4 3955 case BUILT_IN_STRNCPY_CHK:
f3fc9b80 3956 case BUILT_IN_STPNCPY_CHK:
fef5a0d9
RB
3957 return gimple_fold_builtin_stxncpy_chk (gsi,
3958 gimple_call_arg (stmt, 0),
3959 gimple_call_arg (stmt, 1),
3960 gimple_call_arg (stmt, 2),
3961 gimple_call_arg (stmt, 3),
edd7ae68 3962 fcode);
cbdd87d4
RG
3963 case BUILT_IN_SNPRINTF_CHK:
3964 case BUILT_IN_VSNPRINTF_CHK:
edd7ae68 3965 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
a104bd88 3966
edd7ae68
RB
3967 case BUILT_IN_FPRINTF:
3968 case BUILT_IN_FPRINTF_UNLOCKED:
3969 case BUILT_IN_VFPRINTF:
3970 if (n == 2 || n == 3)
3971 return gimple_fold_builtin_fprintf (gsi,
3972 gimple_call_arg (stmt, 0),
3973 gimple_call_arg (stmt, 1),
3974 n == 3
3975 ? gimple_call_arg (stmt, 2)
3976 : NULL_TREE,
3977 fcode);
3978 break;
3979 case BUILT_IN_FPRINTF_CHK:
3980 case BUILT_IN_VFPRINTF_CHK:
3981 if (n == 3 || n == 4)
3982 return gimple_fold_builtin_fprintf (gsi,
3983 gimple_call_arg (stmt, 0),
3984 gimple_call_arg (stmt, 2),
3985 n == 4
3986 ? gimple_call_arg (stmt, 3)
3987 : NULL_TREE,
3988 fcode);
3989 break;
ad03a744
RB
3990 case BUILT_IN_PRINTF:
3991 case BUILT_IN_PRINTF_UNLOCKED:
3992 case BUILT_IN_VPRINTF:
3993 if (n == 1 || n == 2)
3994 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
3995 n == 2
3996 ? gimple_call_arg (stmt, 1)
3997 : NULL_TREE, fcode);
3998 break;
3999 case BUILT_IN_PRINTF_CHK:
4000 case BUILT_IN_VPRINTF_CHK:
4001 if (n == 2 || n == 3)
4002 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
4003 n == 3
4004 ? gimple_call_arg (stmt, 2)
4005 : NULL_TREE, fcode);
242a37f1 4006 break;
48126138
NS
4007 case BUILT_IN_ACC_ON_DEVICE:
4008 return gimple_fold_builtin_acc_on_device (gsi,
4009 gimple_call_arg (stmt, 0));
fe75f732
PK
4010 case BUILT_IN_REALLOC:
4011 return gimple_fold_builtin_realloc (gsi);
4012
fef5a0d9
RB
4013 default:;
4014 }
4015
4016 /* Try the generic builtin folder. */
4017 bool ignore = (gimple_call_lhs (stmt) == NULL);
4018 tree result = fold_call_stmt (stmt, ignore);
4019 if (result)
4020 {
4021 if (ignore)
4022 STRIP_NOPS (result);
4023 else
4024 result = fold_convert (gimple_call_return_type (stmt), result);
4025 if (!update_call_from_tree (gsi, result))
4026 gimplify_and_update_call_from_tree (gsi, result);
4027 return true;
4028 }
4029
4030 return false;
4031}
4032
451e8dae
NS
4033/* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
4034 function calls to constants, where possible. */
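/* E.g. if the 'vector' dimension of the current offload region is known
   to be 32, GOACC_DIM_SIZE (vector) folds to 32; and for any dimension of
   size 1, GOACC_DIM_POS folds to 0.  */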
4035
4036static tree
4037fold_internal_goacc_dim (const gimple *call)
4038{
629b3d75
MJ
4039 int axis = oacc_get_ifn_dim_arg (call);
4040 int size = oacc_get_fn_dim_size (current_function_decl, axis);
451e8dae 4041 tree result = NULL_TREE;
67d2229e 4042 tree type = TREE_TYPE (gimple_call_lhs (call));
451e8dae 4043
67d2229e 4044 switch (gimple_call_internal_fn (call))
451e8dae 4045 {
67d2229e
TV
4046 case IFN_GOACC_DIM_POS:
4047 /* If the size is 1, we know the answer. */
4048 if (size == 1)
4049 result = build_int_cst (type, 0);
4050 break;
4051 case IFN_GOACC_DIM_SIZE:
4052 /* If the size is not dynamic, we know the answer. */
4053 if (size)
4054 result = build_int_cst (type, size);
4055 break;
4056 default:
4057 break;
451e8dae
NS
4058 }
4059
4060 return result;
4061}
4062
849a76a5
JJ
 4063/* Return true if STMT is an __atomic_compare_exchange_N call which is
 4064   suitable for conversion into ATOMIC_COMPARE_EXCHANGE if the second
 4065   argument is &var where var is only addressable because of such calls.  */
4066
4067bool
4068optimize_atomic_compare_exchange_p (gimple *stmt)
4069{
4070 if (gimple_call_num_args (stmt) != 6
4071 || !flag_inline_atomics
4072 || !optimize
45b2222a 4073 || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
849a76a5
JJ
4074 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
4075 || !gimple_vdef (stmt)
4076 || !gimple_vuse (stmt))
4077 return false;
4078
4079 tree fndecl = gimple_call_fndecl (stmt);
4080 switch (DECL_FUNCTION_CODE (fndecl))
4081 {
4082 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
4083 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
4084 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
4085 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
4086 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
4087 break;
4088 default:
4089 return false;
4090 }
4091
4092 tree expected = gimple_call_arg (stmt, 1);
4093 if (TREE_CODE (expected) != ADDR_EXPR
1de3c940
JJ
4094 || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
4095 return false;
4096
4097 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
4098 if (!is_gimple_reg_type (etype)
849a76a5 4099 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
1de3c940
JJ
4100 || TREE_THIS_VOLATILE (etype)
4101 || VECTOR_TYPE_P (etype)
4102 || TREE_CODE (etype) == COMPLEX_TYPE
4103 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
4104 might not preserve all the bits. See PR71716. */
4105 || SCALAR_FLOAT_TYPE_P (etype)
73a699ae
RS
4106 || maybe_ne (TYPE_PRECISION (etype),
4107 GET_MODE_BITSIZE (TYPE_MODE (etype))))
849a76a5
JJ
4108 return false;
4109
4110 tree weak = gimple_call_arg (stmt, 3);
4111 if (!integer_zerop (weak) && !integer_onep (weak))
4112 return false;
4113
4114 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
4115 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
4116 machine_mode mode = TYPE_MODE (itype);
4117
4118 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
4119 == CODE_FOR_nothing
4120 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
4121 return false;
4122
cf098191 4123 if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
849a76a5
JJ
4124 return false;
4125
4126 return true;
4127}
4128
4129/* Fold
4130 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
4131 into
4132 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
4133 i = IMAGPART_EXPR <t>;
4134 r = (_Bool) i;
4135 e = REALPART_EXPR <t>; */
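/* A typical source-level case this helps (sketch, assuming a local
   variable 'e' that is addressable only because of the call):

     int e = expected;
     bool r = __atomic_compare_exchange_n (p, &e, desired, 0,
					   __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);

   After the folding 'e' no longer has its address taken, so it can be
   rewritten into SSA form.  */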
4136
4137void
4138fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
4139{
4140 gimple *stmt = gsi_stmt (*gsi);
4141 tree fndecl = gimple_call_fndecl (stmt);
4142 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
4143 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
4144 tree ctype = build_complex_type (itype);
4145 tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
cc195d46
JJ
4146 bool throws = false;
4147 edge e = NULL;
849a76a5
JJ
4148 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
4149 expected);
4150 gsi_insert_before (gsi, g, GSI_SAME_STMT);
4151 gimple_stmt_iterator gsiret = gsi_for_stmt (g);
4152 if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
4153 {
4154 g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
4155 build1 (VIEW_CONVERT_EXPR, itype,
4156 gimple_assign_lhs (g)));
4157 gsi_insert_before (gsi, g, GSI_SAME_STMT);
4158 }
4159 int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
4160 + int_size_in_bytes (itype);
4161 g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
4162 gimple_call_arg (stmt, 0),
4163 gimple_assign_lhs (g),
4164 gimple_call_arg (stmt, 2),
4165 build_int_cst (integer_type_node, flag),
4166 gimple_call_arg (stmt, 4),
4167 gimple_call_arg (stmt, 5));
4168 tree lhs = make_ssa_name (ctype);
4169 gimple_call_set_lhs (g, lhs);
779724a5 4170 gimple_move_vops (g, stmt);
cc195d46 4171 tree oldlhs = gimple_call_lhs (stmt);
36bbc05d 4172 if (stmt_can_throw_internal (cfun, stmt))
cc195d46
JJ
4173 {
4174 throws = true;
4175 e = find_fallthru_edge (gsi_bb (*gsi)->succs);
4176 }
4177 gimple_call_set_nothrow (as_a <gcall *> (g),
4178 gimple_call_nothrow_p (as_a <gcall *> (stmt)));
4179 gimple_call_set_lhs (stmt, NULL_TREE);
4180 gsi_replace (gsi, g, true);
4181 if (oldlhs)
849a76a5 4182 {
849a76a5
JJ
4183 g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
4184 build1 (IMAGPART_EXPR, itype, lhs));
cc195d46
JJ
4185 if (throws)
4186 {
4187 gsi_insert_on_edge_immediate (e, g);
4188 *gsi = gsi_for_stmt (g);
4189 }
4190 else
4191 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4192 g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
4193 gsi_insert_after (gsi, g, GSI_NEW_STMT);
849a76a5 4194 }
849a76a5
JJ
4195 g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
4196 build1 (REALPART_EXPR, itype, lhs));
cc195d46
JJ
4197 if (throws && oldlhs == NULL_TREE)
4198 {
4199 gsi_insert_on_edge_immediate (e, g);
4200 *gsi = gsi_for_stmt (g);
4201 }
4202 else
4203 gsi_insert_after (gsi, g, GSI_NEW_STMT);
849a76a5
JJ
4204 if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
4205 {
4206 g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
4207 VIEW_CONVERT_EXPR,
4208 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
4209 gimple_assign_lhs (g)));
4210 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4211 }
4212 g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
4213 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4214 *gsi = gsiret;
4215}
4216
1304953e
JJ
 4217/* Return true if ARG0 CODE ARG1, computed in infinite signed precision,
 4218   doesn't fit into TYPE.  The overflow test is done regardless of
 4219   -fwrapv, and even for unsigned types.  */
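/* For example, with TYPE being unsigned char, 200 + 100 evaluates to 300
   in infinite precision, which needs nine bits, so this returns true even
   though the 8-bit unsigned addition itself simply wraps.  */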
4220
4221bool
4222arith_overflowed_p (enum tree_code code, const_tree type,
4223 const_tree arg0, const_tree arg1)
4224{
1304953e
JJ
4225 widest2_int warg0 = widest2_int_cst (arg0);
4226 widest2_int warg1 = widest2_int_cst (arg1);
4227 widest2_int wres;
4228 switch (code)
4229 {
4230 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
4231 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
4232 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
4233 default: gcc_unreachable ();
4234 }
4235 signop sign = TYPE_SIGN (type);
4236 if (sign == UNSIGNED && wi::neg_p (wres))
4237 return true;
4238 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
4239}
4240
868363d4
RS
4241/* If IFN_MASK_LOAD/STORE call CALL is unconditional, return a MEM_REF
4242 for the memory it references, otherwise return null. VECTYPE is the
4243 type of the memory vector. */
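/* Sketch: a load like

     lhs = .MASK_LOAD (ptr, align, { -1, -1, -1, -1 });

   has an all-ones mask and is therefore unconditional, so it can be
   rewritten as a plain vector memory reference MEM <vectype> [ptr]
   (and similarly for .MASK_STORE).  */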
4244
4245static tree
4246gimple_fold_mask_load_store_mem_ref (gcall *call, tree vectype)
4247{
4248 tree ptr = gimple_call_arg (call, 0);
4249 tree alias_align = gimple_call_arg (call, 1);
4250 tree mask = gimple_call_arg (call, 2);
4251 if (!tree_fits_uhwi_p (alias_align) || !integer_all_onesp (mask))
4252 return NULL_TREE;
4253
4254 unsigned HOST_WIDE_INT align = tree_to_uhwi (alias_align) * BITS_PER_UNIT;
4255 if (TYPE_ALIGN (vectype) != align)
4256 vectype = build_aligned_type (vectype, align);
4257 tree offset = build_zero_cst (TREE_TYPE (alias_align));
4258 return fold_build2 (MEM_REF, vectype, ptr, offset);
4259}
4260
4261/* Try to fold IFN_MASK_LOAD call CALL. Return true on success. */
4262
4263static bool
4264gimple_fold_mask_load (gimple_stmt_iterator *gsi, gcall *call)
4265{
4266 tree lhs = gimple_call_lhs (call);
4267 if (!lhs)
4268 return false;
4269
4270 if (tree rhs = gimple_fold_mask_load_store_mem_ref (call, TREE_TYPE (lhs)))
4271 {
4272 gassign *new_stmt = gimple_build_assign (lhs, rhs);
4273 gimple_set_location (new_stmt, gimple_location (call));
4274 gimple_move_vops (new_stmt, call);
4275 gsi_replace (gsi, new_stmt, false);
4276 return true;
4277 }
4278 return false;
4279}
4280
4281/* Try to fold IFN_MASK_STORE call CALL. Return true on success. */
4282
4283static bool
4284gimple_fold_mask_store (gimple_stmt_iterator *gsi, gcall *call)
4285{
4286 tree rhs = gimple_call_arg (call, 3);
4287 if (tree lhs = gimple_fold_mask_load_store_mem_ref (call, TREE_TYPE (rhs)))
4288 {
4289 gassign *new_stmt = gimple_build_assign (lhs, rhs);
4290 gimple_set_location (new_stmt, gimple_location (call));
4291 gimple_move_vops (new_stmt, call);
4292 gsi_replace (gsi, new_stmt, false);
4293 return true;
4294 }
4295 return false;
4296}
4297
cbdd87d4
RG
4298/* Attempt to fold a call statement referenced by the statement iterator GSI.
4299 The statement may be replaced by another statement, e.g., if the call
4300 simplifies to a constant value. Return true if any changes were made.
4301 It is assumed that the operands have been previously folded. */
4302
e021c122 4303static bool
ceeffab0 4304gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
cbdd87d4 4305{
538dd0b7 4306 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3b45a007 4307 tree callee;
e021c122
RG
4308 bool changed = false;
4309 unsigned i;
cbdd87d4 4310
e021c122
RG
4311 /* Fold *& in call arguments. */
4312 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4313 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
4314 {
4315 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
4316 if (tmp)
4317 {
4318 gimple_call_set_arg (stmt, i, tmp);
4319 changed = true;
4320 }
4321 }
3b45a007
RG
4322
4323 /* Check for virtual calls that became direct calls. */
4324 callee = gimple_call_fn (stmt);
25583c4f 4325 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
3b45a007 4326 {
49c471e3
MJ
4327 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
4328 {
450ad0cd
JH
4329 if (dump_file && virtual_method_call_p (callee)
4330 && !possible_polymorphic_call_target_p
6f8091fc
JH
4331 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
4332 (OBJ_TYPE_REF_EXPR (callee)))))
450ad0cd
JH
4333 {
4334 fprintf (dump_file,
a70e9985 4335 "Type inheritance inconsistent devirtualization of ");
450ad0cd
JH
4336 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
4337 fprintf (dump_file, " to ");
4338 print_generic_expr (dump_file, callee, TDF_SLIM);
4339 fprintf (dump_file, "\n");
4340 }
4341
49c471e3 4342 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
e021c122
RG
4343 changed = true;
4344 }
a70e9985 4345 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
e021c122 4346 {
61dd6a2e
JH
4347 bool final;
4348 vec <cgraph_node *>targets
058d0a90 4349 = possible_polymorphic_call_targets (callee, stmt, &final);
2b5f0895 4350 if (final && targets.length () <= 1 && dbg_cnt (devirt))
e021c122 4351 {
a70e9985 4352 tree lhs = gimple_call_lhs (stmt);
2b5f0895
XDL
4353 if (dump_enabled_p ())
4354 {
4f5b9c80 4355 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
2b5f0895
XDL
4356 "folding virtual function call to %s\n",
4357 targets.length () == 1
4358 ? targets[0]->name ()
4359 : "__builtin_unreachable");
4360 }
61dd6a2e 4361 if (targets.length () == 1)
cf3e5a89 4362 {
18954840
JJ
4363 tree fndecl = targets[0]->decl;
4364 gimple_call_set_fndecl (stmt, fndecl);
cf3e5a89 4365 changed = true;
18954840
JJ
4366 /* If changing the call to __cxa_pure_virtual
4367 or similar noreturn function, adjust gimple_call_fntype
4368 too. */
865f7046 4369 if (gimple_call_noreturn_p (stmt)
18954840
JJ
4370 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
4371 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
4372 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
4373 == void_type_node))
4374 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
a70e9985 4375 /* If the call becomes noreturn, remove the lhs. */
3cee7e4e
MP
4376 if (lhs
4377 && gimple_call_noreturn_p (stmt)
18954840 4378 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
abd3a68c 4379 || should_remove_lhs_p (lhs)))
a70e9985
JJ
4380 {
4381 if (TREE_CODE (lhs) == SSA_NAME)
4382 {
b731b390 4383 tree var = create_tmp_var (TREE_TYPE (lhs));
a70e9985 4384 tree def = get_or_create_ssa_default_def (cfun, var);
355fe088 4385 gimple *new_stmt = gimple_build_assign (lhs, def);
a70e9985
JJ
4386 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
4387 }
4388 gimple_call_set_lhs (stmt, NULL_TREE);
4389 }
0b986c6a 4390 maybe_remove_unused_call_args (cfun, stmt);
cf3e5a89 4391 }
a70e9985 4392 else
cf3e5a89
JJ
4393 {
4394 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
355fe088 4395 gimple *new_stmt = gimple_build_call (fndecl, 0);
cf3e5a89 4396 gimple_set_location (new_stmt, gimple_location (stmt));
2da6996c
RB
 4397	      /* If the call had an SSA name as its lhs, morph that into
 4398		 an uninitialized value.  */
a70e9985
JJ
4399 if (lhs && TREE_CODE (lhs) == SSA_NAME)
4400 {
b731b390 4401 tree var = create_tmp_var (TREE_TYPE (lhs));
2da6996c
RB
4402 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
4403 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
4404 set_ssa_default_def (cfun, var, lhs);
42e52a51 4405 }
779724a5 4406 gimple_move_vops (new_stmt, stmt);
2da6996c 4407 gsi_replace (gsi, new_stmt, false);
cf3e5a89
JJ
4408 return true;
4409 }
e021c122 4410 }
49c471e3 4411 }
e021c122 4412 }
49c471e3 4413
f2d3d07e
RH
4414 /* Check for indirect calls that became direct calls, and then
4415 no longer require a static chain. */
4416 if (gimple_call_chain (stmt))
4417 {
4418 tree fn = gimple_call_fndecl (stmt);
4419 if (fn && !DECL_STATIC_CHAIN (fn))
4420 {
4421 gimple_call_set_chain (stmt, NULL);
4422 changed = true;
4423 }
4424 else
4425 {
4426 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
4427 if (tmp)
4428 {
4429 gimple_call_set_chain (stmt, tmp);
4430 changed = true;
4431 }
4432 }
4433 }
4434
e021c122
RG
4435 if (inplace)
4436 return changed;
4437
4438 /* Check for builtins that CCP can handle using information not
4439 available in the generic fold routines. */
fef5a0d9
RB
4440 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4441 {
4442 if (gimple_fold_builtin (gsi))
4443 changed = true;
4444 }
4445 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
e021c122 4446 {
ea679d55 4447 changed |= targetm.gimple_fold_builtin (gsi);
3b45a007 4448 }
368b454d 4449 else if (gimple_call_internal_p (stmt))
ed9c79e1 4450 {
368b454d
JJ
4451 enum tree_code subcode = ERROR_MARK;
4452 tree result = NULL_TREE;
1304953e
JJ
4453 bool cplx_result = false;
4454 tree overflow = NULL_TREE;
368b454d
JJ
4455 switch (gimple_call_internal_fn (stmt))
4456 {
4457 case IFN_BUILTIN_EXPECT:
4458 result = fold_builtin_expect (gimple_location (stmt),
4459 gimple_call_arg (stmt, 0),
4460 gimple_call_arg (stmt, 1),
1e9168b2
ML
4461 gimple_call_arg (stmt, 2),
4462 NULL_TREE);
368b454d 4463 break;
0e82f089 4464 case IFN_UBSAN_OBJECT_SIZE:
ca1150f0
JJ
4465 {
4466 tree offset = gimple_call_arg (stmt, 1);
4467 tree objsize = gimple_call_arg (stmt, 2);
4468 if (integer_all_onesp (objsize)
4469 || (TREE_CODE (offset) == INTEGER_CST
4470 && TREE_CODE (objsize) == INTEGER_CST
4471 && tree_int_cst_le (offset, objsize)))
4472 {
4473 replace_call_with_value (gsi, NULL_TREE);
4474 return true;
4475 }
4476 }
4477 break;
4478 case IFN_UBSAN_PTR:
4479 if (integer_zerop (gimple_call_arg (stmt, 1)))
0e82f089 4480 {
ca1150f0 4481 replace_call_with_value (gsi, NULL_TREE);
0e82f089
MP
4482 return true;
4483 }
4484 break;
ca1150f0
JJ
4485 case IFN_UBSAN_BOUNDS:
4486 {
4487 tree index = gimple_call_arg (stmt, 1);
4488 tree bound = gimple_call_arg (stmt, 2);
4489 if (TREE_CODE (index) == INTEGER_CST
4490 && TREE_CODE (bound) == INTEGER_CST)
4491 {
4492 index = fold_convert (TREE_TYPE (bound), index);
4493 if (TREE_CODE (index) == INTEGER_CST
4494 && tree_int_cst_le (index, bound))
4495 {
4496 replace_call_with_value (gsi, NULL_TREE);
4497 return true;
4498 }
4499 }
4500 }
4501 break;
451e8dae
NS
4502 case IFN_GOACC_DIM_SIZE:
4503 case IFN_GOACC_DIM_POS:
4504 result = fold_internal_goacc_dim (stmt);
4505 break;
368b454d
JJ
4506 case IFN_UBSAN_CHECK_ADD:
4507 subcode = PLUS_EXPR;
4508 break;
4509 case IFN_UBSAN_CHECK_SUB:
4510 subcode = MINUS_EXPR;
4511 break;
4512 case IFN_UBSAN_CHECK_MUL:
4513 subcode = MULT_EXPR;
4514 break;
1304953e
JJ
4515 case IFN_ADD_OVERFLOW:
4516 subcode = PLUS_EXPR;
4517 cplx_result = true;
4518 break;
4519 case IFN_SUB_OVERFLOW:
4520 subcode = MINUS_EXPR;
4521 cplx_result = true;
4522 break;
4523 case IFN_MUL_OVERFLOW:
4524 subcode = MULT_EXPR;
4525 cplx_result = true;
4526 break;
868363d4
RS
4527 case IFN_MASK_LOAD:
4528 changed |= gimple_fold_mask_load (gsi, stmt);
4529 break;
4530 case IFN_MASK_STORE:
4531 changed |= gimple_fold_mask_store (gsi, stmt);
4532 break;
368b454d
JJ
4533 default:
4534 break;
4535 }
4536 if (subcode != ERROR_MARK)
4537 {
4538 tree arg0 = gimple_call_arg (stmt, 0);
4539 tree arg1 = gimple_call_arg (stmt, 1);
1304953e
JJ
4540 tree type = TREE_TYPE (arg0);
4541 if (cplx_result)
4542 {
4543 tree lhs = gimple_call_lhs (stmt);
4544 if (lhs == NULL_TREE)
4545 type = NULL_TREE;
4546 else
4547 type = TREE_TYPE (TREE_TYPE (lhs));
4548 }
4549 if (type == NULL_TREE)
4550 ;
368b454d 4551 /* x = y + 0; x = y - 0; x = y * 0; */
1304953e
JJ
4552 else if (integer_zerop (arg1))
4553 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
368b454d
JJ
4554 /* x = 0 + y; x = 0 * y; */
4555 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
1304953e 4556 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
368b454d
JJ
4557 /* x = y - y; */
4558 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
1304953e 4559 result = integer_zero_node;
368b454d 4560 /* x = y * 1; x = 1 * y; */
1304953e
JJ
4561 else if (subcode == MULT_EXPR && integer_onep (arg1))
4562 result = arg0;
4563 else if (subcode == MULT_EXPR && integer_onep (arg0))
4564 result = arg1;
4565 else if (TREE_CODE (arg0) == INTEGER_CST
4566 && TREE_CODE (arg1) == INTEGER_CST)
368b454d 4567 {
1304953e
JJ
4568 if (cplx_result)
4569 result = int_const_binop (subcode, fold_convert (type, arg0),
4570 fold_convert (type, arg1));
4571 else
4572 result = int_const_binop (subcode, arg0, arg1);
4573 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
4574 {
4575 if (cplx_result)
4576 overflow = build_one_cst (type);
4577 else
4578 result = NULL_TREE;
4579 }
4580 }
4581 if (result)
4582 {
4583 if (result == integer_zero_node)
4584 result = build_zero_cst (type);
4585 else if (cplx_result && TREE_TYPE (result) != type)
4586 {
4587 if (TREE_CODE (result) == INTEGER_CST)
4588 {
4589 if (arith_overflowed_p (PLUS_EXPR, type, result,
4590 integer_zero_node))
4591 overflow = build_one_cst (type);
4592 }
4593 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
4594 && TYPE_UNSIGNED (type))
4595 || (TYPE_PRECISION (type)
4596 < (TYPE_PRECISION (TREE_TYPE (result))
4597 + (TYPE_UNSIGNED (TREE_TYPE (result))
4598 && !TYPE_UNSIGNED (type)))))
4599 result = NULL_TREE;
4600 if (result)
4601 result = fold_convert (type, result);
4602 }
368b454d
JJ
4603 }
4604 }
1304953e 4605
ed9c79e1
JJ
4606 if (result)
4607 {
1304953e
JJ
4608 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
4609 result = drop_tree_overflow (result);
4610 if (cplx_result)
4611 {
4612 if (overflow == NULL_TREE)
4613 overflow = build_zero_cst (TREE_TYPE (result));
4614 tree ctype = build_complex_type (TREE_TYPE (result));
4615 if (TREE_CODE (result) == INTEGER_CST
4616 && TREE_CODE (overflow) == INTEGER_CST)
4617 result = build_complex (ctype, result, overflow);
4618 else
4619 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
4620 ctype, result, overflow);
4621 }
ed9c79e1
JJ
4622 if (!update_call_from_tree (gsi, result))
4623 gimplify_and_update_call_from_tree (gsi, result);
4624 changed = true;
4625 }
4626 }
3b45a007 4627
e021c122 4628 return changed;
cbdd87d4
RG
4629}
4630
e0ee10ed 4631
89a79e96
RB
 4632/* Return true if NAME has a use on STMT.  */
4633
4634static bool
355fe088 4635has_use_on_stmt (tree name, gimple *stmt)
89a79e96
RB
4636{
4637 imm_use_iterator iter;
4638 use_operand_p use_p;
4639 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
4640 if (USE_STMT (use_p) == stmt)
4641 return true;
4642 return false;
4643}
4644
e0ee10ed
RB
4645/* Worker for fold_stmt_1 dispatch to pattern based folding with
4646 gimple_simplify.
4647
 4648   Replaces *GSI with the simplification result in *RES_OP
 4649   and the associated statements in *SEQ.  Does the replacement
4650 according to INPLACE and returns true if the operation succeeded. */
4651
4652static bool
4653replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
5d75ad95 4654 gimple_match_op *res_op,
e0ee10ed
RB
4655 gimple_seq *seq, bool inplace)
4656{
355fe088 4657 gimple *stmt = gsi_stmt (*gsi);
5d75ad95
RS
4658 tree *ops = res_op->ops;
4659 unsigned int num_ops = res_op->num_ops;
e0ee10ed
RB
4660
4661 /* Play safe and do not allow abnormals to be mentioned in
89a79e96
RB
4662 newly created statements. See also maybe_push_res_to_seq.
4663 As an exception allow such uses if there was a use of the
4664 same SSA name on the old stmt. */
5d75ad95
RS
4665 for (unsigned int i = 0; i < num_ops; ++i)
4666 if (TREE_CODE (ops[i]) == SSA_NAME
4667 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])
4668 && !has_use_on_stmt (ops[i], stmt))
4669 return false;
4670
4671 if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
4672 for (unsigned int i = 0; i < 2; ++i)
4673 if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
4674 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))
4675 && !has_use_on_stmt (TREE_OPERAND (ops[0], i), stmt))
4676 return false;
e0ee10ed 4677
fec40d06
RS
4678 /* Don't insert new statements when INPLACE is true, even if we could
4679 reuse STMT for the final statement. */
4680 if (inplace && !gimple_seq_empty_p (*seq))
4681 return false;
4682
538dd0b7 4683 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
e0ee10ed 4684 {
5d75ad95
RS
4685 gcc_assert (res_op->code.is_tree_code ());
4686 if (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison
e0ee10ed
RB
4687 /* GIMPLE_CONDs condition may not throw. */
4688 && (!flag_exceptions
4689 || !cfun->can_throw_non_call_exceptions
5d75ad95 4690 || !operation_could_trap_p (res_op->code,
e0ee10ed
RB
4691 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
4692 false, NULL_TREE)))
5d75ad95
RS
4693 gimple_cond_set_condition (cond_stmt, res_op->code, ops[0], ops[1]);
4694 else if (res_op->code == SSA_NAME)
538dd0b7 4695 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
e0ee10ed 4696 build_zero_cst (TREE_TYPE (ops[0])));
5d75ad95 4697 else if (res_op->code == INTEGER_CST)
e0ee10ed
RB
4698 {
4699 if (integer_zerop (ops[0]))
538dd0b7 4700 gimple_cond_make_false (cond_stmt);
e0ee10ed 4701 else
538dd0b7 4702 gimple_cond_make_true (cond_stmt);
e0ee10ed
RB
4703 }
4704 else if (!inplace)
4705 {
5d75ad95 4706 tree res = maybe_push_res_to_seq (res_op, seq);
e0ee10ed
RB
4707 if (!res)
4708 return false;
538dd0b7 4709 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
e0ee10ed
RB
4710 build_zero_cst (TREE_TYPE (res)));
4711 }
4712 else
4713 return false;
4714 if (dump_file && (dump_flags & TDF_DETAILS))
4715 {
4716 fprintf (dump_file, "gimple_simplified to ");
4717 if (!gimple_seq_empty_p (*seq))
4718 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4719 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4720 0, TDF_SLIM);
4721 }
4722 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4723 return true;
4724 }
4725 else if (is_gimple_assign (stmt)
5d75ad95 4726 && res_op->code.is_tree_code ())
e0ee10ed
RB
4727 {
4728 if (!inplace
5d75ad95 4729 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (res_op->code))
e0ee10ed 4730 {
5d75ad95
RS
4731 maybe_build_generic_op (res_op);
4732 gimple_assign_set_rhs_with_ops (gsi, res_op->code,
4733 res_op->op_or_null (0),
4734 res_op->op_or_null (1),
4735 res_op->op_or_null (2));
e0ee10ed
RB
4736 if (dump_file && (dump_flags & TDF_DETAILS))
4737 {
4738 fprintf (dump_file, "gimple_simplified to ");
4739 if (!gimple_seq_empty_p (*seq))
4740 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4741 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4742 0, TDF_SLIM);
4743 }
4744 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4745 return true;
4746 }
4747 }
5d75ad95
RS
4748 else if (res_op->code.is_fn_code ()
4749 && gimple_call_combined_fn (stmt) == res_op->code)
37d486ab 4750 {
5d75ad95
RS
4751 gcc_assert (num_ops == gimple_call_num_args (stmt));
4752 for (unsigned int i = 0; i < num_ops; ++i)
4753 gimple_call_set_arg (stmt, i, ops[i]);
fec40d06
RS
4754 if (dump_file && (dump_flags & TDF_DETAILS))
4755 {
4756 fprintf (dump_file, "gimple_simplified to ");
4757 if (!gimple_seq_empty_p (*seq))
4758 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4759 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
4760 }
4761 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
37d486ab
RB
4762 return true;
4763 }
e0ee10ed
RB
4764 else if (!inplace)
4765 {
4766 if (gimple_has_lhs (stmt))
4767 {
4768 tree lhs = gimple_get_lhs (stmt);
5d75ad95 4769 if (!maybe_push_res_to_seq (res_op, seq, lhs))
de665bbd 4770 return false;
e0ee10ed
RB
4771 if (dump_file && (dump_flags & TDF_DETAILS))
4772 {
4773 fprintf (dump_file, "gimple_simplified to ");
4774 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4775 }
4776 gsi_replace_with_seq_vops (gsi, *seq);
4777 return true;
4778 }
4779 else
4780 gcc_unreachable ();
4781 }
4782
4783 return false;
4784}
4785
040292e7
RB
4786/* Canonicalize MEM_REFs invariant address operand after propagation. */
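/* E.g. MEM[&foo.bar, 0] is rewritten in terms of the base object foo plus
   a constant offset, and MEM[&decl, 0] becomes the plain reference 'decl'
   when the types, volatility and alignment agree (sketch only; the exact
   conditions are checked below).  */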
4787
4788static bool
4789maybe_canonicalize_mem_ref_addr (tree *t)
4790{
4791 bool res = false;
4792
4793 if (TREE_CODE (*t) == ADDR_EXPR)
4794 t = &TREE_OPERAND (*t, 0);
4795
f17a223d
RB
4796 /* The C and C++ frontends use an ARRAY_REF for indexing with their
4797 generic vector extension. The actual vector referenced is
4798 view-converted to an array type for this purpose. If the index
4799 is constant the canonical representation in the middle-end is a
4800 BIT_FIELD_REF so re-write the former to the latter here. */
4801 if (TREE_CODE (*t) == ARRAY_REF
4802 && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
4803 && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
4804 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
4805 {
4806 tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
4807 if (VECTOR_TYPE_P (vtype))
4808 {
4809 tree low = array_ref_low_bound (*t);
4810 if (TREE_CODE (low) == INTEGER_CST)
4811 {
4812 if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
4813 {
4814 widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
4815 wi::to_widest (low));
4816 idx = wi::mul (idx, wi::to_widest
4817 (TYPE_SIZE (TREE_TYPE (*t))));
4818 widest_int ext
4819 = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
4820 if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
4821 {
4822 *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
4823 TREE_TYPE (*t),
4824 TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
4825 TYPE_SIZE (TREE_TYPE (*t)),
92e29a5e 4826 wide_int_to_tree (bitsizetype, idx));
f17a223d
RB
4827 res = true;
4828 }
4829 }
4830 }
4831 }
4832 }
4833
040292e7
RB
4834 while (handled_component_p (*t))
4835 t = &TREE_OPERAND (*t, 0);
4836
 4837   /* Canonicalize MEM [&foo.bar, 0] which appears after propagation
 4838      of invariant addresses into a SSA name MEM_REF address.  */
4839 if (TREE_CODE (*t) == MEM_REF
4840 || TREE_CODE (*t) == TARGET_MEM_REF)
4841 {
4842 tree addr = TREE_OPERAND (*t, 0);
4843 if (TREE_CODE (addr) == ADDR_EXPR
4844 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
4845 || handled_component_p (TREE_OPERAND (addr, 0))))
4846 {
4847 tree base;
a90c8804 4848 poly_int64 coffset;
040292e7
RB
4849 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
4850 &coffset);
4851 if (!base)
4852 gcc_unreachable ();
4853
4854 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
4855 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
4856 TREE_OPERAND (*t, 1),
4857 size_int (coffset));
4858 res = true;
4859 }
4860 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
4861 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
4862 }
4863
4864 /* Canonicalize back MEM_REFs to plain reference trees if the object
4865 accessed is a decl that has the same access semantics as the MEM_REF. */
4866 if (TREE_CODE (*t) == MEM_REF
4867 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
f3dccf50
RB
4868 && integer_zerop (TREE_OPERAND (*t, 1))
4869 && MR_DEPENDENCE_CLIQUE (*t) == 0)
040292e7
RB
4870 {
4871 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4872 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
4873 if (/* Same volatile qualification. */
4874 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
4875 /* Same TBAA behavior with -fstrict-aliasing. */
4876 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
4877 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
4878 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
4879 /* Same alignment. */
4880 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
4881 /* We have to look out here to not drop a required conversion
4882 from the rhs to the lhs if *t appears on the lhs or vice-versa
4883 if it appears on the rhs. Thus require strict type
4884 compatibility. */
4885 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
4886 {
4887 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4888 res = true;
4889 }
4890 }
4891
4892 /* Canonicalize TARGET_MEM_REF in particular with respect to
4893 the indexes becoming constant. */
4894 else if (TREE_CODE (*t) == TARGET_MEM_REF)
4895 {
4896 tree tem = maybe_fold_tmr (*t);
4897 if (tem)
4898 {
4899 *t = tem;
4900 res = true;
4901 }
4902 }
4903
4904 return res;
4905}
4906
cbdd87d4
RG
4907/* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
4908 distinguishes both cases. */
4909
4910static bool
e0ee10ed 4911fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
cbdd87d4
RG
4912{
4913 bool changed = false;
355fe088 4914 gimple *stmt = gsi_stmt (*gsi);
a8b85ce9 4915 bool nowarning = gimple_no_warning_p (stmt);
cbdd87d4 4916 unsigned i;
a8b85ce9 4917 fold_defer_overflow_warnings ();
cbdd87d4 4918
040292e7
RB
4919 /* First do required canonicalization of [TARGET_]MEM_REF addresses
4920 after propagation.
4921 ??? This shouldn't be done in generic folding but in the
4922 propagation helpers which also know whether an address was
89a79e96
RB
4923 propagated.
4924 Also canonicalize operand order. */
040292e7
RB
4925 switch (gimple_code (stmt))
4926 {
4927 case GIMPLE_ASSIGN:
4928 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
4929 {
4930 tree *rhs = gimple_assign_rhs1_ptr (stmt);
4931 if ((REFERENCE_CLASS_P (*rhs)
4932 || TREE_CODE (*rhs) == ADDR_EXPR)
4933 && maybe_canonicalize_mem_ref_addr (rhs))
4934 changed = true;
4935 tree *lhs = gimple_assign_lhs_ptr (stmt);
4936 if (REFERENCE_CLASS_P (*lhs)
4937 && maybe_canonicalize_mem_ref_addr (lhs))
4938 changed = true;
4939 }
89a79e96
RB
4940 else
4941 {
4942 /* Canonicalize operand order. */
4943 enum tree_code code = gimple_assign_rhs_code (stmt);
4944 if (TREE_CODE_CLASS (code) == tcc_comparison
4945 || commutative_tree_code (code)
4946 || commutative_ternary_tree_code (code))
4947 {
4948 tree rhs1 = gimple_assign_rhs1 (stmt);
4949 tree rhs2 = gimple_assign_rhs2 (stmt);
14e72812 4950 if (tree_swap_operands_p (rhs1, rhs2))
89a79e96
RB
4951 {
4952 gimple_assign_set_rhs1 (stmt, rhs2);
4953 gimple_assign_set_rhs2 (stmt, rhs1);
4954 if (TREE_CODE_CLASS (code) == tcc_comparison)
4955 gimple_assign_set_rhs_code (stmt,
4956 swap_tree_comparison (code));
4957 changed = true;
4958 }
4959 }
4960 }
040292e7
RB
4961 break;
4962 case GIMPLE_CALL:
4963 {
4964 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4965 {
4966 tree *arg = gimple_call_arg_ptr (stmt, i);
4967 if (REFERENCE_CLASS_P (*arg)
4968 && maybe_canonicalize_mem_ref_addr (arg))
4969 changed = true;
4970 }
4971 tree *lhs = gimple_call_lhs_ptr (stmt);
4972 if (*lhs
4973 && REFERENCE_CLASS_P (*lhs)
4974 && maybe_canonicalize_mem_ref_addr (lhs))
4975 changed = true;
4976 break;
4977 }
4978 case GIMPLE_ASM:
4979 {
538dd0b7
DM
4980 gasm *asm_stmt = as_a <gasm *> (stmt);
4981 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
040292e7 4982 {
538dd0b7 4983 tree link = gimple_asm_output_op (asm_stmt, i);
040292e7
RB
4984 tree op = TREE_VALUE (link);
4985 if (REFERENCE_CLASS_P (op)
4986 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4987 changed = true;
4988 }
538dd0b7 4989 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
040292e7 4990 {
538dd0b7 4991 tree link = gimple_asm_input_op (asm_stmt, i);
040292e7
RB
4992 tree op = TREE_VALUE (link);
4993 if ((REFERENCE_CLASS_P (op)
4994 || TREE_CODE (op) == ADDR_EXPR)
4995 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4996 changed = true;
4997 }
4998 }
4999 break;
5000 case GIMPLE_DEBUG:
5001 if (gimple_debug_bind_p (stmt))
5002 {
5003 tree *val = gimple_debug_bind_get_value_ptr (stmt);
5004 if (*val
5005 && (REFERENCE_CLASS_P (*val)
5006 || TREE_CODE (*val) == ADDR_EXPR)
5007 && maybe_canonicalize_mem_ref_addr (val))
5008 changed = true;
5009 }
5010 break;
89a79e96
RB
5011 case GIMPLE_COND:
5012 {
5013 /* Canonicalize operand order. */
5014 tree lhs = gimple_cond_lhs (stmt);
5015 tree rhs = gimple_cond_rhs (stmt);
14e72812 5016 if (tree_swap_operands_p (lhs, rhs))
89a79e96
RB
5017 {
5018 gcond *gc = as_a <gcond *> (stmt);
5019 gimple_cond_set_lhs (gc, rhs);
5020 gimple_cond_set_rhs (gc, lhs);
5021 gimple_cond_set_code (gc,
5022 swap_tree_comparison (gimple_cond_code (gc)));
5023 changed = true;
5024 }
5025 }
040292e7
RB
5026 default:;
5027 }
5028
e0ee10ed
RB
5029 /* Dispatch to pattern-based folding. */
5030 if (!inplace
5031 || is_gimple_assign (stmt)
5032 || gimple_code (stmt) == GIMPLE_COND)
5033 {
5034 gimple_seq seq = NULL;
5d75ad95
RS
5035 gimple_match_op res_op;
5036 if (gimple_simplify (stmt, &res_op, inplace ? NULL : &seq,
0ff093d8 5037 valueize, valueize))
e0ee10ed 5038 {
5d75ad95 5039 if (replace_stmt_with_simplification (gsi, &res_op, &seq, inplace))
e0ee10ed
RB
5040 changed = true;
5041 else
5042 gimple_seq_discard (seq);
5043 }
5044 }
5045
5046 stmt = gsi_stmt (*gsi);
5047
cbdd87d4
RG
5048 /* Fold the main computation performed by the statement. */
5049 switch (gimple_code (stmt))
5050 {
5051 case GIMPLE_ASSIGN:
5052 {
819ec64c
RB
5053 /* Try to canonicalize for boolean-typed X the comparisons
5054 X == 0, X == 1, X != 0, and X != 1. */
5055 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
5056 || gimple_assign_rhs_code (stmt) == NE_EXPR)
5fbcc0ed 5057 {
819ec64c
RB
5058 tree lhs = gimple_assign_lhs (stmt);
5059 tree op1 = gimple_assign_rhs1 (stmt);
5060 tree op2 = gimple_assign_rhs2 (stmt);
5061 tree type = TREE_TYPE (op1);
5062
5063 /* Check whether the comparison operands are of the same boolean
5064 type as the result type is.
5065 Check that second operand is an integer-constant with value
5066 one or zero. */
5067 if (TREE_CODE (op2) == INTEGER_CST
5068 && (integer_zerop (op2) || integer_onep (op2))
5069 && useless_type_conversion_p (TREE_TYPE (lhs), type))
5070 {
5071 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
5072 bool is_logical_not = false;
5073
5074	      /* X == 0 and X != 1 is a logical-not of X
5075 X == 1 and X != 0 is X */
5076 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
5077 || (cmp_code == NE_EXPR && integer_onep (op2)))
5078 is_logical_not = true;
5079
5080 if (is_logical_not == false)
5081 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
5082	      /* Only for one-bit precision typed X is the transformation
5083		 !X -> ~X valid. */
5084 else if (TYPE_PRECISION (type) == 1)
5085 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
5086 /* Otherwise we use !X -> X ^ 1. */
5087 else
5088 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
5089 build_int_cst (type, 1));
5090 changed = true;
5091 break;
5092 }
5fbcc0ed 5093 }
819ec64c
RB
5094
5095 unsigned old_num_ops = gimple_num_ops (stmt);
5096 tree lhs = gimple_assign_lhs (stmt);
5097 tree new_rhs = fold_gimple_assign (gsi);
cbdd87d4
RG
5098 if (new_rhs
5099 && !useless_type_conversion_p (TREE_TYPE (lhs),
5100 TREE_TYPE (new_rhs)))
5101 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
5102 if (new_rhs
5103 && (!inplace
5104 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
5105 {
5106 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
5107 changed = true;
5108 }
5109 break;
5110 }
5111
cbdd87d4 5112 case GIMPLE_CALL:
ceeffab0 5113 changed |= gimple_fold_call (gsi, inplace);
cbdd87d4
RG
5114 break;
5115
5116 case GIMPLE_ASM:
5117 /* Fold *& in asm operands. */
38384150 5118 {
538dd0b7 5119 gasm *asm_stmt = as_a <gasm *> (stmt);
38384150
JJ
5120 size_t noutputs;
5121 const char **oconstraints;
5122 const char *constraint;
5123 bool allows_mem, allows_reg;
5124
538dd0b7 5125 noutputs = gimple_asm_noutputs (asm_stmt);
38384150
JJ
5126 oconstraints = XALLOCAVEC (const char *, noutputs);
5127
538dd0b7 5128 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
38384150 5129 {
538dd0b7 5130 tree link = gimple_asm_output_op (asm_stmt, i);
38384150
JJ
5131 tree op = TREE_VALUE (link);
5132 oconstraints[i]
5133 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5134 if (REFERENCE_CLASS_P (op)
5135 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
5136 {
5137 TREE_VALUE (link) = op;
5138 changed = true;
5139 }
5140 }
538dd0b7 5141 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
38384150 5142 {
538dd0b7 5143 tree link = gimple_asm_input_op (asm_stmt, i);
38384150
JJ
5144 tree op = TREE_VALUE (link);
5145 constraint
5146 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5147 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
5148 oconstraints, &allows_mem, &allows_reg);
5149 if (REFERENCE_CLASS_P (op)
5150 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
5151 != NULL_TREE)
5152 {
5153 TREE_VALUE (link) = op;
5154 changed = true;
5155 }
5156 }
5157 }
cbdd87d4
RG
5158 break;
5159
bd422c4a
RG
5160 case GIMPLE_DEBUG:
5161 if (gimple_debug_bind_p (stmt))
5162 {
5163 tree val = gimple_debug_bind_get_value (stmt);
5164 if (val
5165 && REFERENCE_CLASS_P (val))
5166 {
5167 tree tem = maybe_fold_reference (val, false);
5168 if (tem)
5169 {
5170 gimple_debug_bind_set_value (stmt, tem);
5171 changed = true;
5172 }
5173 }
3e888a5e
RG
5174 else if (val
5175 && TREE_CODE (val) == ADDR_EXPR)
5176 {
5177 tree ref = TREE_OPERAND (val, 0);
5178 tree tem = maybe_fold_reference (ref, false);
5179 if (tem)
5180 {
5181 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
5182 gimple_debug_bind_set_value (stmt, tem);
5183 changed = true;
5184 }
5185 }
bd422c4a
RG
5186 }
5187 break;
5188
cfe3d653
PK
5189 case GIMPLE_RETURN:
5190 {
5191 greturn *ret_stmt = as_a<greturn *> (stmt);
5192 tree ret = gimple_return_retval(ret_stmt);
5193
5194 if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
5195 {
5196 tree val = valueize (ret);
1af928db
RB
5197 if (val && val != ret
5198 && may_propagate_copy (ret, val))
cfe3d653
PK
5199 {
5200 gimple_return_set_retval (ret_stmt, val);
5201 changed = true;
5202 }
5203 }
5204 }
5205 break;
5206
cbdd87d4
RG
5207 default:;
5208 }
5209
5210 stmt = gsi_stmt (*gsi);
5211
37376165
RB
5212 /* Fold *& on the lhs. */
5213 if (gimple_has_lhs (stmt))
cbdd87d4
RG
5214 {
5215 tree lhs = gimple_get_lhs (stmt);
5216 if (lhs && REFERENCE_CLASS_P (lhs))
5217 {
5218 tree new_lhs = maybe_fold_reference (lhs, true);
5219 if (new_lhs)
5220 {
5221 gimple_set_lhs (stmt, new_lhs);
5222 changed = true;
5223 }
5224 }
5225 }
5226
a8b85ce9 5227 fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
cbdd87d4
RG
5228 return changed;
5229}
5230
e0ee10ed
RB
5231/* Valueization callback that ends up not following SSA edges. */
5232
5233tree
5234no_follow_ssa_edges (tree)
5235{
5236 return NULL_TREE;
5237}
5238
45cc9f96
RB
5239/* Valueization callback that ends up following single-use SSA edges only. */
5240
5241tree
5242follow_single_use_edges (tree val)
5243{
5244 if (TREE_CODE (val) == SSA_NAME
5245 && !has_single_use (val))
5246 return NULL_TREE;
5247 return val;
5248}
5249
c566cc9f
RS
5250/* Valueization callback that follows all SSA edges. */
5251
5252tree
5253follow_all_ssa_edges (tree val)
5254{
5255 return val;
5256}
5257
cbdd87d4
RG
5258/* Fold the statement pointed to by GSI. In some cases, this function may
5259 replace the whole statement with a new one. Returns true iff folding
5260 makes any changes.
5261 The statement pointed to by GSI should be in valid gimple form but may
5262   be in an unfolded state resulting from, for example, constant propagation
5263 which can produce *&x = 0. */
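/* An illustrative usage sketch (assuming a pass that walks basic block BB):

     for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
	  !gsi_end_p (gsi); gsi_next (&gsi))
       if (fold_stmt (&gsi))
	 update_stmt (gsi_stmt (gsi));  */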
5264
5265bool
5266fold_stmt (gimple_stmt_iterator *gsi)
5267{
e0ee10ed
RB
5268 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
5269}
5270
5271bool
5272fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
5273{
5274 return fold_stmt_1 (gsi, false, valueize);
cbdd87d4
RG
5275}
5276
59401b92 5277/* Perform the minimal folding on statement *GSI. Only operations like
cbdd87d4
RG
5278 *&x created by constant propagation are handled. The statement cannot
5279 be replaced with a new one. Return true if the statement was
5280 changed, false otherwise.
59401b92 5281 The statement *GSI should be in valid gimple form but may
cbdd87d4
RG
5282   be in an unfolded state resulting from, for example, constant propagation
5283 which can produce *&x = 0. */
5284
5285bool
59401b92 5286fold_stmt_inplace (gimple_stmt_iterator *gsi)
cbdd87d4 5287{
355fe088 5288 gimple *stmt = gsi_stmt (*gsi);
e0ee10ed 5289 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
59401b92 5290 gcc_assert (gsi_stmt (*gsi) == stmt);
cbdd87d4
RG
5291 return changed;
5292}
5293
e89065a1
SL
5294/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
5295 if EXPR is null or we don't know how.
5296 If non-null, the result always has boolean type. */
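/* For illustration: with INVERT false an SSA name X canonicalizes to the
   comparison X != 0, while with INVERT true it becomes X == 0; a comparison
   such as a < b is returned with boolean type or, when inverted, rewritten
   to a >= b (assuming operands for which that inversion is valid).  */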
5297
5298static tree
5299canonicalize_bool (tree expr, bool invert)
5300{
5301 if (!expr)
5302 return NULL_TREE;
5303 else if (invert)
5304 {
5305 if (integer_nonzerop (expr))
5306 return boolean_false_node;
5307 else if (integer_zerop (expr))
5308 return boolean_true_node;
5309 else if (TREE_CODE (expr) == SSA_NAME)
5310 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
5311 build_int_cst (TREE_TYPE (expr), 0));
98209db3 5312 else if (COMPARISON_CLASS_P (expr))
e89065a1
SL
5313 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
5314 boolean_type_node,
5315 TREE_OPERAND (expr, 0),
5316 TREE_OPERAND (expr, 1));
5317 else
5318 return NULL_TREE;
5319 }
5320 else
5321 {
5322 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5323 return expr;
5324 if (integer_nonzerop (expr))
5325 return boolean_true_node;
5326 else if (integer_zerop (expr))
5327 return boolean_false_node;
5328 else if (TREE_CODE (expr) == SSA_NAME)
5329 return fold_build2 (NE_EXPR, boolean_type_node, expr,
5330 build_int_cst (TREE_TYPE (expr), 0));
98209db3 5331 else if (COMPARISON_CLASS_P (expr))
e89065a1
SL
5332 return fold_build2 (TREE_CODE (expr),
5333 boolean_type_node,
5334 TREE_OPERAND (expr, 0),
5335 TREE_OPERAND (expr, 1));
5336 else
5337 return NULL_TREE;
5338 }
5339}
5340
5341/* Check to see if a boolean expression EXPR is logically equivalent to the
5342 comparison (OP1 CODE OP2). Check for various identities involving
5343 SSA_NAMEs. */
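/* For example (with illustrative SSA names): if EXPR is b_1 and b_1 is
   defined by  b_1 = x_2 < y_3,  then EXPR is considered equivalent both
   to the comparison (x_2 < y_3) and to (b_1 != 0).  */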
5344
5345static bool
5346same_bool_comparison_p (const_tree expr, enum tree_code code,
5347 const_tree op1, const_tree op2)
5348{
355fe088 5349 gimple *s;
e89065a1
SL
5350
5351 /* The obvious case. */
5352 if (TREE_CODE (expr) == code
5353 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
5354 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
5355 return true;
5356
5357 /* Check for comparing (name, name != 0) and the case where expr
5358 is an SSA_NAME with a definition matching the comparison. */
5359 if (TREE_CODE (expr) == SSA_NAME
5360 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5361 {
5362 if (operand_equal_p (expr, op1, 0))
5363 return ((code == NE_EXPR && integer_zerop (op2))
5364 || (code == EQ_EXPR && integer_nonzerop (op2)));
5365 s = SSA_NAME_DEF_STMT (expr);
5366 if (is_gimple_assign (s)
5367 && gimple_assign_rhs_code (s) == code
5368 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
5369 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
5370 return true;
5371 }
5372
5373 /* If op1 is of the form (name != 0) or (name == 0), and the definition
5374 of name is a comparison, recurse. */
5375 if (TREE_CODE (op1) == SSA_NAME
5376 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
5377 {
5378 s = SSA_NAME_DEF_STMT (op1);
5379 if (is_gimple_assign (s)
5380 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
5381 {
5382 enum tree_code c = gimple_assign_rhs_code (s);
5383 if ((c == NE_EXPR && integer_zerop (op2))
5384 || (c == EQ_EXPR && integer_nonzerop (op2)))
5385 return same_bool_comparison_p (expr, c,
5386 gimple_assign_rhs1 (s),
5387 gimple_assign_rhs2 (s));
5388 if ((c == EQ_EXPR && integer_zerop (op2))
5389 || (c == NE_EXPR && integer_nonzerop (op2)))
5390 return same_bool_comparison_p (expr,
5391 invert_tree_comparison (c, false),
5392 gimple_assign_rhs1 (s),
5393 gimple_assign_rhs2 (s));
5394 }
5395 }
5396 return false;
5397}
5398
5399/* Check to see if two boolean expressions OP1 and OP2 are logically
5400 equivalent. */
5401
5402static bool
5403same_bool_result_p (const_tree op1, const_tree op2)
5404{
5405 /* Simple cases first. */
5406 if (operand_equal_p (op1, op2, 0))
5407 return true;
5408
5409 /* Check the cases where at least one of the operands is a comparison.
5410 These are a bit smarter than operand_equal_p in that they apply some
5411     identities on SSA_NAMEs. */
98209db3 5412 if (COMPARISON_CLASS_P (op2)
e89065a1
SL
5413 && same_bool_comparison_p (op1, TREE_CODE (op2),
5414 TREE_OPERAND (op2, 0),
5415 TREE_OPERAND (op2, 1)))
5416 return true;
98209db3 5417 if (COMPARISON_CLASS_P (op1)
e89065a1
SL
5418 && same_bool_comparison_p (op2, TREE_CODE (op1),
5419 TREE_OPERAND (op1, 0),
5420 TREE_OPERAND (op1, 1)))
5421 return true;
5422
5423 /* Default case. */
5424 return false;
5425}
5426
5427/* Forward declarations for some mutually recursive functions. */
5428
5429static tree
5f487a34 5430and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
e89065a1
SL
5431 enum tree_code code2, tree op2a, tree op2b);
5432static tree
5f487a34 5433and_var_with_comparison (tree type, tree var, bool invert,
e89065a1
SL
5434 enum tree_code code2, tree op2a, tree op2b);
5435static tree
5f487a34 5436and_var_with_comparison_1 (tree type, gimple *stmt,
e89065a1
SL
5437 enum tree_code code2, tree op2a, tree op2b);
5438static tree
5f487a34 5439or_comparisons_1 (tree, enum tree_code code1, tree op1a, tree op1b,
e89065a1
SL
5440 enum tree_code code2, tree op2a, tree op2b);
5441static tree
5f487a34 5442or_var_with_comparison (tree, tree var, bool invert,
e89065a1
SL
5443 enum tree_code code2, tree op2a, tree op2b);
5444static tree
5f487a34 5445or_var_with_comparison_1 (tree, gimple *stmt,
e89065a1
SL
5446 enum tree_code code2, tree op2a, tree op2b);
5447
5448/* Helper function for and_comparisons_1: try to simplify the AND of the
5449 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5450 If INVERT is true, invert the value of the VAR before doing the AND.
5451 Return NULL_EXPR if we can't simplify this to a single expression. */
5452
5453static tree
5f487a34 5454and_var_with_comparison (tree type, tree var, bool invert,
e89065a1
SL
5455 enum tree_code code2, tree op2a, tree op2b)
5456{
5457 tree t;
355fe088 5458 gimple *stmt = SSA_NAME_DEF_STMT (var);
e89065a1
SL
5459
5460 /* We can only deal with variables whose definitions are assignments. */
5461 if (!is_gimple_assign (stmt))
5462 return NULL_TREE;
5463
5464 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5465 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
5466 Then we only have to consider the simpler non-inverted cases. */
5467 if (invert)
5f487a34 5468 t = or_var_with_comparison_1 (type, stmt,
e89065a1
SL
5469 invert_tree_comparison (code2, false),
5470 op2a, op2b);
5471 else
5f487a34 5472 t = and_var_with_comparison_1 (type, stmt, code2, op2a, op2b);
e89065a1
SL
5473 return canonicalize_bool (t, invert);
5474}
5475
5476/* Try to simplify the AND of the ssa variable defined by the assignment
5477 STMT with the comparison specified by (OP2A CODE2 OP2B).
5478 Return NULL_EXPR if we can't simplify this to a single expression. */
5479
5480static tree
5f487a34 5481and_var_with_comparison_1 (tree type, gimple *stmt,
e89065a1
SL
5482 enum tree_code code2, tree op2a, tree op2b)
5483{
5484 tree var = gimple_assign_lhs (stmt);
5485 tree true_test_var = NULL_TREE;
5486 tree false_test_var = NULL_TREE;
5487 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5488
5489 /* Check for identities like (var AND (var == 0)) => false. */
5490 if (TREE_CODE (op2a) == SSA_NAME
5491 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5492 {
5493 if ((code2 == NE_EXPR && integer_zerop (op2b))
5494 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5495 {
5496 true_test_var = op2a;
5497 if (var == true_test_var)
5498 return var;
5499 }
5500 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5501 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5502 {
5503 false_test_var = op2a;
5504 if (var == false_test_var)
5505 return boolean_false_node;
5506 }
5507 }
5508
5509 /* If the definition is a comparison, recurse on it. */
5510 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5511 {
5f487a34 5512 tree t = and_comparisons_1 (type, innercode,
e89065a1
SL
5513 gimple_assign_rhs1 (stmt),
5514 gimple_assign_rhs2 (stmt),
5515 code2,
5516 op2a,
5517 op2b);
5518 if (t)
5519 return t;
5520 }
5521
5522 /* If the definition is an AND or OR expression, we may be able to
5523 simplify by reassociating. */
eb9820c0
KT
5524 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5525 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
e89065a1
SL
5526 {
5527 tree inner1 = gimple_assign_rhs1 (stmt);
5528 tree inner2 = gimple_assign_rhs2 (stmt);
355fe088 5529 gimple *s;
e89065a1
SL
5530 tree t;
5531 tree partial = NULL_TREE;
eb9820c0 5532 bool is_and = (innercode == BIT_AND_EXPR);
e89065a1
SL
5533
5534 /* Check for boolean identities that don't require recursive examination
5535 of inner1/inner2:
5536 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
5537 inner1 AND (inner1 OR inner2) => inner1
5538 !inner1 AND (inner1 AND inner2) => false
5539 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
5540 Likewise for similar cases involving inner2. */
5541 if (inner1 == true_test_var)
5542 return (is_and ? var : inner1);
5543 else if (inner2 == true_test_var)
5544 return (is_and ? var : inner2);
5545 else if (inner1 == false_test_var)
5546 return (is_and
5547 ? boolean_false_node
5f487a34
LJH
5548 : and_var_with_comparison (type, inner2, false, code2, op2a,
5549 op2b));
e89065a1
SL
5550 else if (inner2 == false_test_var)
5551 return (is_and
5552 ? boolean_false_node
5f487a34
LJH
5553 : and_var_with_comparison (type, inner1, false, code2, op2a,
5554 op2b));
e89065a1
SL
5555
5556 /* Next, redistribute/reassociate the AND across the inner tests.
5557 Compute the first partial result, (inner1 AND (op2a code op2b)) */
5558 if (TREE_CODE (inner1) == SSA_NAME
5559 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5560 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5f487a34 5561 && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
e89065a1
SL
5562 gimple_assign_rhs1 (s),
5563 gimple_assign_rhs2 (s),
5564 code2, op2a, op2b)))
5565 {
5566 /* Handle the AND case, where we are reassociating:
5567 (inner1 AND inner2) AND (op2a code2 op2b)
5568 => (t AND inner2)
5569 If the partial result t is a constant, we win. Otherwise
5570 continue on to try reassociating with the other inner test. */
5571 if (is_and)
5572 {
5573 if (integer_onep (t))
5574 return inner2;
5575 else if (integer_zerop (t))
5576 return boolean_false_node;
5577 }
5578
5579 /* Handle the OR case, where we are redistributing:
5580 (inner1 OR inner2) AND (op2a code2 op2b)
5581 => (t OR (inner2 AND (op2a code2 op2b))) */
8236c8eb
JJ
5582 else if (integer_onep (t))
5583 return boolean_true_node;
5584
5585 /* Save partial result for later. */
5586 partial = t;
e89065a1
SL
5587 }
5588
5589 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
5590 if (TREE_CODE (inner2) == SSA_NAME
5591 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5592 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5f487a34 5593 && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
e89065a1
SL
5594 gimple_assign_rhs1 (s),
5595 gimple_assign_rhs2 (s),
5596 code2, op2a, op2b)))
5597 {
5598 /* Handle the AND case, where we are reassociating:
5599 (inner1 AND inner2) AND (op2a code2 op2b)
5600 => (inner1 AND t) */
5601 if (is_and)
5602 {
5603 if (integer_onep (t))
5604 return inner1;
5605 else if (integer_zerop (t))
5606 return boolean_false_node;
8236c8eb
JJ
5607 /* If both are the same, we can apply the identity
5608 (x AND x) == x. */
5609 else if (partial && same_bool_result_p (t, partial))
5610 return t;
e89065a1
SL
5611 }
5612
5613	  /* Handle the OR case, where we are redistributing:
5614 (inner1 OR inner2) AND (op2a code2 op2b)
5615 => (t OR (inner1 AND (op2a code2 op2b)))
5616 => (t OR partial) */
5617 else
5618 {
5619 if (integer_onep (t))
5620 return boolean_true_node;
5621 else if (partial)
5622 {
5623 /* We already got a simplification for the other
5624 operand to the redistributed OR expression. The
5625 interesting case is when at least one is false.
5626 Or, if both are the same, we can apply the identity
5627 (x OR x) == x. */
5628 if (integer_zerop (partial))
5629 return t;
5630 else if (integer_zerop (t))
5631 return partial;
5632 else if (same_bool_result_p (t, partial))
5633 return t;
5634 }
5635 }
5636 }
5637 }
5638 return NULL_TREE;
5639}
5640
5641/* Try to simplify the AND of two comparisons defined by
5642 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5643 If this can be done without constructing an intermediate value,
5644 return the resulting tree; otherwise NULL_TREE is returned.
5645 This function is deliberately asymmetric as it recurses on SSA_DEFs
5646 in the first comparison but not the second. */
5647
5648static tree
5f487a34 5649and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
e89065a1
SL
5650 enum tree_code code2, tree op2a, tree op2b)
5651{
ae22ac3c 5652 tree truth_type = truth_type_for (TREE_TYPE (op1a));
31ed6226 5653
e89065a1
SL
5654 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
5655 if (operand_equal_p (op1a, op2a, 0)
5656 && operand_equal_p (op1b, op2b, 0))
5657 {
eb9820c0 5658 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
5659 tree t = combine_comparisons (UNKNOWN_LOCATION,
5660 TRUTH_ANDIF_EXPR, code1, code2,
31ed6226 5661 truth_type, op1a, op1b);
e89065a1
SL
5662 if (t)
5663 return t;
5664 }
5665
5666 /* Likewise the swapped case of the above. */
5667 if (operand_equal_p (op1a, op2b, 0)
5668 && operand_equal_p (op1b, op2a, 0))
5669 {
eb9820c0 5670 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
5671 tree t = combine_comparisons (UNKNOWN_LOCATION,
5672 TRUTH_ANDIF_EXPR, code1,
5673 swap_tree_comparison (code2),
31ed6226 5674 truth_type, op1a, op1b);
e89065a1
SL
5675 if (t)
5676 return t;
5677 }
5678
e89065a1
SL
5679 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5680 NAME's definition is a truth value. See if there are any simplifications
5681 that can be done against the NAME's definition. */
5682 if (TREE_CODE (op1a) == SSA_NAME
5683 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5684 && (integer_zerop (op1b) || integer_onep (op1b)))
5685 {
5686 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5687 || (code1 == NE_EXPR && integer_onep (op1b)));
355fe088 5688 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
e89065a1
SL
5689 switch (gimple_code (stmt))
5690 {
5691 case GIMPLE_ASSIGN:
5692 /* Try to simplify by copy-propagating the definition. */
5f487a34
LJH
5693 return and_var_with_comparison (type, op1a, invert, code2, op2a,
5694 op2b);
e89065a1
SL
5695
5696 case GIMPLE_PHI:
5697 /* If every argument to the PHI produces the same result when
5698 ANDed with the second comparison, we win.
5699 Do not do this unless the type is bool since we need a bool
5700 result here anyway. */
5701 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5702 {
5703 tree result = NULL_TREE;
5704 unsigned i;
5705 for (i = 0; i < gimple_phi_num_args (stmt); i++)
5706 {
5707 tree arg = gimple_phi_arg_def (stmt, i);
5708
5709 /* If this PHI has itself as an argument, ignore it.
5710 If all the other args produce the same result,
5711 we're still OK. */
5712 if (arg == gimple_phi_result (stmt))
5713 continue;
5714 else if (TREE_CODE (arg) == INTEGER_CST)
5715 {
5716 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
5717 {
5718 if (!result)
5719 result = boolean_false_node;
5720 else if (!integer_zerop (result))
5721 return NULL_TREE;
5722 }
5723 else if (!result)
5724 result = fold_build2 (code2, boolean_type_node,
5725 op2a, op2b);
5726 else if (!same_bool_comparison_p (result,
5727 code2, op2a, op2b))
5728 return NULL_TREE;
5729 }
0e8b84ec
JJ
5730 else if (TREE_CODE (arg) == SSA_NAME
5731 && !SSA_NAME_IS_DEFAULT_DEF (arg))
e89065a1 5732 {
6c66f733 5733 tree temp;
355fe088 5734 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6c66f733
JJ
5735 /* In simple cases we can look through PHI nodes,
5736 but we have to be careful with loops.
5737 See PR49073. */
5738 if (! dom_info_available_p (CDI_DOMINATORS)
5739 || gimple_bb (def_stmt) == gimple_bb (stmt)
5740 || dominated_by_p (CDI_DOMINATORS,
5741 gimple_bb (def_stmt),
5742 gimple_bb (stmt)))
5743 return NULL_TREE;
5f487a34 5744 temp = and_var_with_comparison (type, arg, invert, code2,
6c66f733 5745 op2a, op2b);
e89065a1
SL
5746 if (!temp)
5747 return NULL_TREE;
5748 else if (!result)
5749 result = temp;
5750 else if (!same_bool_result_p (result, temp))
5751 return NULL_TREE;
5752 }
5753 else
5754 return NULL_TREE;
5755 }
5756 return result;
5757 }
5758
5759 default:
5760 break;
5761 }
5762 }
5763 return NULL_TREE;
5764}
5765
5f487a34
LJH
5766/* Helper function for maybe_fold_and_comparisons and maybe_fold_or_comparisons:
5767   try to simplify via match.pd the AND/OR of the two comparisons
5768   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B).  Return NULL_TREE if we can't
5769   simplify this to a single expression.  As we are going to lower the cost
5770   of building SSA names / gimple stmts significantly, we need to allocate
5771   them on the stack.  This will cause the code to be a bit ugly. */
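/* Rough sketch of the mechanism: two throw-away assignments
     lhs1 = op1a code1 op1b;   lhs2 = op2a code2 op2b;
   are built on the stack and the match.pd machinery is asked to resimplify
   (lhs1 CODE lhs2); a simplified result standing for one of the fabricated
   names maps back to the corresponding original comparison, while a result
   that still uses them in any other way is rejected.  */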
5772
5773static tree
5774maybe_fold_comparisons_from_match_pd (tree type, enum tree_code code,
5775 enum tree_code code1,
5776 tree op1a, tree op1b,
5777 enum tree_code code2, tree op2a,
5778 tree op2b)
5779{
5780 /* Allocate gimple stmt1 on the stack. */
5781 gassign *stmt1
5782 = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
5783 gimple_init (stmt1, GIMPLE_ASSIGN, 3);
5784 gimple_assign_set_rhs_code (stmt1, code1);
5785 gimple_assign_set_rhs1 (stmt1, op1a);
5786 gimple_assign_set_rhs2 (stmt1, op1b);
5787
5788 /* Allocate gimple stmt2 on the stack. */
5789 gassign *stmt2
5790 = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
5791 gimple_init (stmt2, GIMPLE_ASSIGN, 3);
5792 gimple_assign_set_rhs_code (stmt2, code2);
5793 gimple_assign_set_rhs1 (stmt2, op2a);
5794 gimple_assign_set_rhs2 (stmt2, op2b);
5795
5796 /* Allocate SSA names(lhs1) on the stack. */
5797 tree lhs1 = (tree)XALLOCA (tree_ssa_name);
5798 memset (lhs1, 0, sizeof (tree_ssa_name));
5799 TREE_SET_CODE (lhs1, SSA_NAME);
5800 TREE_TYPE (lhs1) = type;
5801 init_ssa_name_imm_use (lhs1);
5802
5803 /* Allocate SSA names(lhs2) on the stack. */
5804 tree lhs2 = (tree)XALLOCA (tree_ssa_name);
5805 memset (lhs2, 0, sizeof (tree_ssa_name));
5806 TREE_SET_CODE (lhs2, SSA_NAME);
5807 TREE_TYPE (lhs2) = type;
5808 init_ssa_name_imm_use (lhs2);
5809
5810 gimple_assign_set_lhs (stmt1, lhs1);
5811 gimple_assign_set_lhs (stmt2, lhs2);
5812
5813 gimple_match_op op (gimple_match_cond::UNCOND, code,
5814 type, gimple_assign_lhs (stmt1),
5815 gimple_assign_lhs (stmt2));
5816 if (op.resimplify (NULL, follow_all_ssa_edges))
5817 {
5818 if (gimple_simplified_result_is_gimple_val (&op))
5819 {
5820 tree res = op.ops[0];
5821 if (res == lhs1)
5822 return build2 (code1, type, op1a, op1b);
5823 else if (res == lhs2)
5824 return build2 (code2, type, op2a, op2b);
5825 else
5826 return res;
5827 }
ae9c3507
ML
5828 else if (op.code.is_tree_code ()
5829 && TREE_CODE_CLASS ((tree_code)op.code) == tcc_comparison)
5830 {
5831 tree op0 = op.ops[0];
5832 tree op1 = op.ops[1];
5833 if (op0 == lhs1 || op0 == lhs2 || op1 == lhs1 || op1 == lhs2)
5834 return NULL_TREE; /* not simple */
5835
5836 return build2 ((enum tree_code)op.code, op.type, op0, op1);
5837 }
5f487a34
LJH
5838 }
5839
5840 return NULL_TREE;
5841}
5842
e89065a1
SL
5843/* Try to simplify the AND of two comparisons, specified by
5844   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5845 If this can be simplified to a single expression (without requiring
5846 introducing more SSA variables to hold intermediate values),
5847 return the resulting tree. Otherwise return NULL_TREE.
5848 If the result expression is non-null, it has boolean type. */
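/* For instance (an illustrative case with integer operands):
   (x <= y) AND (x >= y) can combine to the single comparison x == y,
   and (x < y) AND (y < x) to boolean false.  */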
5849
5850tree
5f487a34
LJH
5851maybe_fold_and_comparisons (tree type,
5852 enum tree_code code1, tree op1a, tree op1b,
e89065a1
SL
5853 enum tree_code code2, tree op2a, tree op2b)
5854{
5f487a34 5855 if (tree t = and_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b))
e89065a1 5856 return t;
5f487a34
LJH
5857
5858 if (tree t = and_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b))
5859 return t;
5860
5861 if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_AND_EXPR, code1,
5862 op1a, op1b, code2, op2a,
5863 op2b))
5864 return t;
5865
5866 return NULL_TREE;
e89065a1
SL
5867}
5868
5869/* Helper function for or_comparisons_1: try to simplify the OR of the
5870 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5871 If INVERT is true, invert the value of VAR before doing the OR.
5872 Return NULL_EXPR if we can't simplify this to a single expression. */
5873
5874static tree
5f487a34 5875or_var_with_comparison (tree type, tree var, bool invert,
e89065a1
SL
5876 enum tree_code code2, tree op2a, tree op2b)
5877{
5878 tree t;
355fe088 5879 gimple *stmt = SSA_NAME_DEF_STMT (var);
e89065a1
SL
5880
5881 /* We can only deal with variables whose definitions are assignments. */
5882 if (!is_gimple_assign (stmt))
5883 return NULL_TREE;
5884
5885 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5886 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
5887 Then we only have to consider the simpler non-inverted cases. */
5888 if (invert)
5f487a34 5889 t = and_var_with_comparison_1 (type, stmt,
e89065a1
SL
5890 invert_tree_comparison (code2, false),
5891 op2a, op2b);
5892 else
5f487a34 5893 t = or_var_with_comparison_1 (type, stmt, code2, op2a, op2b);
e89065a1
SL
5894 return canonicalize_bool (t, invert);
5895}
5896
5897/* Try to simplify the OR of the ssa variable defined by the assignment
5898 STMT with the comparison specified by (OP2A CODE2 OP2B).
5899 Return NULL_EXPR if we can't simplify this to a single expression. */
5900
5901static tree
5f487a34 5902or_var_with_comparison_1 (tree type, gimple *stmt,
e89065a1
SL
5903 enum tree_code code2, tree op2a, tree op2b)
5904{
5905 tree var = gimple_assign_lhs (stmt);
5906 tree true_test_var = NULL_TREE;
5907 tree false_test_var = NULL_TREE;
5908 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5909
5910  /* Check for identities like (var OR (var != 0)) => true. */
5911 if (TREE_CODE (op2a) == SSA_NAME
5912 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5913 {
5914 if ((code2 == NE_EXPR && integer_zerop (op2b))
5915 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5916 {
5917 true_test_var = op2a;
5918 if (var == true_test_var)
5919 return var;
5920 }
5921 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5922 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5923 {
5924 false_test_var = op2a;
5925 if (var == false_test_var)
5926 return boolean_true_node;
5927 }
5928 }
5929
5930 /* If the definition is a comparison, recurse on it. */
5931 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5932 {
5f487a34 5933 tree t = or_comparisons_1 (type, innercode,
e89065a1
SL
5934 gimple_assign_rhs1 (stmt),
5935 gimple_assign_rhs2 (stmt),
5936 code2,
5937 op2a,
5938 op2b);
5939 if (t)
5940 return t;
5941 }
5942
5943 /* If the definition is an AND or OR expression, we may be able to
5944 simplify by reassociating. */
eb9820c0
KT
5945 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5946 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
e89065a1
SL
5947 {
5948 tree inner1 = gimple_assign_rhs1 (stmt);
5949 tree inner2 = gimple_assign_rhs2 (stmt);
355fe088 5950 gimple *s;
e89065a1
SL
5951 tree t;
5952 tree partial = NULL_TREE;
eb9820c0 5953 bool is_or = (innercode == BIT_IOR_EXPR);
e89065a1
SL
5954
5955 /* Check for boolean identities that don't require recursive examination
5956 of inner1/inner2:
5957 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
5958 inner1 OR (inner1 AND inner2) => inner1
5959 !inner1 OR (inner1 OR inner2) => true
5960 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
5961 */
5962 if (inner1 == true_test_var)
5963 return (is_or ? var : inner1);
5964 else if (inner2 == true_test_var)
5965 return (is_or ? var : inner2);
5966 else if (inner1 == false_test_var)
5967 return (is_or
5968 ? boolean_true_node
5f487a34
LJH
5969 : or_var_with_comparison (type, inner2, false, code2, op2a,
5970 op2b));
e89065a1
SL
5971 else if (inner2 == false_test_var)
5972 return (is_or
5973 ? boolean_true_node
5f487a34
LJH
5974 : or_var_with_comparison (type, inner1, false, code2, op2a,
5975 op2b));
e89065a1
SL
5976
5977 /* Next, redistribute/reassociate the OR across the inner tests.
5978 Compute the first partial result, (inner1 OR (op2a code op2b)) */
5979 if (TREE_CODE (inner1) == SSA_NAME
5980 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5981 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5f487a34 5982 && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
e89065a1
SL
5983 gimple_assign_rhs1 (s),
5984 gimple_assign_rhs2 (s),
5985 code2, op2a, op2b)))
5986 {
5987 /* Handle the OR case, where we are reassociating:
5988 (inner1 OR inner2) OR (op2a code2 op2b)
5989 => (t OR inner2)
5990 If the partial result t is a constant, we win. Otherwise
5991 continue on to try reassociating with the other inner test. */
8236c8eb 5992 if (is_or)
e89065a1
SL
5993 {
5994 if (integer_onep (t))
5995 return boolean_true_node;
5996 else if (integer_zerop (t))
5997 return inner2;
5998 }
5999
6000 /* Handle the AND case, where we are redistributing:
6001 (inner1 AND inner2) OR (op2a code2 op2b)
6002 => (t AND (inner2 OR (op2a code op2b))) */
8236c8eb
JJ
6003 else if (integer_zerop (t))
6004 return boolean_false_node;
6005
6006 /* Save partial result for later. */
6007 partial = t;
e89065a1
SL
6008 }
6009
6010 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
6011 if (TREE_CODE (inner2) == SSA_NAME
6012 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
6013 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5f487a34 6014 && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
e89065a1
SL
6015 gimple_assign_rhs1 (s),
6016 gimple_assign_rhs2 (s),
6017 code2, op2a, op2b)))
6018 {
6019 /* Handle the OR case, where we are reassociating:
6020 (inner1 OR inner2) OR (op2a code2 op2b)
8236c8eb
JJ
6021 => (inner1 OR t)
6022 => (t OR partial) */
6023 if (is_or)
e89065a1
SL
6024 {
6025 if (integer_zerop (t))
6026 return inner1;
6027 else if (integer_onep (t))
6028 return boolean_true_node;
8236c8eb
JJ
6029 /* If both are the same, we can apply the identity
6030 (x OR x) == x. */
6031 else if (partial && same_bool_result_p (t, partial))
6032 return t;
e89065a1
SL
6033 }
6034
6035 /* Handle the AND case, where we are redistributing:
6036 (inner1 AND inner2) OR (op2a code2 op2b)
6037 => (t AND (inner1 OR (op2a code2 op2b)))
6038 => (t AND partial) */
6039 else
6040 {
6041 if (integer_zerop (t))
6042 return boolean_false_node;
6043 else if (partial)
6044 {
6045 /* We already got a simplification for the other
6046 operand to the redistributed AND expression. The
6047 interesting case is when at least one is true.
6048 Or, if both are the same, we can apply the identity
8236c8eb 6049 (x AND x) == x. */
e89065a1
SL
6050 if (integer_onep (partial))
6051 return t;
6052 else if (integer_onep (t))
6053 return partial;
6054 else if (same_bool_result_p (t, partial))
8236c8eb 6055 return t;
e89065a1
SL
6056 }
6057 }
6058 }
6059 }
6060 return NULL_TREE;
6061}
6062
6063/* Try to simplify the OR of two comparisons defined by
6064 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
6065 If this can be done without constructing an intermediate value,
6066 return the resulting tree; otherwise NULL_TREE is returned.
6067 This function is deliberately asymmetric as it recurses on SSA_DEFs
6068 in the first comparison but not the second. */
6069
6070static tree
5f487a34 6071or_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
e89065a1
SL
6072 enum tree_code code2, tree op2a, tree op2b)
6073{
ae22ac3c 6074 tree truth_type = truth_type_for (TREE_TYPE (op1a));
31ed6226 6075
e89065a1
SL
6076 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
6077 if (operand_equal_p (op1a, op2a, 0)
6078 && operand_equal_p (op1b, op2b, 0))
6079 {
eb9820c0 6080 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
6081 tree t = combine_comparisons (UNKNOWN_LOCATION,
6082 TRUTH_ORIF_EXPR, code1, code2,
31ed6226 6083 truth_type, op1a, op1b);
e89065a1
SL
6084 if (t)
6085 return t;
6086 }
6087
6088 /* Likewise the swapped case of the above. */
6089 if (operand_equal_p (op1a, op2b, 0)
6090 && operand_equal_p (op1b, op2a, 0))
6091 {
eb9820c0 6092 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
6093 tree t = combine_comparisons (UNKNOWN_LOCATION,
6094 TRUTH_ORIF_EXPR, code1,
6095 swap_tree_comparison (code2),
31ed6226 6096 truth_type, op1a, op1b);
e89065a1
SL
6097 if (t)
6098 return t;
6099 }
6100
e89065a1
SL
6101 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
6102 NAME's definition is a truth value. See if there are any simplifications
6103 that can be done against the NAME's definition. */
6104 if (TREE_CODE (op1a) == SSA_NAME
6105 && (code1 == NE_EXPR || code1 == EQ_EXPR)
6106 && (integer_zerop (op1b) || integer_onep (op1b)))
6107 {
6108 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
6109 || (code1 == NE_EXPR && integer_onep (op1b)));
355fe088 6110 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
e89065a1
SL
6111 switch (gimple_code (stmt))
6112 {
6113 case GIMPLE_ASSIGN:
6114 /* Try to simplify by copy-propagating the definition. */
5f487a34
LJH
6115 return or_var_with_comparison (type, op1a, invert, code2, op2a,
6116 op2b);
e89065a1
SL
6117
6118 case GIMPLE_PHI:
6119 /* If every argument to the PHI produces the same result when
6120 ORed with the second comparison, we win.
6121 Do not do this unless the type is bool since we need a bool
6122 result here anyway. */
6123 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
6124 {
6125 tree result = NULL_TREE;
6126 unsigned i;
6127 for (i = 0; i < gimple_phi_num_args (stmt); i++)
6128 {
6129 tree arg = gimple_phi_arg_def (stmt, i);
6130
6131 /* If this PHI has itself as an argument, ignore it.
6132 If all the other args produce the same result,
6133 we're still OK. */
6134 if (arg == gimple_phi_result (stmt))
6135 continue;
6136 else if (TREE_CODE (arg) == INTEGER_CST)
6137 {
6138 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
6139 {
6140 if (!result)
6141 result = boolean_true_node;
6142 else if (!integer_onep (result))
6143 return NULL_TREE;
6144 }
6145 else if (!result)
6146 result = fold_build2 (code2, boolean_type_node,
6147 op2a, op2b);
6148 else if (!same_bool_comparison_p (result,
6149 code2, op2a, op2b))
6150 return NULL_TREE;
6151 }
0e8b84ec
JJ
6152 else if (TREE_CODE (arg) == SSA_NAME
6153 && !SSA_NAME_IS_DEFAULT_DEF (arg))
e89065a1 6154 {
6c66f733 6155 tree temp;
355fe088 6156 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6c66f733
JJ
6157 /* In simple cases we can look through PHI nodes,
6158 but we have to be careful with loops.
6159 See PR49073. */
6160 if (! dom_info_available_p (CDI_DOMINATORS)
6161 || gimple_bb (def_stmt) == gimple_bb (stmt)
6162 || dominated_by_p (CDI_DOMINATORS,
6163 gimple_bb (def_stmt),
6164 gimple_bb (stmt)))
6165 return NULL_TREE;
5f487a34 6166 temp = or_var_with_comparison (type, arg, invert, code2,
6c66f733 6167 op2a, op2b);
e89065a1
SL
6168 if (!temp)
6169 return NULL_TREE;
6170 else if (!result)
6171 result = temp;
6172 else if (!same_bool_result_p (result, temp))
6173 return NULL_TREE;
6174 }
6175 else
6176 return NULL_TREE;
6177 }
6178 return result;
6179 }
6180
6181 default:
6182 break;
6183 }
6184 }
6185 return NULL_TREE;
6186}
6187
6188/* Try to simplify the OR of two comparisons, specified by
6189   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
6190 If this can be simplified to a single expression (without requiring
6191 introducing more SSA variables to hold intermediate values),
6192 return the resulting tree. Otherwise return NULL_TREE.
6193 If the result expression is non-null, it has boolean type. */
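/* For instance (an illustrative case with integer operands):
   (x < y) OR (x == y) can combine to the single comparison x <= y,
   and (x < y) OR (y < x) to x != y.  */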
6194
6195tree
5f487a34
LJH
6196maybe_fold_or_comparisons (tree type,
6197 enum tree_code code1, tree op1a, tree op1b,
e89065a1
SL
6198 enum tree_code code2, tree op2a, tree op2b)
6199{
5f487a34 6200 if (tree t = or_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b))
e89065a1 6201 return t;
cfef45c8 6202
5f487a34
LJH
6203 if (tree t = or_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b))
6204 return t;
6205
6206 if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_IOR_EXPR, code1,
6207 op1a, op1b, code2, op2a,
6208 op2b))
6209 return t;
6210
6211 return NULL_TREE;
6212}
cfef45c8
RG
6213
6214/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6215
6216 Either NULL_TREE, a simplified but non-constant or a constant
6217 is returned.
6218
6219 ??? This should go into a gimple-fold-inline.h file to be eventually
6220 privatized with the single valueize function used in the various TUs
6221 to avoid the indirect function call overhead. */
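/* A small illustration (hypothetical SSA names): for the statement
     _3 = _1 * 4;
   with VALUEIZE (_1) returning the constant 5, the function returns the
   constant 20; when _1 does not valueize to a constant, NULL_TREE or a
   simplified but non-constant expression may be returned instead.  */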
6222
6223tree
355fe088 6224gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
d2a85801 6225 tree (*gvalueize) (tree))
cfef45c8 6226{
5d75ad95 6227 gimple_match_op res_op;
45cc9f96
RB
6228 /* ??? The SSA propagators do not correctly deal with following SSA use-def
6229 edges if there are intermediate VARYING defs. For this reason
6230 do not follow SSA edges here even though SCCVN can technically
6231 just deal fine with that. */
5d75ad95 6232 if (gimple_simplify (stmt, &res_op, NULL, gvalueize, valueize))
45cc9f96 6233 {
34050b6b 6234 tree res = NULL_TREE;
5d75ad95
RS
6235 if (gimple_simplified_result_is_gimple_val (&res_op))
6236 res = res_op.ops[0];
34050b6b 6237 else if (mprts_hook)
5d75ad95 6238 res = mprts_hook (&res_op);
34050b6b 6239 if (res)
45cc9f96 6240 {
34050b6b
RB
6241 if (dump_file && dump_flags & TDF_DETAILS)
6242 {
6243 fprintf (dump_file, "Match-and-simplified ");
6244 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
6245 fprintf (dump_file, " to ");
ef6cb4c7 6246 print_generic_expr (dump_file, res);
34050b6b
RB
6247 fprintf (dump_file, "\n");
6248 }
6249 return res;
45cc9f96 6250 }
45cc9f96
RB
6251 }
6252
cfef45c8
RG
6253 location_t loc = gimple_location (stmt);
6254 switch (gimple_code (stmt))
6255 {
6256 case GIMPLE_ASSIGN:
6257 {
6258 enum tree_code subcode = gimple_assign_rhs_code (stmt);
6259
6260 switch (get_gimple_rhs_class (subcode))
6261 {
6262 case GIMPLE_SINGLE_RHS:
6263 {
6264 tree rhs = gimple_assign_rhs1 (stmt);
6265 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
6266
6267 if (TREE_CODE (rhs) == SSA_NAME)
6268 {
6269 /* If the RHS is an SSA_NAME, return its known constant value,
6270 if any. */
6271 return (*valueize) (rhs);
6272 }
6273 /* Handle propagating invariant addresses into address
6274 operations. */
6275 else if (TREE_CODE (rhs) == ADDR_EXPR
6276 && !is_gimple_min_invariant (rhs))
6277 {
a90c8804 6278 poly_int64 offset = 0;
cfef45c8
RG
6279 tree base;
6280 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
6281 &offset,
6282 valueize);
6283 if (base
6284 && (CONSTANT_CLASS_P (base)
6285 || decl_address_invariant_p (base)))
6286 return build_invariant_address (TREE_TYPE (rhs),
6287 base, offset);
6288 }
6289 else if (TREE_CODE (rhs) == CONSTRUCTOR
6290 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
928686b1
RS
6291 && known_eq (CONSTRUCTOR_NELTS (rhs),
6292 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
cfef45c8 6293 {
794e3180
RS
6294 unsigned i, nelts;
6295 tree val;
cfef45c8 6296
928686b1 6297 nelts = CONSTRUCTOR_NELTS (rhs);
5ebaa477 6298 tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
cfef45c8
RG
6299 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
6300 {
6301 val = (*valueize) (val);
6302 if (TREE_CODE (val) == INTEGER_CST
6303 || TREE_CODE (val) == REAL_CST
6304 || TREE_CODE (val) == FIXED_CST)
794e3180 6305 vec.quick_push (val);
cfef45c8
RG
6306 else
6307 return NULL_TREE;
6308 }
6309
5ebaa477 6310 return vec.build ();
cfef45c8 6311 }
bdf37f7a
JH
6312 if (subcode == OBJ_TYPE_REF)
6313 {
6314 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
6315 /* If callee is constant, we can fold away the wrapper. */
6316 if (is_gimple_min_invariant (val))
6317 return val;
6318 }
cfef45c8
RG
6319
6320 if (kind == tcc_reference)
6321 {
6322 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
6323 || TREE_CODE (rhs) == REALPART_EXPR
6324 || TREE_CODE (rhs) == IMAGPART_EXPR)
6325 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6326 {
6327 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6328 return fold_unary_loc (EXPR_LOCATION (rhs),
6329 TREE_CODE (rhs),
6330 TREE_TYPE (rhs), val);
6331 }
6332 else if (TREE_CODE (rhs) == BIT_FIELD_REF
6333 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6334 {
6335 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6336 return fold_ternary_loc (EXPR_LOCATION (rhs),
6337 TREE_CODE (rhs),
6338 TREE_TYPE (rhs), val,
6339 TREE_OPERAND (rhs, 1),
6340 TREE_OPERAND (rhs, 2));
6341 }
6342 else if (TREE_CODE (rhs) == MEM_REF
6343 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6344 {
6345 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6346 if (TREE_CODE (val) == ADDR_EXPR
6347 && is_gimple_min_invariant (val))
6348 {
6349 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
6350 unshare_expr (val),
6351 TREE_OPERAND (rhs, 1));
6352 if (tem)
6353 rhs = tem;
6354 }
6355 }
6356 return fold_const_aggregate_ref_1 (rhs, valueize);
6357 }
6358 else if (kind == tcc_declaration)
6359 return get_symbol_constant_value (rhs);
6360 return rhs;
6361 }
6362
6363 case GIMPLE_UNARY_RHS:
f3582e54 6364 return NULL_TREE;
cfef45c8
RG
6365
6366 case GIMPLE_BINARY_RHS:
4b1b9e64
RB
6367 /* Translate &x + CST into an invariant form suitable for
6368 further propagation. */
6369 if (subcode == POINTER_PLUS_EXPR)
6370 {
4b1b9e64
RB
6371 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6372 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
4b1b9e64
RB
6373 if (TREE_CODE (op0) == ADDR_EXPR
6374 && TREE_CODE (op1) == INTEGER_CST)
6375 {
6376 tree off = fold_convert (ptr_type_node, op1);
6377 return build_fold_addr_expr_loc
6378 (loc,
6379 fold_build2 (MEM_REF,
6380 TREE_TYPE (TREE_TYPE (op0)),
6381 unshare_expr (op0), off));
6382 }
6383 }
59c20dc7
RB
6384 /* Canonicalize bool != 0 and bool == 0 appearing after
6385	     valueization.  While gimple_simplify handles this,
6386	     it can get confused by the ~X == 1 -> X == 0 transform
6387	     which we can't reduce to an SSA name or a constant
6388 (and we have no way to tell gimple_simplify to not
6389 consider those transforms in the first place). */
6390 else if (subcode == EQ_EXPR
6391 || subcode == NE_EXPR)
6392 {
6393 tree lhs = gimple_assign_lhs (stmt);
6394 tree op0 = gimple_assign_rhs1 (stmt);
6395 if (useless_type_conversion_p (TREE_TYPE (lhs),
6396 TREE_TYPE (op0)))
6397 {
6398 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6399 op0 = (*valueize) (op0);
8861704d
RB
6400 if (TREE_CODE (op0) == INTEGER_CST)
6401 std::swap (op0, op1);
6402 if (TREE_CODE (op1) == INTEGER_CST
6403 && ((subcode == NE_EXPR && integer_zerop (op1))
6404 || (subcode == EQ_EXPR && integer_onep (op1))))
6405 return op0;
59c20dc7
RB
6406 }
6407 }
4b1b9e64 6408 return NULL_TREE;
cfef45c8
RG
6409
6410 case GIMPLE_TERNARY_RHS:
6411 {
6412 /* Handle ternary operators that can appear in GIMPLE form. */
6413 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6414 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6415 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
cfef45c8
RG
6416 return fold_ternary_loc (loc, subcode,
6417 gimple_expr_type (stmt), op0, op1, op2);
6418 }
6419
6420 default:
6421 gcc_unreachable ();
6422 }
6423 }
6424
6425 case GIMPLE_CALL:
6426 {
25583c4f 6427 tree fn;
538dd0b7 6428 gcall *call_stmt = as_a <gcall *> (stmt);
25583c4f
RS
6429
6430 if (gimple_call_internal_p (stmt))
31e071ae
MP
6431 {
6432 enum tree_code subcode = ERROR_MARK;
6433 switch (gimple_call_internal_fn (stmt))
6434 {
6435 case IFN_UBSAN_CHECK_ADD:
6436 subcode = PLUS_EXPR;
6437 break;
6438 case IFN_UBSAN_CHECK_SUB:
6439 subcode = MINUS_EXPR;
6440 break;
6441 case IFN_UBSAN_CHECK_MUL:
6442 subcode = MULT_EXPR;
6443 break;
68fa96d6
ML
6444 case IFN_BUILTIN_EXPECT:
6445 {
6446 tree arg0 = gimple_call_arg (stmt, 0);
6447 tree op0 = (*valueize) (arg0);
6448 if (TREE_CODE (op0) == INTEGER_CST)
6449 return op0;
6450 return NULL_TREE;
6451 }
31e071ae
MP
6452 default:
6453 return NULL_TREE;
6454 }
368b454d
JJ
6455 tree arg0 = gimple_call_arg (stmt, 0);
6456 tree arg1 = gimple_call_arg (stmt, 1);
6457 tree op0 = (*valueize) (arg0);
6458 tree op1 = (*valueize) (arg1);
31e071ae
MP
6459
6460 if (TREE_CODE (op0) != INTEGER_CST
6461 || TREE_CODE (op1) != INTEGER_CST)
368b454d
JJ
6462 {
6463 switch (subcode)
6464 {
6465 case MULT_EXPR:
6466 /* x * 0 = 0 * x = 0 without overflow. */
6467 if (integer_zerop (op0) || integer_zerop (op1))
6468 return build_zero_cst (TREE_TYPE (arg0));
6469 break;
6470 case MINUS_EXPR:
6471 /* y - y = 0 without overflow. */
6472 if (operand_equal_p (op0, op1, 0))
6473 return build_zero_cst (TREE_TYPE (arg0));
6474 break;
6475 default:
6476 break;
6477 }
6478 }
6479 tree res
6480 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
31e071ae
MP
6481 if (res
6482 && TREE_CODE (res) == INTEGER_CST
6483 && !TREE_OVERFLOW (res))
6484 return res;
6485 return NULL_TREE;
6486 }
25583c4f
RS
6487
6488 fn = (*valueize) (gimple_call_fn (stmt));
cfef45c8 6489 if (TREE_CODE (fn) == ADDR_EXPR
6773658a 6490 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
3d78e008 6491 && fndecl_built_in_p (TREE_OPERAND (fn, 0))
5c944c6c
RB
6492 && gimple_builtin_call_types_compatible_p (stmt,
6493 TREE_OPERAND (fn, 0)))
cfef45c8
RG
6494 {
6495 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
a6a0570f 6496 tree retval;
cfef45c8
RG
6497 unsigned i;
6498 for (i = 0; i < gimple_call_num_args (stmt); ++i)
6499 args[i] = (*valueize) (gimple_call_arg (stmt, i));
a6a0570f 6500 retval = fold_builtin_call_array (loc,
538dd0b7 6501 gimple_call_return_type (call_stmt),
cfef45c8 6502 fn, gimple_call_num_args (stmt), args);
cfef45c8 6503 if (retval)
5c944c6c
RB
6504 {
6505 /* fold_call_expr wraps the result inside a NOP_EXPR. */
6506 STRIP_NOPS (retval);
538dd0b7
DM
6507 retval = fold_convert (gimple_call_return_type (call_stmt),
6508 retval);
5c944c6c 6509 }
cfef45c8
RG
6510 return retval;
6511 }
6512 return NULL_TREE;
6513 }
6514
6515 default:
6516 return NULL_TREE;
6517 }
6518}
6519
6520/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6521 Returns NULL_TREE if folding to a constant is not possible, otherwise
6522 returns a constant according to is_gimple_min_invariant. */
6523
6524tree
355fe088 6525gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
cfef45c8
RG
6526{
6527 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
6528 if (res && is_gimple_min_invariant (res))
6529 return res;
6530 return NULL_TREE;
6531}
6532
6533
6534/* The following set of functions are supposed to fold references using
6535 their constant initializers. */
6536
cfef45c8
RG
6537/* See if we can find a constructor defining the value of BASE.
6538   When we know the constructor with constant offset (such as when
6539   the base is array[40] and we know the constructor of the array), then
6540 BIT_OFFSET is adjusted accordingly.
6541
6542 As a special case, return error_mark_node when constructor
6543 is not explicitly available, but it is known to be zero
6544 such as 'static const int a;'. */
6545static tree
588db50c 6546get_base_constructor (tree base, poly_int64_pod *bit_offset,
cfef45c8
RG
6547 tree (*valueize)(tree))
6548{
588db50c 6549 poly_int64 bit_offset2, size, max_size;
ee45a32d
EB
6550 bool reverse;
6551
cfef45c8
RG
6552 if (TREE_CODE (base) == MEM_REF)
6553 {
6a5aca53
ML
6554 poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
6555 if (!boff.to_shwi (bit_offset))
6556 return NULL_TREE;
cfef45c8
RG
6557
6558 if (valueize
6559 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
6560 base = valueize (TREE_OPERAND (base, 0));
6561 if (!base || TREE_CODE (base) != ADDR_EXPR)
6562 return NULL_TREE;
6563 base = TREE_OPERAND (base, 0);
6564 }
13e88953
RB
6565 else if (valueize
6566 && TREE_CODE (base) == SSA_NAME)
6567 base = valueize (base);
cfef45c8
RG
6568
6569 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
6570 DECL_INITIAL. If BASE is a nested reference into another
6571 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
6572 the inner reference. */
6573 switch (TREE_CODE (base))
6574 {
6575 case VAR_DECL:
cfef45c8 6576 case CONST_DECL:
6a6dac52
JH
6577 {
6578 tree init = ctor_for_folding (base);
6579
688010ba 6580 /* Our semantics are the exact opposite of ctor_for_folding's;
6a6dac52
JH
6581 NULL means unknown, while error_mark_node is 0. */
6582 if (init == error_mark_node)
6583 return NULL_TREE;
6584 if (!init)
6585 return error_mark_node;
6586 return init;
6587 }
cfef45c8 6588
13e88953
RB
6589 case VIEW_CONVERT_EXPR:
6590 return get_base_constructor (TREE_OPERAND (base, 0),
6591 bit_offset, valueize);
6592
cfef45c8
RG
6593 case ARRAY_REF:
6594 case COMPONENT_REF:
ee45a32d
EB
6595 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
6596 &reverse);
588db50c 6597 if (!known_size_p (max_size) || maybe_ne (size, max_size))
cfef45c8
RG
6598 return NULL_TREE;
6599 *bit_offset += bit_offset2;
6600 return get_base_constructor (base, bit_offset, valueize);
6601
cfef45c8
RG
6602 case CONSTRUCTOR:
6603 return base;
6604
6605 default:
13e88953
RB
6606 if (CONSTANT_CLASS_P (base))
6607 return base;
6608
cfef45c8
RG
6609 return NULL_TREE;
6610 }
6611}
6612
35b4d3a6
MS
6613/* CTOR is CONSTRUCTOR of an array type. Fold a reference of SIZE bits
6614 to the memory at bit OFFSET. When non-null, TYPE is the expected
6615 type of the reference; otherwise the type of the referenced element
6616 is used instead. When SIZE is zero, attempt to fold a reference to
6617 the entire element which OFFSET refers to. Increment *SUBOFF by
6618 the bit offset of the accessed element. */
cfef45c8
RG
6619
6620static tree
6621fold_array_ctor_reference (tree type, tree ctor,
6622 unsigned HOST_WIDE_INT offset,
c44c2088 6623 unsigned HOST_WIDE_INT size,
35b4d3a6
MS
6624 tree from_decl,
6625 unsigned HOST_WIDE_INT *suboff)
cfef45c8 6626{
807e902e
KZ
6627 offset_int low_bound;
6628 offset_int elt_size;
807e902e 6629 offset_int access_index;
6a636014 6630 tree domain_type = NULL_TREE;
cfef45c8
RG
6631 HOST_WIDE_INT inner_offset;
6632
6633 /* Compute low bound and elt size. */
eb8f1123
RG
6634 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
6635 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
cfef45c8
RG
6636 if (domain_type && TYPE_MIN_VALUE (domain_type))
6637 {
6aa238a1 6638 /* Static constructors for variably sized objects make no sense. */
9ef2eff0
RB
6639 if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
6640 return NULL_TREE;
807e902e 6641 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
cfef45c8
RG
6642 }
6643 else
807e902e 6644 low_bound = 0;
6aa238a1 6645 /* Static constructors for variably sized objects make no sense. */
9ef2eff0
RB
6646 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
6647 return NULL_TREE;
807e902e 6648 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
cfef45c8 6649
35b4d3a6 6650 /* When TYPE is non-null, verify that it specifies a constant-sized
831e688a 6651 access of a multiple of the array element size. Avoid division
6aa238a1
MS
6652 by zero below when ELT_SIZE is zero, such as with the result of
6653 an initializer for a zero-length array or an empty struct. */
6654 if (elt_size == 0
6655 || (type
6656 && (!TYPE_SIZE_UNIT (type)
831e688a 6657 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST)))
cfef45c8
RG
6658 return NULL_TREE;
6659
6660 /* Compute the array index we look for. */
807e902e
KZ
6661 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
6662 elt_size);
27bcd47c 6663 access_index += low_bound;
cfef45c8
RG
6664
6665 /* And offset within the access. */
27bcd47c 6666 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
cfef45c8 6667
831e688a
RB
6668 if (size > elt_size.to_uhwi () * BITS_PER_UNIT)
6669 {
6670 /* native_encode_expr constraints. */
6671 if (size > MAX_BITSIZE_MODE_ANY_MODE
6672 || size % BITS_PER_UNIT != 0
6673 || inner_offset % BITS_PER_UNIT != 0)
6674 return NULL_TREE;
6675
6676 unsigned ctor_idx;
6677 tree val = get_array_ctor_element_at_index (ctor, access_index,
6678 &ctor_idx);
6679 if (!val && ctor_idx >= CONSTRUCTOR_NELTS (ctor))
6680 return build_zero_cst (type);
6681
6682 /* native-encode adjacent ctor elements. */
6683 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
6684 unsigned bufoff = 0;
6685 offset_int index = 0;
6686 offset_int max_index = access_index;
6687 constructor_elt *elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
6688 if (!val)
6689 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
6690 else if (!CONSTANT_CLASS_P (val))
6691 return NULL_TREE;
6692 if (!elt->index)
6693 ;
6694 else if (TREE_CODE (elt->index) == RANGE_EXPR)
6695 {
6696 index = wi::to_offset (TREE_OPERAND (elt->index, 0));
6697 max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
6698 }
6699 else
6700 index = max_index = wi::to_offset (elt->index);
6701 index = wi::umax (index, access_index);
6702 do
6703 {
6704 int len = native_encode_expr (val, buf + bufoff,
6705 elt_size.to_uhwi (),
6706 inner_offset / BITS_PER_UNIT);
6707 if (len != elt_size - inner_offset / BITS_PER_UNIT)
6708 return NULL_TREE;
6709 inner_offset = 0;
6710 bufoff += len;
6711
6712 access_index += 1;
6713 if (wi::cmpu (access_index, index) == 0)
6714 val = elt->value;
6715 else if (wi::cmpu (access_index, max_index) > 0)
6716 {
6717 ctor_idx++;
6718 if (ctor_idx >= CONSTRUCTOR_NELTS (ctor))
6719 {
6720 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
6721 ++max_index;
6722 }
6723 else
6724 {
6725 elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
6726 index = 0;
6727 max_index = access_index;
6728 if (!elt->index)
6729 ;
6730 else if (TREE_CODE (elt->index) == RANGE_EXPR)
6731 {
6732 index = wi::to_offset (TREE_OPERAND (elt->index, 0));
6733 max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
6734 }
6735 else
6736 index = max_index = wi::to_offset (elt->index);
6737 index = wi::umax (index, access_index);
6738 if (wi::cmpu (access_index, index) == 0)
6739 val = elt->value;
6740 else
6741 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
6742 }
6743 }
6744 }
6745 while (bufoff < size / BITS_PER_UNIT);
6746 *suboff += size;
6747 return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
6748 }
6749
6a636014 6750 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
35b4d3a6
MS
6751 {
6752 if (!size && TREE_CODE (val) != CONSTRUCTOR)
6753 {
6754 /* For the final reference to the entire accessed element
6755 (SIZE is zero), reset INNER_OFFSET, disregard TYPE (which
6756 may be null) in favor of the type of the element, and set
6757 SIZE to the size of the accessed element. */
6758 inner_offset = 0;
6759 type = TREE_TYPE (val);
6760 size = elt_size.to_uhwi () * BITS_PER_UNIT;
6761 }
6762
6763 *suboff += (access_index * elt_size * BITS_PER_UNIT).to_uhwi ();
6764 return fold_ctor_reference (type, val, inner_offset, size, from_decl,
6765 suboff);
6766 }
cfef45c8 6767
35b4d3a6
MS
6768 /* Memory not explicitly mentioned in constructor is 0 (or
6769 the reference is out of range). */
6770 return type ? build_zero_cst (type) : NULL_TREE;
cfef45c8
RG
6771}
6772
35b4d3a6
MS
6773/* CTOR is CONSTRUCTOR of an aggregate or vector. Fold a reference
6774 of SIZE bits to the memory at bit OFFSET. When non-null, TYPE
6775 is the expected type of the reference; otherwise the type of
6776 the referenced member is used instead. When SIZE is zero,
6777 attempt to fold a reference to the entire member which OFFSET
6778 refers to. Increment *SUBOFF by the bit offset
6779 of the accessed member. */
cfef45c8
RG
6780
6781static tree
6782fold_nonarray_ctor_reference (tree type, tree ctor,
6783 unsigned HOST_WIDE_INT offset,
c44c2088 6784 unsigned HOST_WIDE_INT size,
35b4d3a6
MS
6785 tree from_decl,
6786 unsigned HOST_WIDE_INT *suboff)
cfef45c8
RG
6787{
6788 unsigned HOST_WIDE_INT cnt;
6789 tree cfield, cval;
6790
6791 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
6792 cval)
6793 {
6794 tree byte_offset = DECL_FIELD_OFFSET (cfield);
6795 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
6796 tree field_size = DECL_SIZE (cfield);
35b4d3a6
MS
6797
6798 if (!field_size)
6799 {
6800 /* Determine the size of the flexible array member from
6801 the size of the initializer provided for it. */
6802 field_size = TYPE_SIZE (TREE_TYPE (cval));
6803 }
cfef45c8
RG
6804
6805 /* Variable sized objects in static constructors make no sense,
6806 but field_size can be NULL for flexible array members. */
6807 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
6808 && TREE_CODE (byte_offset) == INTEGER_CST
6809 && (field_size != NULL_TREE
6810 ? TREE_CODE (field_size) == INTEGER_CST
6811 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
6812
6813 /* Compute bit offset of the field. */
35b4d3a6
MS
6814 offset_int bitoffset
6815 = (wi::to_offset (field_offset)
6816 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
cfef45c8 6817 /* Compute bit offset where the field ends. */
35b4d3a6 6818 offset_int bitoffset_end;
cfef45c8 6819 if (field_size != NULL_TREE)
807e902e 6820 bitoffset_end = bitoffset + wi::to_offset (field_size);
cfef45c8 6821 else
807e902e 6822 bitoffset_end = 0;
cfef45c8 6823
35b4d3a6
MS
6824 /* Compute the bit offset of the end of the desired access.
6825 As a special case, if the size of the desired access is
6826 zero, assume the access is to the entire field (and let
6827 the caller make any necessary adjustments by storing
6828 the actual bounds of the field in FIELDBOUNDS). */
6829 offset_int access_end = offset_int (offset);
6830 if (size)
6831 access_end += size;
6832 else
6833 access_end = bitoffset_end;
b8b2b009 6834
35b4d3a6
MS
6835 /* Is there any overlap between the desired access at
6836 [OFFSET, OFFSET+SIZE) and the offset of the field within
6837 the object at [BITOFFSET, BITOFFSET_END)? */
807e902e 6838 if (wi::cmps (access_end, bitoffset) > 0
cfef45c8 6839 && (field_size == NULL_TREE
807e902e 6840 || wi::lts_p (offset, bitoffset_end)))
cfef45c8 6841 {
35b4d3a6
MS
6842 *suboff += bitoffset.to_uhwi ();
6843
6844 if (!size && TREE_CODE (cval) != CONSTRUCTOR)
6845 {
6846 /* For the final reference to the entire accessed member
6847 (SIZE is zero), reset OFFSET, disregard TYPE (which may
6848 be null) in favor of the type of the member, and set
6849 SIZE to the size of the accessed member. */
6850 offset = bitoffset.to_uhwi ();
6851 type = TREE_TYPE (cval);
6852 size = (bitoffset_end - bitoffset).to_uhwi ();
6853 }
6854
6855 /* We do have overlap. Now see if the field is large enough
6856 to cover the access. Give up for accesses that extend
6857 beyond the end of the object or that span multiple fields. */
807e902e 6858 if (wi::cmps (access_end, bitoffset_end) > 0)
cfef45c8 6859 return NULL_TREE;
032c80e9 6860 if (offset < bitoffset)
b8b2b009 6861 return NULL_TREE;
35b4d3a6
MS
6862
6863 offset_int inner_offset = offset_int (offset) - bitoffset;
cfef45c8 6864 return fold_ctor_reference (type, cval,
27bcd47c 6865 inner_offset.to_uhwi (), size,
35b4d3a6 6866 from_decl, suboff);
cfef45c8
RG
6867 }
6868 }
14b7950f
MS
6869
6870 if (!type)
6871 return NULL_TREE;
6872
6873 return build_zero_cst (type);
cfef45c8
RG
6874}
6875
35b4d3a6 6876/* CTOR is a value initializing memory. Fold a reference of TYPE and
14b7950f 6877 bit size POLY_SIZE to the memory at bit POLY_OFFSET. When POLY_SIZE
35b4d3a6
MS
6878 is zero, attempt to fold a reference to the entire subobject
6879 which OFFSET refers to. This is used when folding accesses to
6880 string members of aggregates. When non-null, set *SUBOFF to
6881 the bit offset of the accessed subobject. */
cfef45c8 6882
8403c2cf 6883tree
35b4d3a6
MS
6884fold_ctor_reference (tree type, tree ctor, const poly_uint64 &poly_offset,
6885 const poly_uint64 &poly_size, tree from_decl,
6886 unsigned HOST_WIDE_INT *suboff /* = NULL */)
cfef45c8
RG
6887{
6888 tree ret;
6889
6890 /* We found the field with exact match. */
35b4d3a6
MS
6891 if (type
6892 && useless_type_conversion_p (type, TREE_TYPE (ctor))
30acf282 6893 && known_eq (poly_offset, 0U))
9d60be38 6894 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
cfef45c8 6895
30acf282
RS
6896 /* The remaining optimizations need a constant size and offset. */
6897 unsigned HOST_WIDE_INT size, offset;
6898 if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
6899 return NULL_TREE;
6900
cfef45c8
RG
6901 /* We are at the end of walk, see if we can view convert the
6902 result. */
6903 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
6904 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
3d8208ce
TP
6905 && !compare_tree_int (TYPE_SIZE (type), size)
6906 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
cfef45c8 6907 {
9d60be38 6908 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
cfef45c8 6909 if (ret)
672d9f8e
RB
6910 {
6911 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
6912 if (ret)
6913 STRIP_USELESS_TYPE_CONVERSION (ret);
6914 }
cfef45c8
RG
6915 return ret;
6916 }
b2505143
RB
6917 /* For constants and byte-aligned/sized reads try to go through
6918 native_encode/interpret. */
6919 if (CONSTANT_CLASS_P (ctor)
6920 && BITS_PER_UNIT == 8
6921 && offset % BITS_PER_UNIT == 0
6922 && size % BITS_PER_UNIT == 0
6923 && size <= MAX_BITSIZE_MODE_ANY_MODE)
6924 {
6925 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1ff0a84c
JJ
6926 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
6927 offset / BITS_PER_UNIT);
6928 if (len > 0)
6929 return native_interpret_expr (type, buf, len);
b2505143 6930 }
cfef45c8
RG
6931 if (TREE_CODE (ctor) == CONSTRUCTOR)
6932 {
35b4d3a6
MS
6933 unsigned HOST_WIDE_INT dummy = 0;
6934 if (!suboff)
6935 suboff = &dummy;
cfef45c8 6936
eb8f1123
RG
6937 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
6938 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
c44c2088 6939 return fold_array_ctor_reference (type, ctor, offset, size,
35b4d3a6
MS
6940 from_decl, suboff);
6941
6942 return fold_nonarray_ctor_reference (type, ctor, offset, size,
6943 from_decl, suboff);
cfef45c8
RG
6944 }
6945
6946 return NULL_TREE;
6947}
6948
6949/* Return the tree representing the element referenced by T if T is an
6950 ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
6951 names using VALUEIZE. Return NULL_TREE otherwise. */
6952
6953tree
6954fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
6955{
6956 tree ctor, idx, base;
588db50c 6957 poly_int64 offset, size, max_size;
cfef45c8 6958 tree tem;
ee45a32d 6959 bool reverse;
cfef45c8 6960
f8a7df45
RG
6961 if (TREE_THIS_VOLATILE (t))
6962 return NULL_TREE;
6963
3a65ee74 6964 if (DECL_P (t))
cfef45c8
RG
6965 return get_symbol_constant_value (t);
6966
6967 tem = fold_read_from_constant_string (t);
6968 if (tem)
6969 return tem;
6970
6971 switch (TREE_CODE (t))
6972 {
6973 case ARRAY_REF:
6974 case ARRAY_RANGE_REF:
6975 /* Constant indexes are handled well by get_base_constructor.
6976 Only special case variable offsets.
6977 FIXME: This code can't handle nested references with variable indexes
6978 (they will be handled only by iteration of ccp). Perhaps we can bring
6979 get_ref_base_and_extent here and make it use a valueize callback. */
6980 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
6981 && valueize
6982 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
588db50c 6983 && poly_int_tree_p (idx))
cfef45c8
RG
6984 {
6985 tree low_bound, unit_size;
6986
6987 /* If the resulting bit-offset is constant, track it. */
6988 if ((low_bound = array_ref_low_bound (t),
588db50c 6989 poly_int_tree_p (low_bound))
cfef45c8 6990 && (unit_size = array_ref_element_size (t),
807e902e 6991 tree_fits_uhwi_p (unit_size)))
cfef45c8 6992 {
588db50c
RS
6993 poly_offset_int woffset
6994 = wi::sext (wi::to_poly_offset (idx)
6995 - wi::to_poly_offset (low_bound),
807e902e 6996 TYPE_PRECISION (TREE_TYPE (idx)));
a9e6359a
RB
6997 woffset *= tree_to_uhwi (unit_size);
6998 woffset *= BITS_PER_UNIT;
588db50c 6999 if (woffset.to_shwi (&offset))
807e902e 7000 {
807e902e
KZ
7001 base = TREE_OPERAND (t, 0);
7002 ctor = get_base_constructor (base, &offset, valueize);
7003 /* Empty constructor. Always fold to 0. */
7004 if (ctor == error_mark_node)
7005 return build_zero_cst (TREE_TYPE (t));
7006 /* Out of bound array access. Value is undefined,
7007 but don't fold. */
588db50c 7008 if (maybe_lt (offset, 0))
807e902e 7009 return NULL_TREE;
67914693 7010 /* We cannot determine ctor. */
807e902e
KZ
7011 if (!ctor)
7012 return NULL_TREE;
7013 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
7014 tree_to_uhwi (unit_size)
7015 * BITS_PER_UNIT,
7016 base);
7017 }
cfef45c8
RG
7018 }
7019 }
7020 /* Fallthru. */
7021
7022 case COMPONENT_REF:
7023 case BIT_FIELD_REF:
7024 case TARGET_MEM_REF:
7025 case MEM_REF:
ee45a32d 7026 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
cfef45c8
RG
7027 ctor = get_base_constructor (base, &offset, valueize);
7028
7029 /* Empty constructor. Always fold to 0. */
7030 if (ctor == error_mark_node)
7031 return build_zero_cst (TREE_TYPE (t));
7032 /* We do not know precise address. */
588db50c 7033 if (!known_size_p (max_size) || maybe_ne (max_size, size))
cfef45c8 7034 return NULL_TREE;
67914693 7035 /* We cannot determine ctor. */
cfef45c8
RG
7036 if (!ctor)
7037 return NULL_TREE;
7038
7039 /* Out of bound array access. Value is undefined, but don't fold. */
588db50c 7040 if (maybe_lt (offset, 0))
cfef45c8
RG
7041 return NULL_TREE;
7042
c44c2088
JH
7043 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
7044 base);
cfef45c8
RG
7045
7046 case REALPART_EXPR:
7047 case IMAGPART_EXPR:
7048 {
7049 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
7050 if (c && TREE_CODE (c) == COMPLEX_CST)
7051 return fold_build1_loc (EXPR_LOCATION (t),
7052 TREE_CODE (t), TREE_TYPE (t), c);
7053 break;
7054 }
7055
7056 default:
7057 break;
7058 }
7059
7060 return NULL_TREE;
7061}
7062
7063tree
7064fold_const_aggregate_ref (tree t)
7065{
7066 return fold_const_aggregate_ref_1 (t, NULL);
7067}
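/* A minimal usage sketch (hypothetical surrounding code): for a load
   from a constant aggregate such as

       static const int a[2] = { 1, 2 };
       ... = a[1];

   the folded value can be requested as follows, yielding the constant 2
   in this example.

     tree rhs = gimple_assign_rhs1 (stmt);
     tree val = fold_const_aggregate_ref (rhs);
     if (val && is_gimple_min_invariant (val))
       ... replace the load with VAL ...  */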
06bc3ec7 7068
85942f45 7069/* Lookup virtual method with index TOKEN in a virtual table V
ec77d61f
JH
7070 at OFFSET.
7071 Set CAN_REFER if non-NULL to false if method
7072 is not referable or if the virtual table is ill-formed (such as rewritten
7073 by a non-C++ produced symbol). Otherwise just return NULL in that case. */
81fa35bd
MJ
7074
7075tree
85942f45
JH
7076gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
7077 tree v,
ec77d61f
JH
7078 unsigned HOST_WIDE_INT offset,
7079 bool *can_refer)
81fa35bd 7080{
85942f45
JH
7081 tree vtable = v, init, fn;
7082 unsigned HOST_WIDE_INT size;
8c311b50
JH
7083 unsigned HOST_WIDE_INT elt_size, access_index;
7084 tree domain_type;
81fa35bd 7085
ec77d61f
JH
7086 if (can_refer)
7087 *can_refer = true;
7088
9de2f554 7089 /* First of all, double-check we have a virtual table. */
8813a647 7090 if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
ec77d61f 7091 {
ec77d61f
JH
7092 /* Pass down that we lost track of the target. */
7093 if (can_refer)
7094 *can_refer = false;
7095 return NULL_TREE;
7096 }
9de2f554 7097
2aa3da06
JH
7098 init = ctor_for_folding (v);
7099
9de2f554 7100 /* The virtual tables should always be born with constructors
2aa3da06
JH
7101 and we should always assume that they are available for
7102 folding. At the moment we do not stream them in all cases,
7103 but it should never happen that the ctor seems unreachable. */
7104 gcc_assert (init);
7105 if (init == error_mark_node)
7106 {
ec77d61f
JH
7107 /* Pass down that we lost track of the target. */
7108 if (can_refer)
7109 *can_refer = false;
2aa3da06
JH
7110 return NULL_TREE;
7111 }
81fa35bd 7112 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
ae7e9ddd 7113 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
85942f45 7114 offset *= BITS_PER_UNIT;
81fa35bd 7115 offset += token * size;
9de2f554 7116
8c311b50
JH
7117 /* Lookup the value in the constructor that is assumed to be array.
7118 This is equivalent to
7119 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
7120 offset, size, NULL);
7122 but in constant time. We expect that the frontend produced a simple
7122 array without indexed initializers. */
7123
7124 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
7125 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
7126 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
7127 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
7128
7129 access_index = offset / BITS_PER_UNIT / elt_size;
7130 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
7131
bf8d8309
MP
7132 /* The C++ FE can now produce indexed fields, and we check if the indexes
7133 match. */
8c311b50
JH
7134 if (access_index < CONSTRUCTOR_NELTS (init))
7135 {
7136 fn = CONSTRUCTOR_ELT (init, access_index)->value;
bf8d8309
MP
7137 tree idx = CONSTRUCTOR_ELT (init, access_index)->index;
7138 gcc_checking_assert (!idx || tree_to_uhwi (idx) == access_index);
8c311b50
JH
7139 STRIP_NOPS (fn);
7140 }
7141 else
7142 fn = NULL;
9de2f554
JH
7143
7144 /* For a type-inconsistent program we may end up looking up a virtual method
7145 in a virtual table that does not contain TOKEN entries. We may overrun
7146 the virtual table and pick up a constant or RTTI info pointer.
7147 In any case the call is undefined. */
7148 if (!fn
7149 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
7150 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
7151 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
7152 else
7153 {
7154 fn = TREE_OPERAND (fn, 0);
7155
7156 /* When cgraph node is missing and function is not public, we cannot
7157 devirtualize. This can happen in WHOPR when the actual method
7158 ends up in other partition, because we found devirtualization
7159 possibility too late. */
7160 if (!can_refer_decl_in_current_unit_p (fn, vtable))
ec77d61f
JH
7161 {
7162 if (can_refer)
7163 {
7164 *can_refer = false;
7165 return fn;
7166 }
7167 return NULL_TREE;
7168 }
9de2f554 7169 }
81fa35bd 7170
7501ca28
RG
7171 /* Make sure we create a cgraph node for functions we'll reference.
7172 They can be non-existent if the reference comes from an entry
7173 of an external vtable for example. */
d52f5295 7174 cgraph_node::get_create (fn);
7501ca28 7175
81fa35bd
MJ
7176 return fn;
7177}
7178
85942f45
JH
7179/* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
7180 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
7181 KNOWN_BINFO carries the binfo describing the true type of
ec77d61f
JH
7182 OBJ_TYPE_REF_OBJECT(REF).
7183 Set CAN_REFER if non-NULL to false if method
7184 is not referable or if the virtual table is ill-formed (such as rewritten
7185 by a non-C++ produced symbol). Otherwise just return NULL in that case. */
85942f45
JH
7186
7187tree
ec77d61f
JH
7188gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
7189 bool *can_refer)
85942f45
JH
7190{
7191 unsigned HOST_WIDE_INT offset;
7192 tree v;
7193
7194 v = BINFO_VTABLE (known_binfo);
7195 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
7196 if (!v)
7197 return NULL_TREE;
7198
7199 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
ec77d61f
JH
7200 {
7201 if (can_refer)
7202 *can_refer = false;
7203 return NULL_TREE;
7204 }
7205 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
85942f45
JH
7206}
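/* A minimal usage sketch (hypothetical surrounding code; the real
   devirtualization callers perform additional sanity checks): resolving
   an OBJ_TYPE_REF call once the binfo KNOWN_BINFO of the dynamic type
   has been determined.

     tree ref = gimple_call_fn (call);
     HOST_WIDE_INT token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (ref));
     bool can_refer;
     tree fndecl
       = gimple_get_virt_method_for_binfo (token, known_binfo, &can_refer);
     if (fndecl && can_refer)
       gimple_call_set_fndecl (call, fndecl);  */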
7207
737f500a
RB
7208/* Given a pointer value T, return a simplified version of an
7209 indirection through T, or NULL_TREE if no simplification is
b184c8f1
AM
7210 possible. Note that the resulting type may be different from
7211 the type pointed to in the sense that it is still compatible
7212 from the langhooks point of view. */
7213
7214tree
7215gimple_fold_indirect_ref (tree t)
7216{
7217 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
7218 tree sub = t;
7219 tree subtype;
7220
7221 STRIP_NOPS (sub);
7222 subtype = TREE_TYPE (sub);
737f500a
RB
7223 if (!POINTER_TYPE_P (subtype)
7224 || TYPE_REF_CAN_ALIAS_ALL (ptype))
b184c8f1
AM
7225 return NULL_TREE;
7226
7227 if (TREE_CODE (sub) == ADDR_EXPR)
7228 {
7229 tree op = TREE_OPERAND (sub, 0);
7230 tree optype = TREE_TYPE (op);
7231 /* *&p => p */
7232 if (useless_type_conversion_p (type, optype))
7233 return op;
7234
7235 /* *(foo *)&fooarray => fooarray[0] */
7236 if (TREE_CODE (optype) == ARRAY_TYPE
7237 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
7238 && useless_type_conversion_p (type, TREE_TYPE (optype)))
7239 {
7240 tree type_domain = TYPE_DOMAIN (optype);
7241 tree min_val = size_zero_node;
7242 if (type_domain && TYPE_MIN_VALUE (type_domain))
7243 min_val = TYPE_MIN_VALUE (type_domain);
7244 if (TREE_CODE (min_val) == INTEGER_CST)
7245 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
7246 }
7247 /* *(foo *)&complexfoo => __real__ complexfoo */
7248 else if (TREE_CODE (optype) == COMPLEX_TYPE
7249 && useless_type_conversion_p (type, TREE_TYPE (optype)))
7250 return fold_build1 (REALPART_EXPR, type, op);
7251 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
7252 else if (TREE_CODE (optype) == VECTOR_TYPE
7253 && useless_type_conversion_p (type, TREE_TYPE (optype)))
7254 {
7255 tree part_width = TYPE_SIZE (type);
7256 tree index = bitsize_int (0);
7257 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
7258 }
7259 }
7260
7261 /* *(p + CST) -> ... */
7262 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
7263 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
7264 {
7265 tree addr = TREE_OPERAND (sub, 0);
7266 tree off = TREE_OPERAND (sub, 1);
7267 tree addrtype;
7268
7269 STRIP_NOPS (addr);
7270 addrtype = TREE_TYPE (addr);
7271
7272 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
7273 if (TREE_CODE (addr) == ADDR_EXPR
7274 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
7275 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
cc269bb6 7276 && tree_fits_uhwi_p (off))
b184c8f1 7277 {
ae7e9ddd 7278 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
b184c8f1
AM
7279 tree part_width = TYPE_SIZE (type);
7280 unsigned HOST_WIDE_INT part_widthi
9439e9a1 7281 = tree_to_shwi (part_width) / BITS_PER_UNIT;
b184c8f1
AM
7282 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
7283 tree index = bitsize_int (indexi);
928686b1
RS
7284 if (known_lt (offset / part_widthi,
7285 TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
b184c8f1
AM
7286 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
7287 part_width, index);
7288 }
7289
7290 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
7291 if (TREE_CODE (addr) == ADDR_EXPR
7292 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
7293 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
7294 {
7295 tree size = TYPE_SIZE_UNIT (type);
7296 if (tree_int_cst_equal (size, off))
7297 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
7298 }
7299
7300 /* *(p + CST) -> MEM_REF <p, CST>. */
7301 if (TREE_CODE (addr) != ADDR_EXPR
7302 || DECL_P (TREE_OPERAND (addr, 0)))
7303 return fold_build2 (MEM_REF, type,
7304 addr,
8e6cdc90 7305 wide_int_to_tree (ptype, wi::to_wide (off)));
b184c8f1
AM
7306 }
7307
7308 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
7309 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
7310 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
7311 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
7312 {
7313 tree type_domain;
7314 tree min_val = size_zero_node;
7315 tree osub = sub;
7316 sub = gimple_fold_indirect_ref (sub);
7317 if (! sub)
7318 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
7319 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
7320 if (type_domain && TYPE_MIN_VALUE (type_domain))
7321 min_val = TYPE_MIN_VALUE (type_domain);
7322 if (TREE_CODE (min_val) == INTEGER_CST)
7323 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
7324 }
7325
7326 return NULL_TREE;
7327}
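/* A minimal usage sketch (hypothetical surrounding code): given a
   pointer value such as &fooarray or &complexfoo, the routine above may
   return a more direct reference (fooarray[0], __real__ complexfoo,
   a MEM_REF, ...), or NULL_TREE when nothing simpler is known.

     tree folded = gimple_fold_indirect_ref (ptr);
     if (folded)
       ... use FOLDED instead of building *PTR; the result may still
	   need gimplification before being placed in a statement ...  */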
19e51b40
JJ
7328
7329/* Return true if CODE is an operation that when operating on signed
7330 integer types involves undefined behavior on overflow and the
7331 operation can be expressed with unsigned arithmetic. */
7332
7333bool
7334arith_code_with_undefined_signed_overflow (tree_code code)
7335{
7336 switch (code)
7337 {
8e2c037d 7338 case ABS_EXPR:
19e51b40
JJ
7339 case PLUS_EXPR:
7340 case MINUS_EXPR:
7341 case MULT_EXPR:
7342 case NEGATE_EXPR:
7343 case POINTER_PLUS_EXPR:
7344 return true;
7345 default:
7346 return false;
7347 }
7348}
7349
7350/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
7351 operation that can be transformed to unsigned arithmetic by converting
7352 its operand, carrying out the operation in the corresponding unsigned
7353 type and converting the result back to the original type.
7354
7355 Returns a sequence of statements that replace STMT and also contain
7356 a modified form of STMT itself. */
7357
7358gimple_seq
355fe088 7359rewrite_to_defined_overflow (gimple *stmt)
19e51b40
JJ
7360{
7361 if (dump_file && (dump_flags & TDF_DETAILS))
7362 {
7363 fprintf (dump_file, "rewriting stmt with undefined signed "
7364 "overflow ");
7365 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
7366 }
7367
7368 tree lhs = gimple_assign_lhs (stmt);
7369 tree type = unsigned_type_for (TREE_TYPE (lhs));
7370 gimple_seq stmts = NULL;
8e2c037d
RB
7371 if (gimple_assign_rhs_code (stmt) == ABS_EXPR)
7372 gimple_assign_set_rhs_code (stmt, ABSU_EXPR);
7373 else
7374 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
7375 {
7376 tree op = gimple_op (stmt, i);
7377 op = gimple_convert (&stmts, type, op);
7378 gimple_set_op (stmt, i, op);
7379 }
19e51b40
JJ
7380 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
7381 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
7382 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
7383 gimple_seq_add_stmt (&stmts, stmt);
355fe088 7384 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
19e51b40
JJ
7385 gimple_seq_add_stmt (&stmts, cvt);
7386
7387 return stmts;
7388}
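/* A minimal usage sketch (hypothetical surrounding code): for a
   statement like

       c_3 = a_1 + b_2;      (signed int, overflow undefined)

   the rewrite produces a sequence equivalent to

       _4 = (unsigned int) a_1;
       _5 = (unsigned int) b_2;
       _6 = _4 + _5;
       c_3 = (int) _6;

   A caller might apply it roughly like this, after also checking that
   the type really has undefined overflow:

     if (arith_code_with_undefined_signed_overflow
	   (gimple_assign_rhs_code (stmt)))
       {
	 gimple_seq seq = rewrite_to_defined_overflow (stmt);
	 gsi_replace_with_seq (&gsi, seq, true);
       }  */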
d4f5cd5e 7389
3d2cf79f 7390
c26de36d
RB
7391/* The valueization hook we use for the gimple_build API simplification.
7392 This makes us match fold_buildN behavior by only combining with
7393 statements in the sequence(s) we are currently building. */
7394
7395static tree
7396gimple_build_valueize (tree op)
7397{
7398 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
7399 return op;
7400 return NULL_TREE;
7401}
7402
3d2cf79f 7403/* Build the expression CODE OP0 of type TYPE with location LOC,
c26de36d 7404 simplifying it first if possible. Returns the built
3d2cf79f
RB
7405 expression value and appends statements possibly defining it
7406 to SEQ. */
7407
7408tree
7409gimple_build (gimple_seq *seq, location_t loc,
c26de36d 7410 enum tree_code code, tree type, tree op0)
3d2cf79f 7411{
c26de36d 7412 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
3d2cf79f
RB
7413 if (!res)
7414 {
a15ebbcd 7415 res = create_tmp_reg_or_ssa_name (type);
355fe088 7416 gimple *stmt;
3d2cf79f
RB
7417 if (code == REALPART_EXPR
7418 || code == IMAGPART_EXPR
7419 || code == VIEW_CONVERT_EXPR)
0d0e4a03 7420 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
3d2cf79f 7421 else
0d0e4a03 7422 stmt = gimple_build_assign (res, code, op0);
3d2cf79f
RB
7423 gimple_set_location (stmt, loc);
7424 gimple_seq_add_stmt_without_update (seq, stmt);
7425 }
7426 return res;
7427}
7428
7429/* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
c26de36d 7430 simplifying it first if possible. Returns the built
3d2cf79f
RB
7431 expression value and appends statements possibly defining it
7432 to SEQ. */
7433
7434tree
7435gimple_build (gimple_seq *seq, location_t loc,
c26de36d 7436 enum tree_code code, tree type, tree op0, tree op1)
3d2cf79f 7437{
c26de36d 7438 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
3d2cf79f
RB
7439 if (!res)
7440 {
a15ebbcd 7441 res = create_tmp_reg_or_ssa_name (type);
355fe088 7442 gimple *stmt = gimple_build_assign (res, code, op0, op1);
3d2cf79f
RB
7443 gimple_set_location (stmt, loc);
7444 gimple_seq_add_stmt_without_update (seq, stmt);
7445 }
7446 return res;
7447}
7448
7449/* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
c26de36d 7450 simplifying it first if possible. Returns the built
3d2cf79f
RB
7451 expression value and appends statements possibly defining it
7452 to SEQ. */
7453
7454tree
7455gimple_build (gimple_seq *seq, location_t loc,
c26de36d 7456 enum tree_code code, tree type, tree op0, tree op1, tree op2)
3d2cf79f
RB
7457{
7458 tree res = gimple_simplify (code, type, op0, op1, op2,
c26de36d 7459 seq, gimple_build_valueize);
3d2cf79f
RB
7460 if (!res)
7461 {
a15ebbcd 7462 res = create_tmp_reg_or_ssa_name (type);
355fe088 7463 gimple *stmt;
3d2cf79f 7464 if (code == BIT_FIELD_REF)
0d0e4a03
JJ
7465 stmt = gimple_build_assign (res, code,
7466 build3 (code, type, op0, op1, op2));
3d2cf79f 7467 else
0d0e4a03 7468 stmt = gimple_build_assign (res, code, op0, op1, op2);
3d2cf79f
RB
7469 gimple_set_location (stmt, loc);
7470 gimple_seq_add_stmt_without_update (seq, stmt);
7471 }
7472 return res;
7473}
7474
7475/* Build the call FN (ARG0) with a result of type TYPE
7476 (or no result if TYPE is void) with location LOC,
c26de36d 7477 simplifying it first if possible. Returns the built
3d2cf79f
RB
7478 expression value (or NULL_TREE if TYPE is void) and appends
7479 statements possibly defining it to SEQ. */
7480
7481tree
eb69361d
RS
7482gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7483 tree type, tree arg0)
3d2cf79f 7484{
c26de36d 7485 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
3d2cf79f
RB
7486 if (!res)
7487 {
eb69361d
RS
7488 gcall *stmt;
7489 if (internal_fn_p (fn))
7490 stmt = gimple_build_call_internal (as_internal_fn (fn), 1, arg0);
7491 else
7492 {
7493 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7494 stmt = gimple_build_call (decl, 1, arg0);
7495 }
3d2cf79f
RB
7496 if (!VOID_TYPE_P (type))
7497 {
a15ebbcd 7498 res = create_tmp_reg_or_ssa_name (type);
3d2cf79f
RB
7499 gimple_call_set_lhs (stmt, res);
7500 }
7501 gimple_set_location (stmt, loc);
7502 gimple_seq_add_stmt_without_update (seq, stmt);
7503 }
7504 return res;
7505}
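/* A minimal usage sketch (hypothetical surrounding code): the
   combined_fn overloads emit either an internal function or a builtin
   call, e.g. building sqrt (x_1) as a new value on a fresh sequence.

     gimple_seq seq = NULL;
     tree r = gimple_build (&seq, loc, CFN_BUILT_IN_SQRT,
			    double_type_node, x_1);  */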
7506
7507/* Build the call FN (ARG0, ARG1) with a result of type TYPE
7508 (or no result if TYPE is void) with location LOC,
c26de36d 7509 simplifying it first if possible. Returns the built
3d2cf79f
RB
7510 expression value (or NULL_TREE if TYPE is void) and appends
7511 statements possibly defining it to SEQ. */
7512
7513tree
eb69361d
RS
7514gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7515 tree type, tree arg0, tree arg1)
3d2cf79f 7516{
c26de36d 7517 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
3d2cf79f
RB
7518 if (!res)
7519 {
eb69361d
RS
7520 gcall *stmt;
7521 if (internal_fn_p (fn))
7522 stmt = gimple_build_call_internal (as_internal_fn (fn), 2, arg0, arg1);
7523 else
7524 {
7525 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7526 stmt = gimple_build_call (decl, 2, arg0, arg1);
7527 }
3d2cf79f
RB
7528 if (!VOID_TYPE_P (type))
7529 {
a15ebbcd 7530 res = create_tmp_reg_or_ssa_name (type);
3d2cf79f
RB
7531 gimple_call_set_lhs (stmt, res);
7532 }
7533 gimple_set_location (stmt, loc);
7534 gimple_seq_add_stmt_without_update (seq, stmt);
7535 }
7536 return res;
7537}
7538
7539/* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
7540 (or no result if TYPE is void) with location LOC,
c26de36d 7541 simplifying it first if possible. Returns the built
3d2cf79f
RB
7542 expression value (or NULL_TREE if TYPE is void) and appends
7543 statements possibly defining it to SEQ. */
7544
7545tree
eb69361d
RS
7546gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7547 tree type, tree arg0, tree arg1, tree arg2)
3d2cf79f 7548{
c26de36d
RB
7549 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
7550 seq, gimple_build_valueize);
3d2cf79f
RB
7551 if (!res)
7552 {
eb69361d
RS
7553 gcall *stmt;
7554 if (internal_fn_p (fn))
7555 stmt = gimple_build_call_internal (as_internal_fn (fn),
7556 3, arg0, arg1, arg2);
7557 else
7558 {
7559 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7560 stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
7561 }
3d2cf79f
RB
7562 if (!VOID_TYPE_P (type))
7563 {
a15ebbcd 7564 res = create_tmp_reg_or_ssa_name (type);
3d2cf79f
RB
7565 gimple_call_set_lhs (stmt, res);
7566 }
7567 gimple_set_location (stmt, loc);
7568 gimple_seq_add_stmt_without_update (seq, stmt);
7569 }
7570 return res;
7571}
7572
7573/* Build the conversion (TYPE) OP with a result of type TYPE
7574 with location LOC if such conversion is necessary in GIMPLE,
7575 simplifying it first.
7576 Returns the built expression value and appends
7577 statements possibly defining it to SEQ. */
d4f5cd5e
RB
7578
7579tree
7580gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
7581{
7582 if (useless_type_conversion_p (type, TREE_TYPE (op)))
7583 return op;
3d2cf79f 7584 return gimple_build (seq, loc, NOP_EXPR, type, op);
d4f5cd5e 7585}
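/* A minimal usage sketch (hypothetical surrounding code): building
   "(long) (a_1 + 1)" with the helpers above.  Statements that are
   actually needed are appended to SEQ, while already-foldable forms
   come back directly as constants; the sequence is then inserted at a
   caller-chosen iterator.

     gimple_seq seq = NULL;
     tree sum = gimple_build (&seq, loc, PLUS_EXPR, TREE_TYPE (a_1),
			      a_1, build_one_cst (TREE_TYPE (a_1)));
     tree res = gimple_convert (&seq, loc, long_integer_type_node, sum);
     gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);  */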
68e57f04 7586
74e3c262
RB
7587/* Build the conversion (ptrofftype) OP with a result of a type
7588 compatible with ptrofftype with location LOC if such conversion
7589 is necessary in GIMPLE, simplifying it first.
7590 Returns the built expression value and appends
7591 statements possibly defining it to SEQ. */
7592
7593tree
7594gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
7595{
7596 if (ptrofftype_p (TREE_TYPE (op)))
7597 return op;
7598 return gimple_convert (seq, loc, sizetype, op);
7599}
7600
e7c45b66
RS
7601/* Build a vector of type TYPE in which each element has the value OP.
7602 Return a gimple value for the result, appending any new statements
7603 to SEQ. */
7604
7605tree
7606gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
7607 tree op)
7608{
928686b1
RS
7609 if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
7610 && !CONSTANT_CLASS_P (op))
7611 return gimple_build (seq, loc, VEC_DUPLICATE_EXPR, type, op);
7612
e7c45b66
RS
7613 tree res, vec = build_vector_from_val (type, op);
7614 if (is_gimple_val (vec))
7615 return vec;
7616 if (gimple_in_ssa_p (cfun))
7617 res = make_ssa_name (type);
7618 else
7619 res = create_tmp_reg (type);
7620 gimple *stmt = gimple_build_assign (res, vec);
7621 gimple_set_location (stmt, loc);
7622 gimple_seq_add_stmt_without_update (seq, stmt);
7623 return res;
7624}
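/* A minimal usage sketch (hypothetical surrounding code; VECTYPE is
   assumed to be an integer vector type): splatting the scalar x_1 into
   every element of a vector.

     gimple_seq seq = NULL;
     tree dup = gimple_build_vector_from_val (&seq, loc, vectype, x_1);  */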
7625
abe73c3d
RS
7626/* Build a vector from BUILDER, handling the case in which some elements
7627 are non-constant. Return a gimple value for the result, appending any
7628 new instructions to SEQ.
7629
7630 BUILDER must not have a stepped encoding on entry. This is because
7631 the function is not geared up to handle the arithmetic that would
7632 be needed in the variable case, and any code building a vector that
7633 is known to be constant should use BUILDER->build () directly. */
e7c45b66
RS
7634
7635tree
abe73c3d
RS
7636gimple_build_vector (gimple_seq *seq, location_t loc,
7637 tree_vector_builder *builder)
e7c45b66 7638{
abe73c3d
RS
7639 gcc_assert (builder->nelts_per_pattern () <= 2);
7640 unsigned int encoded_nelts = builder->encoded_nelts ();
7641 for (unsigned int i = 0; i < encoded_nelts; ++i)
7642 if (!TREE_CONSTANT ((*builder)[i]))
e7c45b66 7643 {
abe73c3d 7644 tree type = builder->type ();
928686b1 7645 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
e7c45b66
RS
7646 vec<constructor_elt, va_gc> *v;
7647 vec_alloc (v, nelts);
7648 for (i = 0; i < nelts; ++i)
abe73c3d 7649 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));
e7c45b66
RS
7650
7651 tree res;
7652 if (gimple_in_ssa_p (cfun))
7653 res = make_ssa_name (type);
7654 else
7655 res = create_tmp_reg (type);
7656 gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
7657 gimple_set_location (stmt, loc);
7658 gimple_seq_add_stmt_without_update (seq, stmt);
7659 return res;
7660 }
abe73c3d 7661 return builder->build ();
e7c45b66
RS
7662}
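/* A minimal usage sketch (hypothetical surrounding code; VECTYPE is
   assumed to be a two-element vector type): building the vector
   { x_1, 0 } where one element is non-constant.  The builder uses one
   element per pattern, so it is not stepped, as required above.

     tree_vector_builder builder (vectype, 2, 1);
     builder.quick_push (x_1);
     builder.quick_push (build_zero_cst (TREE_TYPE (vectype)));
     gimple_seq seq = NULL;
     tree vec = gimple_build_vector (&seq, loc, &builder);  */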
7663
68e57f04
RS
7664/* Return true if the result of assignment STMT is known to be non-negative.
7665 If the return value is based on the assumption that signed overflow is
7666 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7667 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7668
7669static bool
7670gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7671 int depth)
7672{
7673 enum tree_code code = gimple_assign_rhs_code (stmt);
7674 switch (get_gimple_rhs_class (code))
7675 {
7676 case GIMPLE_UNARY_RHS:
7677 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7678 gimple_expr_type (stmt),
7679 gimple_assign_rhs1 (stmt),
7680 strict_overflow_p, depth);
7681 case GIMPLE_BINARY_RHS:
7682 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7683 gimple_expr_type (stmt),
7684 gimple_assign_rhs1 (stmt),
7685 gimple_assign_rhs2 (stmt),
7686 strict_overflow_p, depth);
7687 case GIMPLE_TERNARY_RHS:
7688 return false;
7689 case GIMPLE_SINGLE_RHS:
7690 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
7691 strict_overflow_p, depth);
7692 case GIMPLE_INVALID_RHS:
7693 break;
7694 }
7695 gcc_unreachable ();
7696}
7697
7698/* Return true if return value of call STMT is known to be non-negative.
7699 If the return value is based on the assumption that signed overflow is
7700 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7701 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7702
7703static bool
7704gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7705 int depth)
7706{
7707 tree arg0 = gimple_call_num_args (stmt) > 0 ?
7708 gimple_call_arg (stmt, 0) : NULL_TREE;
7709 tree arg1 = gimple_call_num_args (stmt) > 1 ?
7710 gimple_call_arg (stmt, 1) : NULL_TREE;
7711
7712 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
1d9da71f 7713 gimple_call_combined_fn (stmt),
68e57f04
RS
7714 arg0,
7715 arg1,
7716 strict_overflow_p, depth);
7717}
7718
4534c203
RB
7719/* Return true if return value of call STMT is known to be non-negative.
7720 If the return value is based on the assumption that signed overflow is
7721 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7722 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7723
7724static bool
7725gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7726 int depth)
7727{
7728 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7729 {
7730 tree arg = gimple_phi_arg_def (stmt, i);
7731 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
7732 return false;
7733 }
7734 return true;
7735}
7736
68e57f04
RS
7737/* Return true if STMT is known to compute a non-negative value.
7738 If the return value is based on the assumption that signed overflow is
7739 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7740 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7741
7742bool
7743gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7744 int depth)
7745{
7746 switch (gimple_code (stmt))
7747 {
7748 case GIMPLE_ASSIGN:
7749 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
7750 depth);
7751 case GIMPLE_CALL:
7752 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
7753 depth);
4534c203
RB
7754 case GIMPLE_PHI:
7755 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
7756 depth);
68e57f04
RS
7757 default:
7758 return false;
7759 }
7760}
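/* A minimal usage sketch (hypothetical surrounding code): querying
   whether the defining statement of NAME_1 is known to compute a
   non-negative value, starting at depth 0.

     bool strict_ovf = false;
     if (gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (name_1),
					  &strict_ovf, 0))
       ... the value is known to be >= 0; if STRICT_OVF was set, this
	   relies on signed overflow being undefined ...  */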
67dbe582
RS
7761
7762/* Return true if the floating-point value computed by assignment STMT
7763 is known to have an integer value. We also allow +Inf, -Inf and NaN
5a00b0aa 7764 to be considered integer values. Return false for signaling NaN.
67dbe582
RS
7765
7766 DEPTH is the current nesting depth of the query. */
7767
7768static bool
7769gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
7770{
7771 enum tree_code code = gimple_assign_rhs_code (stmt);
7772 switch (get_gimple_rhs_class (code))
7773 {
7774 case GIMPLE_UNARY_RHS:
7775 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
7776 gimple_assign_rhs1 (stmt), depth);
7777 case GIMPLE_BINARY_RHS:
7778 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
7779 gimple_assign_rhs1 (stmt),
7780 gimple_assign_rhs2 (stmt), depth);
7781 case GIMPLE_TERNARY_RHS:
7782 return false;
7783 case GIMPLE_SINGLE_RHS:
7784 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
7785 case GIMPLE_INVALID_RHS:
7786 break;
7787 }
7788 gcc_unreachable ();
7789}
7790
7791/* Return true if the floating-point value computed by call STMT is known
7792 to have an integer value. We also allow +Inf, -Inf and NaN to be
5a00b0aa 7793 considered integer values. Return false for signaling NaN.
67dbe582
RS
7794
7795 DEPTH is the current nesting depth of the query. */
7796
7797static bool
7798gimple_call_integer_valued_real_p (gimple *stmt, int depth)
7799{
7800 tree arg0 = (gimple_call_num_args (stmt) > 0
7801 ? gimple_call_arg (stmt, 0)
7802 : NULL_TREE);
7803 tree arg1 = (gimple_call_num_args (stmt) > 1
7804 ? gimple_call_arg (stmt, 1)
7805 : NULL_TREE);
1d9da71f 7806 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
67dbe582
RS
7807 arg0, arg1, depth);
7808}
7809
7810/* Return true if the floating-point result of phi STMT is known to have
7811 an integer value. We also allow +Inf, -Inf and NaN to be considered
5a00b0aa 7812 integer values. Return false for signaling NaN.
67dbe582
RS
7813
7814 DEPTH is the current nesting depth of the query. */
7815
7816static bool
7817gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
7818{
7819 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7820 {
7821 tree arg = gimple_phi_arg_def (stmt, i);
7822 if (!integer_valued_real_single_p (arg, depth + 1))
7823 return false;
7824 }
7825 return true;
7826}
7827
7828/* Return true if the floating-point value computed by STMT is known
7829 to have an integer value. We also allow +Inf, -Inf and NaN to be
5a00b0aa 7830 considered integer values. Return false for signaling NaN.
67dbe582
RS
7831
7832 DEPTH is the current nesting depth of the query. */
7833
7834bool
7835gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
7836{
7837 switch (gimple_code (stmt))
7838 {
7839 case GIMPLE_ASSIGN:
7840 return gimple_assign_integer_valued_real_p (stmt, depth);
7841 case GIMPLE_CALL:
7842 return gimple_call_integer_valued_real_p (stmt, depth);
7843 case GIMPLE_PHI:
7844 return gimple_phi_integer_valued_real_p (stmt, depth);
7845 default:
7846 return false;
7847 }
7848}