/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2020 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "gimple-ssa-warn-restrict.h"
#include "fold-const.h"
#include "stmt.h"
#include "expr.h"
#include "stor-layout.h"
#include "dumpfile.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-object-size.h"
#include "tree-ssa.h"
#include "tree-ssa-propagate.h"
#include "ipa-utils.h"
#include "tree-ssa-address.h"
#include "langhooks.h"
#include "gimplify-me.h"
#include "dbgcnt.h"
#include "builtins.h"
#include "tree-eh.h"
#include "gimple-match.h"
#include "gomp-constants.h"
#include "optabs-query.h"
#include "omp-general.h"
#include "tree-cfg.h"
#include "fold-const-call.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "calls.h"
#include "tree-vector-builder.h"
#include "tree-ssa-strlen.h"
#include "varasm.h"

enum strlen_range_kind {
  /* Compute the exact constant string length.  */
  SRK_STRLEN,
  /* Compute the maximum constant string length.  */
  SRK_STRLENMAX,
  /* Compute a range of string lengths bounded by object sizes.  When
     the length of a string cannot be determined, consider as the upper
     bound the size of the enclosing object the string may be a member
     or element of.  Also determine the size of the largest character
     array the string may refer to.  */
  SRK_LENRANGE,
  /* Determine the integer value of the argument (not string length).  */
  SRK_INT_VALUE
};

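/* Illustration of the kinds above (example values, not taken from the
   code below): for a string stored in char a[8] that is known to hold
   "abc", SRK_STRLEN computes the exact length 3, while SRK_LENRANGE
   with unknown array contents computes the range [0, 7], using the
   array size as the upper bound.  */
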
static bool
get_range_strlen (tree, bitmap *, strlen_range_kind, c_strlen_data *, unsigned);

/* Return true when DECL can be referenced from current unit.
   FROM_DECL (if non-null) specifies the constructor of the variable
   DECL was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
	In this case we have static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred by external vtables that
	we devirtualize only during final compilation stage.
	At this time we already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || !VAR_OR_FUNCTION_DECL_P (decl))
    return true;

  /* Static objects can be referred only if they are defined and not optimized
     out yet.  */
  if (!TREE_PUBLIC (decl))
    {
      if (DECL_EXTERNAL (decl))
	return false;
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      return !node || !node->inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from the initializer of
     an external var or a var that has been optimized out.  */
  if (!from_decl
      || !VAR_P (from_decl)
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When a function is public, we can always introduce a new reference.
     The exceptions are the COMDAT functions where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have COMDAT.  We are going to check if we still have definition
     or if the definition is going to be output in other partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when corresponding vtable is output.
     This is however not possible - the ABI specifies that COMDATs are
     output in units where they are used and when the other unit was
     compiled with LTO it is possible that the vtable was kept public
     while the function itself was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->inlined_to;
}

/* Create a temporary for TYPE for a statement STMT.  If the current function
   is in SSA form, an SSA name is created.  Otherwise a temporary register
   is made.  */

tree
create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
{
  if (gimple_in_ssa_p (cfun))
    return make_ssa_name (type, stmt);
  else
    return create_tmp_reg (type);
}

/* CVAL is a value taken from DECL_INITIAL of a variable.  Try to transform
   it into an acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies the variable whose constructor
   contains CVAL.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  if (CONSTANT_CLASS_P (cval))
    return cval;

  tree orig_cval = cval;
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      if (VAR_OR_FUNCTION_DECL_P (base)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_TYPE (base) == error_mark_node)
	return NULL_TREE;
      if (VAR_P (base))
	TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  /* In CONSTRUCTORs we may see unfolded constants like (int (*) ()) 0.  */
  if (TREE_CODE (cval) == INTEGER_CST)
    {
      if (TREE_OVERFLOW_P (cval))
	cval = drop_tree_overflow (cval);
      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  return orig_cval;
}

/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  tree val = ctor_for_folding (sym);
  if (val != error_mark_node)
    {
      if (val)
	{
	  val = canonicalize_constructor_val (unshare_expr (val), sym);
	  if (val && is_gimple_min_invariant (val))
	    return val;
	  else
	    return NULL_TREE;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && is_gimple_reg_type (TREE_TYPE (sym)))
	return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}


/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree result;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
			   TREE_CODE (expr),
			   TREE_TYPE (expr),
			   TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
			     TREE_CODE (expr),
			     TREE_TYPE (expr),
			     TREE_OPERAND (expr, 0),
			     TREE_OPERAND (expr, 1),
			     TREE_OPERAND (expr, 2));

  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  return NULL_TREE;
}

/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We cannot use __builtin_unreachable here because it
			 cannot have address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }
	  }

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));

	    if (result)
	      {
		/* Strip away useless type conversions.  Both the
		   NON_LVALUE_EXPR that may have been added by fold, and
		   "useless" type conversions that might now be apparent
		   due to propagation.  */
		STRIP_USELESS_TYPE_CONVERSION (result);

		if (result != rhs && valid_gimple_rhs_p (result))
		  return result;
	      }
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (! CONSTANT_CLASS_P (val))
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return get_symbol_constant_value (rhs);
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
				 TREE_TYPE (gimple_assign_lhs (stmt)),
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}

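/* Sketch of the OBJ_TYPE_REF case above (hypothetical example, not part of
   the original sources): when -fdevirtualize determines that a virtual
   call expression has exactly one possible target, say B::f, the address
   reference is folded to the address of B::f; when the target set is
   provably empty, the reference is folded to a zero constant instead,
   since __builtin_unreachable cannot have its address taken.  */
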

/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}

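/* Rough illustration of the virtual operand threading above (the SSA
   names are made up): if the replaced statement carried VUSE .MEM_1 and
   VDEF .MEM_3 and the replacement sequence contains two stores, the
   first store receives VUSE .MEM_1 and a fresh VDEF, and the last store
   receives the original VDEF .MEM_3, so later uses of .MEM_3 need no
   renaming.  */
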
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), false);
	  return;
	}
    }
  else
    {
      tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  gsi_replace_with_seq_vops (si_p, stmts);
}


/* Replace the call at *GSI with the gimple value VAL.  */

void
replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  gimple *repl;
  if (lhs)
    {
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
	val = fold_convert (TREE_TYPE (lhs), val);
      repl = gimple_build_assign (lhs, val);
    }
  else
    repl = gimple_build_nop ();
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      unlink_stmt_vdef (stmt);
      release_ssa_name (vdef);
    }
  gsi_replace (gsi, repl, false);
}

/* Replace the call at *GSI with the new call REPL and fold that
   again.  */

static void
replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
{
  gimple *stmt = gsi_stmt (*gsi);
  gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
  gimple_set_location (repl, gimple_location (stmt));
  gimple_move_vops (repl, stmt);
  gsi_replace (gsi, repl, false);
  fold_stmt (gsi);
}

/* Return true if VAR is a VAR_DECL or a component thereof.  */

static bool
var_decl_component_p (tree var)
{
  tree inner = var;
  while (handled_component_p (inner))
    inner = TREE_OPERAND (inner, 0);
  return (DECL_P (inner)
	  || (TREE_CODE (inner) == MEM_REF
	      && TREE_CODE (TREE_OPERAND (inner, 0)) == ADDR_EXPR));
}

/* Return TRUE if the SIZE argument, representing the size of an
   object, is in a range of values of which exactly zero is valid.  */

static bool
size_must_be_zero_p (tree size)
{
  if (integer_zerop (size))
    return true;

  if (TREE_CODE (size) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (size)))
    return false;

  tree type = TREE_TYPE (size);
  int prec = TYPE_PRECISION (type);

  /* Compute the value of SSIZE_MAX, the largest positive value that
     can be stored in ssize_t, the signed counterpart of size_t.  */
  wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
  value_range valid_range (build_int_cst (type, 0),
			   wide_int_to_tree (type, ssize_max));
  value_range vr;
  get_range_info (size, vr);
  vr.intersect (&valid_range);
  return vr.zero_p ();
}

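/* For example, if range information proves that SIZE is either zero or
   greater than SSIZE_MAX, intersecting its range with [0, SSIZE_MAX]
   leaves only zero and the function returns true; for a size_t value
   with no range information the full range survives and the function
   returns false.  */
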
/* Fold function call to builtin mem{{,p}cpy,move}.  Try to detect and
   diagnose (otherwise undefined) overlapping copies without preventing
   folding.  When folded, GCC guarantees that overlapping memcpy has
   the same semantics as memmove.  Calls to the library memcpy need not
   provide the same guarantee.  Return false if no simplification can
   be made.  */

static bool
gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
			       tree dest, tree src, enum built_in_function code)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree len = gimple_call_arg (stmt, 2);
  tree destvar, srcvar;
  location_t loc = gimple_location (stmt);

  /* If the LEN parameter is a constant zero or in range where
     the only valid value is zero, return DEST.  */
  if (size_must_be_zero_p (len))
    {
      gimple *repl;
      if (gimple_call_lhs (stmt))
	repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
      else
	repl = gimple_build_nop ();
      tree vdef = gimple_vdef (stmt);
      if (vdef && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
      gsi_replace (gsi, repl, false);
      return true;
    }

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    {
      /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
	 It's safe and may even be emitted by GCC itself (see bug
	 32667).  */
      unlink_stmt_vdef (stmt);
      if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	release_ssa_name (gimple_vdef (stmt));
      if (!lhs)
	{
	  gsi_replace (gsi, gimple_build_nop (), false);
	  return true;
	}
      goto done;
    }
  else
    {
      tree srctype, desttype;
      unsigned int src_align, dest_align;
      tree off0;
      const char *tmp_str;
      unsigned HOST_WIDE_INT tmp_len;

      /* Build accesses at offset zero with a ref-all character type.  */
      off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
							 ptr_mode, true), 0);

      /* If we can perform the copy efficiently with first doing all loads
	 and then all stores inline it that way.  Currently efficiently
	 means that we can load all the memory into a single integer
	 register which is what MOVE_MAX gives us.  */
      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (tree_fits_uhwi_p (len)
	  && compare_tree_int (len, MOVE_MAX) <= 0
	  /* FIXME: Don't transform copies from strings with known length.
	     Until GCC 9 this prevented a case in gcc.dg/strlenopt-8.c
	     from being handled, and the case was XFAILed for that reason.
	     Now that it is handled and the XFAIL removed, as soon as other
	     strlenopt tests that rely on it for passing are adjusted, this
	     hack can be removed.  */
	  && !c_strlen (src, 1)
	  && !((tmp_str = c_getstr (src, &tmp_len)) != NULL
	       && memchr (tmp_str, 0, tmp_len) == NULL))
	{
	  unsigned ilen = tree_to_uhwi (len);
	  if (pow2p_hwi (ilen))
	    {
	      /* Detect out-of-bounds accesses without issuing warnings.
		 Avoid folding out-of-bounds copies but to avoid false
		 positives for unreachable code defer warning until after
		 DCE has worked its magic.
		 -Wrestrict is still diagnosed.  */
	      if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
							 dest, src, len, len,
							 false, false))
		if (warning != OPT_Wrestrict)
		  return false;

	      scalar_int_mode mode;
	      tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
	      if (type
		  && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
		  && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
		  /* If the destination pointer is not aligned we must be able
		     to emit an unaligned store.  */
		  && (dest_align >= GET_MODE_ALIGNMENT (mode)
		      || !targetm.slow_unaligned_access (mode, dest_align)
		      || (optab_handler (movmisalign_optab, mode)
			  != CODE_FOR_nothing)))
		{
		  tree srctype = type;
		  tree desttype = type;
		  if (src_align < GET_MODE_ALIGNMENT (mode))
		    srctype = build_aligned_type (type, src_align);
		  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
		  tree tem = fold_const_aggregate_ref (srcmem);
		  if (tem)
		    srcmem = tem;
		  else if (src_align < GET_MODE_ALIGNMENT (mode)
			   && targetm.slow_unaligned_access (mode, src_align)
			   && (optab_handler (movmisalign_optab, mode)
			       == CODE_FOR_nothing))
		    srcmem = NULL_TREE;
		  if (srcmem)
		    {
		      gimple *new_stmt;
		      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
			{
			  new_stmt = gimple_build_assign (NULL_TREE, srcmem);
			  srcmem
			    = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
							  new_stmt);
			  gimple_assign_set_lhs (new_stmt, srcmem);
			  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      if (dest_align < GET_MODE_ALIGNMENT (mode))
			desttype = build_aligned_type (type, dest_align);
		      new_stmt
			= gimple_build_assign (fold_build2 (MEM_REF, desttype,
							    dest, off0),
					       srcmem);
		      gimple_move_vops (new_stmt, stmt);
		      if (!lhs)
			{
			  gsi_replace (gsi, new_stmt, false);
			  return true;
			}
		      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
		      goto done;
		    }
		}
	    }
	}

      if (code == BUILT_IN_MEMMOVE)
	{
	  /* Both DEST and SRC must be pointer types.
	     ??? This is what old code did.  Is the testing for pointer types
	     really mandatory?

	     If either SRC is readonly or length is 1, we can use memcpy.  */
	  if (!dest_align || !src_align)
	    return false;
	  if (readonly_data_expr (src)
	      || (tree_fits_uhwi_p (len)
		  && (MIN (src_align, dest_align) / BITS_PER_UNIT
		      >= tree_to_uhwi (len))))
	    {
	      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If *src and *dest can't overlap, optimize into memcpy as well.  */
	  if (TREE_CODE (src) == ADDR_EXPR
	      && TREE_CODE (dest) == ADDR_EXPR)
	    {
	      tree src_base, dest_base, fn;
	      poly_int64 src_offset = 0, dest_offset = 0;
	      poly_uint64 maxsize;

	      srcvar = TREE_OPERAND (src, 0);
	      src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
	      if (src_base == NULL)
		src_base = srcvar;
	      destvar = TREE_OPERAND (dest, 0);
	      dest_base = get_addr_base_and_unit_offset (destvar,
							 &dest_offset);
	      if (dest_base == NULL)
		dest_base = destvar;
	      if (!poly_int_tree_p (len, &maxsize))
		maxsize = -1;
	      if (SSA_VAR_P (src_base)
		  && SSA_VAR_P (dest_base))
		{
		  if (operand_equal_p (src_base, dest_base, 0)
		      && ranges_maybe_overlap_p (src_offset, maxsize,
						 dest_offset, maxsize))
		    return false;
		}
	      else if (TREE_CODE (src_base) == MEM_REF
		       && TREE_CODE (dest_base) == MEM_REF)
		{
		  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
					 TREE_OPERAND (dest_base, 0), 0))
		    return false;
		  poly_offset_int full_src_offset
		    = mem_ref_offset (src_base) + src_offset;
		  poly_offset_int full_dest_offset
		    = mem_ref_offset (dest_base) + dest_offset;
		  if (ranges_maybe_overlap_p (full_src_offset, maxsize,
					      full_dest_offset, maxsize))
		    return false;
		}
	      else
		return false;

	      fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If the destination and source do not alias optimize into
	     memcpy as well.  */
	  if ((is_gimple_min_invariant (dest)
	       || TREE_CODE (dest) == SSA_NAME)
	      && (is_gimple_min_invariant (src)
		  || TREE_CODE (src) == SSA_NAME))
	    {
	      ao_ref destr, srcr;
	      ao_ref_init_from_ptr_and_size (&destr, dest, len);
	      ao_ref_init_from_ptr_and_size (&srcr, src, len);
	      if (!refs_may_alias_p_1 (&destr, &srcr, false))
		{
		  tree fn;
		  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
		  if (!fn)
		    return false;
		  gimple_call_set_fndecl (stmt, fn);
		  gimple_call_set_arg (stmt, 0, dest);
		  gimple_call_set_arg (stmt, 1, src);
		  fold_stmt (gsi);
		  return true;
		}
	    }

	  return false;
	}

      if (!tree_fits_shwi_p (len))
	return false;
      if (!POINTER_TYPE_P (TREE_TYPE (src))
	  || !POINTER_TYPE_P (TREE_TYPE (dest)))
	return false;
      /* In the following try to find a type that is most natural to be
	 used for the memcpy source and destination and that allows
	 the most optimization when memcpy is turned into a plain assignment
	 using that type.  In theory we could always use a char[len] type
	 but that only gains us that the destination and source possibly
	 no longer will have their address taken.  */
      srctype = TREE_TYPE (TREE_TYPE (src));
      if (TREE_CODE (srctype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	srctype = TREE_TYPE (srctype);
      desttype = TREE_TYPE (TREE_TYPE (dest));
      if (TREE_CODE (desttype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	desttype = TREE_TYPE (desttype);
      if (TREE_ADDRESSABLE (srctype)
	  || TREE_ADDRESSABLE (desttype))
	return false;

      /* Make sure we are not copying using a floating-point mode or
	 a type whose size possibly does not match its precision.  */
      if (FLOAT_MODE_P (TYPE_MODE (desttype))
	  || TREE_CODE (desttype) == BOOLEAN_TYPE
	  || TREE_CODE (desttype) == ENUMERAL_TYPE)
	desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
      if (FLOAT_MODE_P (TYPE_MODE (srctype))
	  || TREE_CODE (srctype) == BOOLEAN_TYPE
	  || TREE_CODE (srctype) == ENUMERAL_TYPE)
	srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
      if (!srctype)
	srctype = desttype;
      if (!desttype)
	desttype = srctype;
      if (!srctype)
	return false;

      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);

      /* Choose between src and destination type for the access based
	 on alignment, whether the access constitutes a register access
	 and whether it may actually expose a declaration for SSA rewrite
	 or SRA decomposition.  */
      destvar = NULL_TREE;
      srcvar = NULL_TREE;
      if (TREE_CODE (dest) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (dest, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len)
	  && dest_align >= TYPE_ALIGN (desttype)
	  && (is_gimple_reg_type (desttype)
	      || src_align >= TYPE_ALIGN (desttype)))
	destvar = fold_build2 (MEM_REF, desttype, dest, off0);
      else if (TREE_CODE (src) == ADDR_EXPR
	       && var_decl_component_p (TREE_OPERAND (src, 0))
	       && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
	       && src_align >= TYPE_ALIGN (srctype)
	       && (is_gimple_reg_type (srctype)
		   || dest_align >= TYPE_ALIGN (srctype)))
	srcvar = fold_build2 (MEM_REF, srctype, src, off0);
      if (srcvar == NULL_TREE && destvar == NULL_TREE)
	return false;

      /* Now that we chose an access type express the other side in
	 terms of it if the target allows that with respect to alignment
	 constraints.  */
      if (srcvar == NULL_TREE)
	{
	  if (src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, desttype, src, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return false;
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	    }
	}
      else if (destvar == NULL_TREE)
	{
	  if (dest_align >= TYPE_ALIGN (srctype))
	    destvar = fold_build2 (MEM_REF, srctype, dest, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return false;
	      desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
					     dest_align);
	      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
	    }
	}

      /* Same as above, detect out-of-bounds accesses without issuing
	 warnings.  Avoid folding out-of-bounds copies but to avoid
	 false positives for unreachable code defer warning until
	 after DCE has worked its magic.
	 -Wrestrict is still diagnosed.  */
      if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
						 dest, src, len, len,
						 false, false))
	if (warning != OPT_Wrestrict)
	  return false;

      gimple *new_stmt;
      if (is_gimple_reg_type (TREE_TYPE (srcvar)))
	{
	  tree tem = fold_const_aggregate_ref (srcvar);
	  if (tem)
	    srcvar = tem;
	  if (! is_gimple_min_invariant (srcvar))
	    {
	      new_stmt = gimple_build_assign (NULL_TREE, srcvar);
	      srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
						   new_stmt);
	      gimple_assign_set_lhs (new_stmt, srcvar);
	      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
	    }
	  new_stmt = gimple_build_assign (destvar, srcvar);
	  goto set_vop_and_replace;
	}

      /* We get an aggregate copy.  Use an unsigned char[] type to
	 perform the copying to preserve padding and to avoid any issues
	 with TREE_ADDRESSABLE types or float modes behavior on copying.  */
      desttype = build_array_type_nelts (unsigned_char_type_node,
					 tree_to_uhwi (len));
      srctype = desttype;
      if (src_align > TYPE_ALIGN (srctype))
	srctype = build_aligned_type (srctype, src_align);
      if (dest_align > TYPE_ALIGN (desttype))
	desttype = build_aligned_type (desttype, dest_align);
      new_stmt
	= gimple_build_assign (fold_build2 (MEM_REF, desttype, dest, off0),
			       fold_build2 (MEM_REF, srctype, src, off0));
set_vop_and_replace:
      gimple_move_vops (new_stmt, stmt);
      if (!lhs)
	{
	  gsi_replace (gsi, new_stmt, false);
	  return true;
	}
      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
    }

done:
  gimple_seq stmts = NULL;
  if (code == BUILT_IN_MEMCPY || code == BUILT_IN_MEMMOVE)
    len = NULL_TREE;
  else if (code == BUILT_IN_MEMPCPY)
    {
      len = gimple_convert_to_ptrofftype (&stmts, loc, len);
      dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
			   TREE_TYPE (dest), dest, len);
    }
  else
    gcc_unreachable ();

  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gimple *repl = gimple_build_assign (lhs, dest);
  gsi_replace (gsi, repl, false);
  return true;
}

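/* A typical folding performed above (illustrative only): for
   struct S a, b; the call memcpy (&a, &b, sizeof a) becomes an aggregate
   assignment from b to a, expressed through MEM_REFs (of an unsigned
   char array type when needed to preserve padding), while mempcpy in
   addition makes the original call's lhs equal to dest + len.  */
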
/* Transform a call to built-in bcmp (a, b, len) at *GSI into one
   to built-in memcmp (a, b, len).  */

static bool
gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);

  if (!fn)
    return false;

  /* Transform bcmp (a, b, len) into memcmp (a, b, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree a = gimple_call_arg (stmt, 0);
  tree b = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  gimple *repl = gimple_build_call (fn, 3, a, b, len);
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}

/* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
   to built-in memmove (dest, src, len).  */

static bool
gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);

  if (!fn)
    return false;

  /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
     it's equivalent to memmove (not memcpy).  Transform bcopy (src, dest,
     len) into memmove (dest, src, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree src = gimple_call_arg (stmt, 0);
  tree dest = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}

/* Transform a call to built-in bzero (dest, len) at *GSI into one
   to built-in memset (dest, 0, len).  */

static bool
gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);

  if (!fn)
    return false;

  /* Transform bzero (dest, len) into memset (dest, 0, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree len = gimple_call_arg (stmt, 1);

  gimple_seq seq = NULL;
  gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
  gimple_seq_add_stmt_without_update (&seq, repl);
  gsi_replace_with_seq_vops (gsi, seq);
  fold_stmt (gsi);

  return true;
}

/* Fold function call to builtin memset or bzero at *GSI setting the
   memory of size LEN to VAL.  Return whether a simplification was made.  */

static bool
gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree etype;
  unsigned HOST_WIDE_INT length, cval;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
      return true;
    }

  if (! tree_fits_uhwi_p (len))
    return false;

  if (TREE_CODE (c) != INTEGER_CST)
    return false;

  tree dest = gimple_call_arg (stmt, 0);
  tree var = dest;
  if (TREE_CODE (var) != ADDR_EXPR)
    return false;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return false;

  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  if (!INTEGRAL_TYPE_P (etype)
      && !POINTER_TYPE_P (etype))
    return NULL_TREE;

  if (! var_decl_component_p (var))
    return NULL_TREE;

  length = tree_to_uhwi (len);
  if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
      || (GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (etype))
	  != GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (etype)))
      || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
    return NULL_TREE;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return NULL_TREE;

  if (!type_has_mode_precision_p (etype))
    etype = lang_hooks.types.type_for_mode (SCALAR_INT_TYPE_MODE (etype),
					    TYPE_UNSIGNED (etype));

  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
	return NULL_TREE;

      cval = TREE_INT_CST_LOW (c);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }

  var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
  gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
  gimple_move_vops (store, stmt);
  gsi_insert_before (gsi, store, GSI_SAME_STMT);
  if (gimple_call_lhs (stmt))
    {
      gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
      gsi_replace (gsi, asgn, false);
    }
  else
    {
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (gsi);
      gsi_remove (&gsi2, true);
    }

  return true;
}

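/* As a concrete illustration of the folding above (an assumed example,
   not part of the original sources): with 8-bit bytes and a 32-bit int,

       int i;
       memset (&i, 0xab, sizeof i);

   replicates the byte 0xab into 0xabababab and replaces the call with
   the single store MEM[(int *)&i] = 0xabababab, followed, when the call
   has an lhs, by an assignment of &i to it.  */
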
/* Helper of get_range_strlen for ARG that is not an SSA_NAME.  */

static bool
get_range_strlen_tree (tree arg, bitmap *visited, strlen_range_kind rkind,
		       c_strlen_data *pdata, unsigned eltsize)
{
  gcc_assert (TREE_CODE (arg) != SSA_NAME);

  /* The length computed by this invocation of the function.  */
  tree val = NULL_TREE;

  /* True if VAL is an optimistic (tight) bound determined from
     the size of the character array in which the string may be
     stored.  In that case, the computed VAL is used to set
     PDATA->MAXBOUND.  */
  bool tight_bound = false;

  /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
    {
      tree op = TREE_OPERAND (arg, 0);
      if (integer_zerop (TREE_OPERAND (op, 1)))
	{
	  tree aop0 = TREE_OPERAND (op, 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_range_strlen (TREE_OPERAND (aop0, 0), visited, rkind,
				     pdata, eltsize);
	}
      else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF
	       && rkind == SRK_LENRANGE)
	{
	  /* Fail if an array is the last member of a struct object
	     since it could be treated as a (fake) flexible array
	     member.  */
	  tree idx = TREE_OPERAND (op, 1);

	  arg = TREE_OPERAND (op, 0);
	  tree optype = TREE_TYPE (arg);
	  if (tree dom = TYPE_DOMAIN (optype))
	    if (tree bound = TYPE_MAX_VALUE (dom))
	      if (TREE_CODE (bound) == INTEGER_CST
		  && TREE_CODE (idx) == INTEGER_CST
		  && tree_int_cst_lt (bound, idx))
		return false;
	}
    }

  if (rkind == SRK_INT_VALUE)
    {
      /* We are computing the maximum value (not string length).  */
      val = arg;
      if (TREE_CODE (val) != INTEGER_CST
	  || tree_int_cst_sgn (val) < 0)
	return false;
    }
  else
    {
      c_strlen_data lendata = { };
      val = c_strlen (arg, 1, &lendata, eltsize);

      if (!val && lendata.decl)
	{
	  /* ARG refers to an unterminated const character array.
	     DATA.DECL with size DATA.LEN.  */
	  val = lendata.minlen;
	  pdata->decl = lendata.decl;
	}
    }

  /* Set if VAL represents the maximum length based on array size (set
     when exact length cannot be determined).  */
  bool maxbound = false;

  if (!val && rkind == SRK_LENRANGE)
    {
      if (TREE_CODE (arg) == ADDR_EXPR)
	return get_range_strlen (TREE_OPERAND (arg, 0), visited, rkind,
				 pdata, eltsize);

      if (TREE_CODE (arg) == ARRAY_REF)
	{
	  tree optype = TREE_TYPE (TREE_OPERAND (arg, 0));

	  /* Determine the "innermost" array type.  */
	  while (TREE_CODE (optype) == ARRAY_TYPE
		 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
	    optype = TREE_TYPE (optype);

	  /* Avoid arrays of pointers.  */
	  tree eltype = TREE_TYPE (optype);
	  if (TREE_CODE (optype) != ARRAY_TYPE
	      || !INTEGRAL_TYPE_P (eltype))
	    return false;

	  /* Fail when the array bound is unknown or zero.  */
	  val = TYPE_SIZE_UNIT (optype);
	  if (!val
	      || TREE_CODE (val) != INTEGER_CST
	      || integer_zerop (val))
	    return false;

	  val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
			     integer_one_node);

	  /* Set the minimum size to zero since the string in
	     the array could have zero length.  */
	  pdata->minlen = ssize_int (0);

	  tight_bound = true;
	}
      else if (TREE_CODE (arg) == COMPONENT_REF
	       && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
		   == ARRAY_TYPE))
	{
	  /* Use the type of the member array to determine the upper
	     bound on the length of the array.  This may be overly
	     optimistic if the array itself isn't NUL-terminated and
	     the caller relies on the subsequent member to contain
	     the NUL but that would only be considered valid if
	     the array were the last member of a struct.  */

	  tree fld = TREE_OPERAND (arg, 1);

	  tree optype = TREE_TYPE (fld);

	  /* Determine the "innermost" array type.  */
	  while (TREE_CODE (optype) == ARRAY_TYPE
		 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
	    optype = TREE_TYPE (optype);

	  /* Fail when the array bound is unknown or zero.  */
	  val = TYPE_SIZE_UNIT (optype);
	  if (!val
	      || TREE_CODE (val) != INTEGER_CST
	      || integer_zerop (val))
	    return false;
	  val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
			     integer_one_node);

	  /* Set the minimum size to zero since the string in
	     the array could have zero length.  */
	  pdata->minlen = ssize_int (0);

	  /* The array size determined above is an optimistic bound
	     on the length.  If the array isn't nul-terminated the
	     length computed by the library function would be greater.
	     Even though using strlen to cross the subobject boundary
	     is undefined, avoid drawing conclusions from the member
	     type about the length here.  */
	  tight_bound = true;
	}
      else if (TREE_CODE (arg) == MEM_REF
	       && TREE_CODE (TREE_TYPE (arg)) == ARRAY_TYPE
	       && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == INTEGER_TYPE
	       && TREE_CODE (TREE_OPERAND (arg, 0)) == ADDR_EXPR)
	{
	  /* Handle a MEM_REF into a DECL accessing an array of integers,
	     being conservative about references to extern structures with
	     flexible array members that can be initialized to arbitrary
	     numbers of elements as an extension (static structs are okay).
	     FIXME: Make this less conservative -- see
	     component_ref_size in tree.c.  */
	  tree ref = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if ((TREE_CODE (ref) == PARM_DECL || VAR_P (ref))
	      && (decl_binds_to_current_def_p (ref)
		  || !array_at_struct_end_p (arg)))
	    {
	      /* Fail if the offset is out of bounds.  Such accesses
		 should be diagnosed at some point.  */
	      val = DECL_SIZE_UNIT (ref);
	      if (!val
		  || TREE_CODE (val) != INTEGER_CST
		  || integer_zerop (val))
		return false;

	      poly_offset_int psiz = wi::to_offset (val);
	      poly_offset_int poff = mem_ref_offset (arg);
	      if (known_le (psiz, poff))
		return false;

	      pdata->minlen = ssize_int (0);

	      /* Subtract the offset and one for the terminating nul.  */
	      psiz -= poff;
	      psiz -= 1;
	      val = wide_int_to_tree (TREE_TYPE (val), psiz);
	      /* Since VAL reflects the size of a declared object
		 rather than the type of the access it is not a tight
		 bound.  */
	    }
	}
      else if (TREE_CODE (arg) == PARM_DECL || VAR_P (arg))
	{
	  /* Avoid handling pointers to arrays.  GCC might misuse
	     a pointer to an array of one bound to point to an array
	     object of a greater bound.  */
	  tree argtype = TREE_TYPE (arg);
	  if (TREE_CODE (argtype) == ARRAY_TYPE)
	    {
	      val = TYPE_SIZE_UNIT (argtype);
	      if (!val
		  || TREE_CODE (val) != INTEGER_CST
		  || integer_zerop (val))
		return false;
	      val = wide_int_to_tree (TREE_TYPE (val),
				      wi::sub (wi::to_wide (val), 1));

	      /* Set the minimum size to zero since the string in
		 the array could have zero length.  */
	      pdata->minlen = ssize_int (0);
	    }
	}
      maxbound = true;
    }

  if (!val)
    return false;

  /* Adjust the lower bound on the string length as necessary.  */
  if (!pdata->minlen
      || (rkind != SRK_STRLEN
	  && TREE_CODE (pdata->minlen) == INTEGER_CST
	  && TREE_CODE (val) == INTEGER_CST
	  && tree_int_cst_lt (val, pdata->minlen)))
    pdata->minlen = val;

  if (pdata->maxbound && TREE_CODE (pdata->maxbound) == INTEGER_CST)
    {
      /* Adjust the tighter (more optimistic) string length bound
	 if necessary and proceed to adjust the more conservative
	 bound.  */
      if (TREE_CODE (val) == INTEGER_CST)
	{
	  if (tree_int_cst_lt (pdata->maxbound, val))
	    pdata->maxbound = val;
	}
      else
	pdata->maxbound = val;
    }
  else if (pdata->maxbound || maxbound)
    /* Set PDATA->MAXBOUND only if it either isn't INTEGER_CST or
       if VAL corresponds to the maximum length determined based
       on the type of the object.  */
    pdata->maxbound = val;

  if (tight_bound)
    {
      /* VAL computed above represents an optimistically tight bound
	 on the length of the string based on the referenced object's
	 or subobject's type.  Determine the conservative upper bound
	 based on the enclosing object's size if possible.  */
      if (rkind == SRK_LENRANGE)
	{
	  poly_int64 offset;
	  tree base = get_addr_base_and_unit_offset (arg, &offset);
	  if (!base)
	    {
	      /* When the call above fails due to a non-constant offset
		 assume the offset is zero and use the size of the whole
		 enclosing object instead.  */
	      base = get_base_address (arg);
	      offset = 0;
	    }
	  /* If the base object is a pointer no upper bound on the length
	     can be determined.  Otherwise the maximum length is equal to
	     the size of the enclosing object minus the offset of
	     the referenced subobject minus 1 (for the terminating nul).  */
	  tree type = TREE_TYPE (base);
	  if (TREE_CODE (type) == POINTER_TYPE
	      || (TREE_CODE (base) != PARM_DECL && !VAR_P (base))
	      || !(val = DECL_SIZE_UNIT (base)))
	    val = build_all_ones_cst (size_type_node);
	  else
	    {
	      val = DECL_SIZE_UNIT (base);
	      val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
				 size_int (offset + 1));
	    }
	}
      else
	return false;
    }

  if (pdata->maxlen)
    {
      /* Adjust the more conservative bound if possible/necessary
	 and fail otherwise.  */
      if (rkind != SRK_STRLEN)
	{
	  if (TREE_CODE (pdata->maxlen) != INTEGER_CST
	      || TREE_CODE (val) != INTEGER_CST)
	    return false;

	  if (tree_int_cst_lt (pdata->maxlen, val))
	    pdata->maxlen = val;
	  return true;
	}
      else if (simple_cst_equal (val, pdata->maxlen) != 1)
	{
	  /* Fail if the length of this ARG is different from that
	     previously determined from another ARG.  */
	  return false;
	}
    }

  pdata->maxlen = val;
  return rkind == SRK_LENRANGE || !integer_all_onesp (val);
}

1592
5d6655eb
MS
1593/* For an ARG referencing one or more strings, try to obtain the range
1594 of their lengths, or the size of the largest array ARG referes to if
1595 the range of lengths cannot be determined, and store all in *PDATA.
1596 For an integer ARG (when RKIND == SRK_INT_VALUE), try to determine
1597 the maximum constant value.
1598 If ARG is an SSA_NAME, follow its use-def chains. When RKIND ==
1599 SRK_STRLEN, then if PDATA->MAXLEN is not equal to the determined
1600 length or if we are unable to determine the length, return false.
fb471a13 1601 VISITED is a bitmap of visited variables.
598f7235
MS
1602 RKIND determines the kind of value or range to obtain (see
1603 strlen_range_kind).
1604 Set PDATA->DECL if ARG refers to an unterminated constant array.
1605 On input, set ELTSIZE to 1 for normal single byte character strings,
1606 and either 2 or 4 for wide character strings (the size of wchar_t).
1607 Return true if *PDATA was successfully populated and false otherwise. */
fb471a13
MS
1608
1609static bool
03c4a945
MS
1610get_range_strlen (tree arg, bitmap *visited,
1611 strlen_range_kind rkind,
1612 c_strlen_data *pdata, unsigned eltsize)
fb471a13
MS
1613{
1614
1615 if (TREE_CODE (arg) != SSA_NAME)
03c4a945 1616 return get_range_strlen_tree (arg, visited, rkind, pdata, eltsize);
fb471a13 1617
fef5a0d9
RB
1618 /* If ARG is registered for SSA update we cannot look at its defining
1619 statement. */
1620 if (name_registered_for_update_p (arg))
1621 return false;
1622
1623 /* If we were already here, break the infinite cycle. */
dcb7fae2
RB
1624 if (!*visited)
1625 *visited = BITMAP_ALLOC (NULL);
1626 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
fef5a0d9
RB
1627 return true;
1628
fb471a13
MS
1629 tree var = arg;
1630 gimple *def_stmt = SSA_NAME_DEF_STMT (var);
1631
fef5a0d9
RB
1632 switch (gimple_code (def_stmt))
1633 {
1634 case GIMPLE_ASSIGN:
598f7235
MS
1635 /* The RHS of the statement defining VAR must either have a
1636 constant length or come from another SSA_NAME with a constant
1637 length. */
fef5a0d9
RB
1638 if (gimple_assign_single_p (def_stmt)
1639 || gimple_assign_unary_nop_p (def_stmt))
1640 {
598f7235 1641 tree rhs = gimple_assign_rhs1 (def_stmt);
03c4a945 1642 return get_range_strlen (rhs, visited, rkind, pdata, eltsize);
fef5a0d9
RB
1643 }
1644 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1645 {
c8602fe6
JJ
1646 tree ops[2] = { gimple_assign_rhs2 (def_stmt),
1647 gimple_assign_rhs3 (def_stmt) };
1648
1649 for (unsigned int i = 0; i < 2; i++)
03c4a945 1650 if (!get_range_strlen (ops[i], visited, rkind, pdata, eltsize))
c8602fe6 1651 {
84de9426 1652 if (rkind != SRK_LENRANGE)
c8602fe6 1653 return false;
80c2bad6
MS
1654 /* Set the upper bound to the maximum to prevent
1655 it from being adjusted in the next iteration but
1656 leave MINLEN and the more conservative MAXBOUND
1657 determined so far alone (or leave them null if
1658 they haven't been set yet). That the MINLEN is
1659 in fact zero can be determined from MAXLEN being
1660 unbounded but the discovered minimum is used for
1661 diagnostics. */
730832cd 1662 pdata->maxlen = build_all_ones_cst (size_type_node);
c8602fe6
JJ
1663 }
1664 return true;
cc8bea0a 1665 }
fef5a0d9
RB
1666 return false;
1667
1668 case GIMPLE_PHI:
598f7235
MS
1669 /* Unless RKIND == SRK_LENRANGE, all arguments of the PHI node
1670 must have a constant length. */
c8602fe6 1671 for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
fef5a0d9
RB
1672 {
1673 tree arg = gimple_phi_arg (def_stmt, i)->def;
1674
1675 /* If this PHI has itself as an argument, we cannot
1676 determine the string length of this argument. However,
1677 if we can find a constant string length for the other
1678 PHI args then we can still be sure that this is a
1679 constant string length. So be optimistic and just
1680 continue with the next argument. */
1681 if (arg == gimple_phi_result (def_stmt))
1682 continue;
1683
03c4a945 1684 if (!get_range_strlen (arg, visited, rkind, pdata, eltsize))
88d0c3f0 1685 {
84de9426 1686 if (rkind != SRK_LENRANGE)
88d0c3f0 1687 return false;
80c2bad6
MS
1688 /* Set the upper bound to the maximum to prevent
1689 it from being adjusted in the next iteration but
1690 leave MINLEN and the more conservative MAXBOUND
1691 determined so far alone (or leave them null if
1692 they haven't been set yet). That the MINLEN is
1693 in fact zero can be determined from MAXLEN being
1694 unbounded but the discovered minimum is used for
1695 diagnostics. */
730832cd 1696 pdata->maxlen = build_all_ones_cst (size_type_node);
88d0c3f0 1697 }
fef5a0d9 1698 }
fef5a0d9
RB
1699 return true;
1700
1701 default:
1702 return false;
1703 }
1704}
5d6655eb 1705
97623b52
MS
1706/* Try to obtain the range of the lengths of the string(s) referenced
1707 by ARG, or the size of the largest array ARG refers to if the range
a7160771
MS
1708 of lengths cannot be determined, and store all in *PDATA which must
1709 be zero-initialized on input except PDATA->MAXBOUND may be set to
1710 a non-null tree node other than INTEGER_CST to request to have it
1711 set to the length of the longest string in a PHI. ELTSIZE is
1712 the expected size of the string element in bytes: 1 for char and
97623b52
MS
1713 some power of 2 for wide characters.
1714 Return true if the range [PDATA->MINLEN, PDATA->MAXLEN] is suitable
1715 for optimization. Returning false means that a nonzero PDATA->MINLEN
1716 doesn't reflect the true lower bound of the range when PDATA->MAXLEN
1717 is -1 (in that case, the actual range is indeterminate, i.e.,
1718 [0, PTRDIFF_MAX - 2]). */
88d0c3f0 1719
3f343040 1720bool
84de9426 1721get_range_strlen (tree arg, c_strlen_data *pdata, unsigned eltsize)
88d0c3f0
MS
1722{
1723 bitmap visited = NULL;
a7160771 1724 tree maxbound = pdata->maxbound;
88d0c3f0 1725
84de9426 1726 if (!get_range_strlen (arg, &visited, SRK_LENRANGE, pdata, eltsize))
730832cd 1727 {
5d6655eb
MS
1728 /* On failure extend the length range to an impossible maximum
1729 (a valid MAXLEN must be less than PTRDIFF_MAX - 1). Other
1730 members can stay unchanged regardless. */
1731 pdata->minlen = ssize_int (0);
1732 pdata->maxlen = build_all_ones_cst (size_type_node);
730832cd 1733 }
5d6655eb
MS
1734 else if (!pdata->minlen)
1735 pdata->minlen = ssize_int (0);
1736
a7160771
MS
1737 /* If it's unchanged from its initial non-null value, set the conservative
1738 MAXBOUND to SIZE_MAX. Otherwise leave it null (if it is null). */
1739 if (maxbound && pdata->maxbound == maxbound)
1740 pdata->maxbound = build_all_ones_cst (size_type_node);
88d0c3f0
MS
1741
1742 if (visited)
1743 BITMAP_FREE (visited);
3f343040 1744
03c4a945 1745 return !integer_all_onesp (pdata->maxlen);
88d0c3f0
MS
1746}
1747
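/* A minimal usage sketch of the overload above (hypothetical caller;
   none of these local names exist elsewhere in this file). The data
   block must be zero-initialized before the call, and ELTSIZE is 1
   for ordinary char strings:

     c_strlen_data lendata = { };
     bool bounded = get_range_strlen (arg, &lendata, 1);
     // On success, lendata.minlen and lendata.maxlen bound strlen (arg)
     // and lendata.maxbound, if set, is the more conservative bound.
     // On failure, lendata.maxlen is all ones and the range is unknown.
*/
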
5d6655eb
MS
1748/* Return the maximum value for ARG given RKIND (see strlen_range_kind).
1749 For ARG of pointer types, NONSTR indicates if the caller is prepared
1750 to handle unterminated strings. For integer ARG and when RKIND ==
1751 SRK_INT_VALUE, NONSTR must be null.
e08341bb 1752
5d6655eb
MS
1753 If an unterminated array is discovered and our caller handles
1754 unterminated arrays, then bubble up the offending DECL and
e08341bb
MS
1755 return the maximum size. Otherwise return NULL. */
1756
598f7235
MS
1757static tree
1758get_maxval_strlen (tree arg, strlen_range_kind rkind, tree *nonstr = NULL)
dcb7fae2 1759{
598f7235
MS
1760 /* A non-null NONSTR is meaningless when determining the maximum
1761 value of an integer ARG. */
1762 gcc_assert (rkind != SRK_INT_VALUE || nonstr == NULL);
1763 /* ARG must have an integral type when RKIND says so. */
1764 gcc_assert (rkind != SRK_INT_VALUE || INTEGRAL_TYPE_P (TREE_TYPE (arg)));
1765
dcb7fae2 1766 bitmap visited = NULL;
3f343040 1767
5d6655eb
MS
1768 /* Reset DATA.MAXLEN if the call fails or when DATA.MAXLEN
1769 is unbounded. */
730832cd 1770 c_strlen_data lendata = { };
03c4a945 1771 if (!get_range_strlen (arg, &visited, rkind, &lendata, /* eltsize = */1))
730832cd 1772 lendata.maxlen = NULL_TREE;
5d6655eb
MS
1773 else if (lendata.maxlen && integer_all_onesp (lendata.maxlen))
1774 lendata.maxlen = NULL_TREE;
1775
dcb7fae2
RB
1776 if (visited)
1777 BITMAP_FREE (visited);
1778
e08341bb
MS
1779 if (nonstr)
1780 {
1781 /* For callers prepared to handle unterminated arrays set
1782 *NONSTR to point to the declaration of the array and return
1783 the maximum length/size. */
730832cd
MS
1784 *nonstr = lendata.decl;
1785 return lendata.maxlen;
e08341bb
MS
1786 }
1787
1788 /* Fail if the constant array isn't nul-terminated. */
730832cd 1789 return lendata.decl ? NULL_TREE : lendata.maxlen;
dcb7fae2
RB
1790}
1791
fef5a0d9
RB
1792
1793/* Fold function call to builtin strcpy with arguments DEST and SRC.
 1794 When the source length is known the call may be turned into a memcpy.
 1795 Return false if no simplification can be made. */
1796
1797static bool
1798gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
dcb7fae2 1799 tree dest, tree src)
fef5a0d9 1800{
cc8bea0a
MS
1801 gimple *stmt = gsi_stmt (*gsi);
1802 location_t loc = gimple_location (stmt);
fef5a0d9
RB
1803 tree fn;
1804
1805 /* If SRC and DEST are the same (and not volatile), return DEST. */
1806 if (operand_equal_p (src, dest, 0))
1807 {
8cd95cec
MS
1808 /* Issue -Wrestrict unless the pointers are null (those do
1809 not point to objects and so do not indicate an overlap;
1810 such calls could be the result of sanitization and jump
1811 threading). */
1812 if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
e9b9fa4c
MS
1813 {
1814 tree func = gimple_call_fndecl (stmt);
cc8bea0a 1815
e9b9fa4c
MS
1816 warning_at (loc, OPT_Wrestrict,
1817 "%qD source argument is the same as destination",
1818 func);
1819 }
cc8bea0a 1820
fef5a0d9
RB
1821 replace_call_with_value (gsi, dest);
1822 return true;
1823 }
1824
1825 if (optimize_function_for_size_p (cfun))
1826 return false;
1827
1828 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1829 if (!fn)
1830 return false;
1831
e08341bb
MS
1832 /* Set to non-null if ARG refers to an unterminated array. */
1833 tree nonstr = NULL;
598f7235 1834 tree len = get_maxval_strlen (src, SRK_STRLEN, &nonstr);
e08341bb
MS
1835
1836 if (nonstr)
1837 {
1838 /* Avoid folding calls with unterminated arrays. */
1839 if (!gimple_no_warning_p (stmt))
1840 warn_string_no_nul (loc, "strcpy", src, nonstr);
1841 gimple_set_no_warning (stmt, true);
1842 return false;
1843 }
1844
fef5a0d9 1845 if (!len)
dcb7fae2 1846 return false;
fef5a0d9
RB
1847
1848 len = fold_convert_loc (loc, size_type_node, len);
1849 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1850 len = force_gimple_operand_gsi (gsi, len, true,
1851 NULL_TREE, true, GSI_SAME_STMT);
355fe088 1852 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
fef5a0d9
RB
1853 replace_call_with_call_and_fold (gsi, repl);
1854 return true;
1855}
1856
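/* An illustrative source-level sketch of the folding above (hypothetical
   destination and literal, assuming the source length is known):

     strcpy (buf, "abc");

   becomes

     memcpy (buf, "abc", 4);

   where 4 is strlen ("abc") + 1 so the terminating nul is copied too.  */
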
1857/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
 1858 When the bound and the source length are known constants the call may
 1859 be turned into a memcpy. Return false if no simplification can be made. */
1860
1861static bool
dcb7fae2
RB
1862gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1863 tree dest, tree src, tree len)
fef5a0d9 1864{
025d57f0
MS
1865 gimple *stmt = gsi_stmt (*gsi);
1866 location_t loc = gimple_location (stmt);
6a33d0ff 1867 bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;
fef5a0d9
RB
1868
1869 /* If the LEN parameter is zero, return DEST. */
1870 if (integer_zerop (len))
1871 {
53b28abf 1872 /* Avoid warning if the destination refers to an array/pointer
6a33d0ff
MS
1873 decorated with attribute nonstring. */
1874 if (!nonstring)
1875 {
1876 tree fndecl = gimple_call_fndecl (stmt);
6a33d0ff
MS
1877
1878 /* Warn about the lack of nul termination: the result is not
1879 a (nul-terminated) string. */
598f7235 1880 tree slen = get_maxval_strlen (src, SRK_STRLEN);
6a33d0ff
MS
1881 if (slen && !integer_zerop (slen))
1882 warning_at (loc, OPT_Wstringop_truncation,
1883 "%G%qD destination unchanged after copying no bytes "
1884 "from a string of length %E",
8a45b051 1885 stmt, fndecl, slen);
6a33d0ff
MS
1886 else
1887 warning_at (loc, OPT_Wstringop_truncation,
1888 "%G%qD destination unchanged after copying no bytes",
8a45b051 1889 stmt, fndecl);
6a33d0ff 1890 }
025d57f0 1891
fef5a0d9
RB
1892 replace_call_with_value (gsi, dest);
1893 return true;
1894 }
1895
1896 /* We can't compare slen with len as constants below if len is not a
1897 constant. */
dcb7fae2 1898 if (TREE_CODE (len) != INTEGER_CST)
fef5a0d9
RB
1899 return false;
1900
fef5a0d9 1901 /* Now, we must be passed a constant src ptr parameter. */
598f7235 1902 tree slen = get_maxval_strlen (src, SRK_STRLEN);
dcb7fae2 1903 if (!slen || TREE_CODE (slen) != INTEGER_CST)
fef5a0d9
RB
1904 return false;
1905
025d57f0
MS
1906 /* The size of the source string including the terminating nul. */
1907 tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
fef5a0d9
RB
1908
1909 /* We do not support simplification of this case, though we do
1910 support it when expanding trees into RTL. */
1911 /* FIXME: generate a call to __builtin_memset. */
025d57f0 1912 if (tree_int_cst_lt (ssize, len))
fef5a0d9
RB
1913 return false;
1914
5d0d5d68
MS
1915 /* Diagnose truncation that leaves the copy unterminated. */
1916 maybe_diag_stxncpy_trunc (*gsi, src, len);
025d57f0 1917
fef5a0d9 1918 /* OK transform into builtin memcpy. */
025d57f0 1919 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
fef5a0d9
RB
1920 if (!fn)
1921 return false;
1922
1923 len = fold_convert_loc (loc, size_type_node, len);
1924 len = force_gimple_operand_gsi (gsi, len, true,
1925 NULL_TREE, true, GSI_SAME_STMT);
355fe088 1926 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
fef5a0d9 1927 replace_call_with_call_and_fold (gsi, repl);
025d57f0 1928
fef5a0d9
RB
1929 return true;
1930}
1931
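/* An illustrative sketch of the folding above (hypothetical operands):
   when the bound is a constant no larger than the source size including
   its terminating nul,

     strncpy (buf, "abc", 4);

   becomes

     memcpy (buf, "abc", 4);

   and strncpy (buf, s, 0) is simply replaced by BUF.  */
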
71dea1dd
WD
1932/* Fold function call to builtin strchr or strrchr.
1933 If both arguments are constant, evaluate and fold the result,
1934 otherwise simplify str(r)chr (str, 0) into str + strlen (str).
912d9ec3
WD
1935 In general strlen is significantly faster than strchr
1936 due to being a simpler operation. */
1937static bool
71dea1dd 1938gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
912d9ec3
WD
1939{
1940 gimple *stmt = gsi_stmt (*gsi);
1941 tree str = gimple_call_arg (stmt, 0);
1942 tree c = gimple_call_arg (stmt, 1);
1943 location_t loc = gimple_location (stmt);
71dea1dd
WD
1944 const char *p;
1945 char ch;
912d9ec3 1946
71dea1dd 1947 if (!gimple_call_lhs (stmt))
912d9ec3
WD
1948 return false;
1949
b5338fb3
MS
1950 /* Avoid folding if the first argument is not a nul-terminated array.
1951 Defer warning until later. */
1952 if (!check_nul_terminated_array (NULL_TREE, str))
1953 return false;
1954
71dea1dd
WD
1955 if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
1956 {
1957 const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);
1958
1959 if (p1 == NULL)
1960 {
1961 replace_call_with_value (gsi, integer_zero_node);
1962 return true;
1963 }
1964
1965 tree len = build_int_cst (size_type_node, p1 - p);
1966 gimple_seq stmts = NULL;
1967 gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1968 POINTER_PLUS_EXPR, str, len);
1969 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1970 gsi_replace_with_seq_vops (gsi, stmts);
1971 return true;
1972 }
1973
1974 if (!integer_zerop (c))
912d9ec3
WD
1975 return false;
1976
71dea1dd 1977 /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size. */
c8952930 1978 if (is_strrchr && optimize_function_for_size_p (cfun))
71dea1dd
WD
1979 {
1980 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1981
c8952930 1982 if (strchr_fn)
71dea1dd
WD
1983 {
1984 gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
1985 replace_call_with_call_and_fold (gsi, repl);
1986 return true;
1987 }
1988
1989 return false;
1990 }
1991
912d9ec3
WD
1992 tree len;
1993 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1994
1995 if (!strlen_fn)
1996 return false;
1997
1998 /* Create newstr = strlen (str). */
1999 gimple_seq stmts = NULL;
2000 gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
2001 gimple_set_location (new_stmt, loc);
a15ebbcd 2002 len = create_tmp_reg_or_ssa_name (size_type_node);
912d9ec3
WD
2003 gimple_call_set_lhs (new_stmt, len);
2004 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2005
2006 /* Create (str p+ strlen (str)). */
2007 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2008 POINTER_PLUS_EXPR, str, len);
2009 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2010 gsi_replace_with_seq_vops (gsi, stmts);
2011 /* gsi now points at the assignment to the lhs, get a
2012 stmt iterator to the strlen.
2013 ??? We can't use gsi_for_stmt as that doesn't work when the
2014 CFG isn't built yet. */
2015 gimple_stmt_iterator gsi2 = *gsi;
2016 gsi_prev (&gsi2);
2017 fold_stmt (&gsi2);
2018 return true;
2019}
2020
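/* Illustrative sketches of the foldings above (hypothetical operands):

     strchr ("abcd", 'c')   folds to   "abcd" + 2
     strchr ("abcd", 'e')   folds to   a null pointer
     strchr (s, 0)          folds to   s + strlen (s)

   and strrchr (s, 0) is turned into strchr (s, 0) instead when
   optimizing for size.  */
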
c8952930
JJ
2021/* Fold function call to builtin strstr.
2022 If both arguments are constant, evaluate and fold the result,
2023 additionally fold strstr (x, "") into x and strstr (x, "c")
2024 into strchr (x, 'c'). */
2025static bool
2026gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
2027{
2028 gimple *stmt = gsi_stmt (*gsi);
b5338fb3
MS
2029 if (!gimple_call_lhs (stmt))
2030 return false;
2031
c8952930
JJ
2032 tree haystack = gimple_call_arg (stmt, 0);
2033 tree needle = gimple_call_arg (stmt, 1);
c8952930 2034
b5338fb3
MS
2035 /* Avoid folding if either argument is not a nul-terminated array.
2036 Defer warning until later. */
2037 if (!check_nul_terminated_array (NULL_TREE, haystack)
2038 || !check_nul_terminated_array (NULL_TREE, needle))
c8952930
JJ
2039 return false;
2040
b5338fb3 2041 const char *q = c_getstr (needle);
c8952930
JJ
2042 if (q == NULL)
2043 return false;
2044
b5338fb3 2045 if (const char *p = c_getstr (haystack))
c8952930
JJ
2046 {
2047 const char *r = strstr (p, q);
2048
2049 if (r == NULL)
2050 {
2051 replace_call_with_value (gsi, integer_zero_node);
2052 return true;
2053 }
2054
2055 tree len = build_int_cst (size_type_node, r - p);
2056 gimple_seq stmts = NULL;
2057 gimple *new_stmt
2058 = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
2059 haystack, len);
2060 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2061 gsi_replace_with_seq_vops (gsi, stmts);
2062 return true;
2063 }
2064
2065 /* For strstr (x, "") return x. */
2066 if (q[0] == '\0')
2067 {
2068 replace_call_with_value (gsi, haystack);
2069 return true;
2070 }
2071
2072 /* Transform strstr (x, "c") into strchr (x, 'c'). */
2073 if (q[1] == '\0')
2074 {
2075 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
2076 if (strchr_fn)
2077 {
2078 tree c = build_int_cst (integer_type_node, q[0]);
2079 gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
2080 replace_call_with_call_and_fold (gsi, repl);
2081 return true;
2082 }
2083 }
2084
2085 return false;
2086}
2087
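/* Illustrative sketches of the foldings above (hypothetical operands):

     strstr ("abcd", "cd")   folds to   "abcd" + 2
     strstr ("abcd", "xy")   folds to   a null pointer
     strstr (x, "")          folds to   x
     strstr (x, "c")         folds to   strchr (x, 'c')

   where the last two forms require only the needle to be constant.  */
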
fef5a0d9
RB
2088/* Simplify a call to the strcat builtin. DST and SRC are the arguments
2089 to the call.
2090
2091 Return false if no simplification was possible, otherwise replace
 2092 the call with its simplified form and return true.
2093
2094 The simplified form may be a constant or other expression which
2095 computes the same value, but in a more efficient manner (including
2096 calls to other builtin functions).
2097
2098 When the source length is known and the block is optimized for
 2099 speed, the call is rewritten as a call to strlen on DST followed
 2100 by a memcpy of SRC, including its terminating nul, into DST plus
 2101 that length. If the original call had an LHS it is re-assigned
 2102 from DST, since strcat returns its first argument. The
 2103 replacement statements are then themselves folded where
 2104 possible. */
2105
2106static bool
dcb7fae2 2107gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
fef5a0d9 2108{
355fe088 2109 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2 2110 location_t loc = gimple_location (stmt);
fef5a0d9
RB
2111
2112 const char *p = c_getstr (src);
2113
2114 /* If the string length is zero, return the dst parameter. */
2115 if (p && *p == '\0')
2116 {
2117 replace_call_with_value (gsi, dst);
2118 return true;
2119 }
2120
2121 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
2122 return false;
2123
2124 /* See if we can store by pieces into (dst + strlen(dst)). */
2125 tree newdst;
2126 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
2127 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2128
2129 if (!strlen_fn || !memcpy_fn)
2130 return false;
2131
2132 /* If the length of the source string isn't computable don't
2133 split strcat into strlen and memcpy. */
598f7235 2134 tree len = get_maxval_strlen (src, SRK_STRLEN);
fef5a0d9 2135 if (! len)
fef5a0d9
RB
2136 return false;
2137
2138 /* Create strlen (dst). */
2139 gimple_seq stmts = NULL, stmts2;
355fe088 2140 gimple *repl = gimple_build_call (strlen_fn, 1, dst);
fef5a0d9 2141 gimple_set_location (repl, loc);
a15ebbcd 2142 newdst = create_tmp_reg_or_ssa_name (size_type_node);
fef5a0d9
RB
2143 gimple_call_set_lhs (repl, newdst);
2144 gimple_seq_add_stmt_without_update (&stmts, repl);
2145
2146 /* Create (dst p+ strlen (dst)). */
2147 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
2148 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
2149 gimple_seq_add_seq_without_update (&stmts, stmts2);
2150
2151 len = fold_convert_loc (loc, size_type_node, len);
2152 len = size_binop_loc (loc, PLUS_EXPR, len,
2153 build_int_cst (size_type_node, 1));
2154 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
2155 gimple_seq_add_seq_without_update (&stmts, stmts2);
2156
2157 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
2158 gimple_seq_add_stmt_without_update (&stmts, repl);
2159 if (gimple_call_lhs (stmt))
2160 {
2161 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
2162 gimple_seq_add_stmt_without_update (&stmts, repl);
2163 gsi_replace_with_seq_vops (gsi, stmts);
2164 /* gsi now points at the assignment to the lhs, get a
2165 stmt iterator to the memcpy call.
2166 ??? We can't use gsi_for_stmt as that doesn't work when the
2167 CFG isn't built yet. */
2168 gimple_stmt_iterator gsi2 = *gsi;
2169 gsi_prev (&gsi2);
2170 fold_stmt (&gsi2);
2171 }
2172 else
2173 {
2174 gsi_replace_with_seq_vops (gsi, stmts);
2175 fold_stmt (gsi);
2176 }
2177 return true;
2178}
2179
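/* An illustrative sketch of the folding above (hypothetical operands,
   assuming the source length is known and the block is optimized for
   speed):

     strcat (dst, "abc");

   becomes the equivalent of

     memcpy (dst + strlen (dst), "abc", 4);

   with any use of the original result replaced by DST.  */
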
07f1cf56
RB
2180/* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
2181 are the arguments to the call. */
2182
2183static bool
2184gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
2185{
355fe088 2186 gimple *stmt = gsi_stmt (*gsi);
07f1cf56
RB
2187 tree dest = gimple_call_arg (stmt, 0);
2188 tree src = gimple_call_arg (stmt, 1);
2189 tree size = gimple_call_arg (stmt, 2);
2190 tree fn;
2191 const char *p;
2192
2193
2194 p = c_getstr (src);
2195 /* If the SRC parameter is "", return DEST. */
2196 if (p && *p == '\0')
2197 {
2198 replace_call_with_value (gsi, dest);
2199 return true;
2200 }
2201
2202 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
2203 return false;
2204
2205 /* If __builtin_strcat_chk is used, assume strcat is available. */
2206 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
2207 if (!fn)
2208 return false;
2209
355fe088 2210 gimple *repl = gimple_build_call (fn, 2, dest, src);
07f1cf56
RB
2211 replace_call_with_call_and_fold (gsi, repl);
2212 return true;
2213}
2214
ad03a744
RB
2215/* Simplify a call to the strncat builtin. */
2216
2217static bool
2218gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
2219{
8a45b051 2220 gimple *stmt = gsi_stmt (*gsi);
ad03a744
RB
2221 tree dst = gimple_call_arg (stmt, 0);
2222 tree src = gimple_call_arg (stmt, 1);
2223 tree len = gimple_call_arg (stmt, 2);
2224
2225 const char *p = c_getstr (src);
2226
2227 /* If the requested length is zero, or the src parameter string
2228 length is zero, return the dst parameter. */
2229 if (integer_zerop (len) || (p && *p == '\0'))
2230 {
2231 replace_call_with_value (gsi, dst);
2232 return true;
2233 }
2234
025d57f0
MS
2235 if (TREE_CODE (len) != INTEGER_CST || !p)
2236 return false;
2237
2238 unsigned srclen = strlen (p);
2239
2240 int cmpsrc = compare_tree_int (len, srclen);
2241
2242 /* Return early if the requested len is less than the string length.
2243 Warnings will be issued elsewhere later. */
2244 if (cmpsrc < 0)
2245 return false;
2246
2247 unsigned HOST_WIDE_INT dstsize;
2248
2249 bool nowarn = gimple_no_warning_p (stmt);
2250
2251 if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
ad03a744 2252 {
025d57f0 2253 int cmpdst = compare_tree_int (len, dstsize);
ad03a744 2254
025d57f0
MS
2255 if (cmpdst >= 0)
2256 {
2257 tree fndecl = gimple_call_fndecl (stmt);
2258
2259 /* Strncat copies (at most) LEN bytes and always appends
2260 the terminating NUL so the specified bound should never
2261 be equal to (or greater than) the size of the destination.
2262 If it is, the copy could overflow. */
2263 location_t loc = gimple_location (stmt);
2264 nowarn = warning_at (loc, OPT_Wstringop_overflow_,
2265 cmpdst == 0
2266 ? G_("%G%qD specified bound %E equals "
2267 "destination size")
2268 : G_("%G%qD specified bound %E exceeds "
2269 "destination size %wu"),
2270 stmt, fndecl, len, dstsize);
2271 if (nowarn)
2272 gimple_set_no_warning (stmt, true);
2273 }
2274 }
ad03a744 2275
025d57f0
MS
2276 if (!nowarn && cmpsrc == 0)
2277 {
2278 tree fndecl = gimple_call_fndecl (stmt);
025d57f0 2279 location_t loc = gimple_location (stmt);
eec5f615
MS
2280
2281 /* To avoid possible overflow the specified bound should also
2282 not be equal to the length of the source, even when the size
2283 of the destination is unknown (it's not an uncommon mistake
2284 to specify as the bound to strncpy the length of the source). */
025d57f0
MS
2285 if (warning_at (loc, OPT_Wstringop_overflow_,
2286 "%G%qD specified bound %E equals source length",
2287 stmt, fndecl, len))
2288 gimple_set_no_warning (stmt, true);
ad03a744
RB
2289 }
2290
025d57f0
MS
2291 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
2292
2293 /* If the replacement _DECL isn't initialized, don't do the
2294 transformation. */
2295 if (!fn)
2296 return false;
2297
2298 /* Otherwise, emit a call to strcat. */
2299 gcall *repl = gimple_build_call (fn, 2, dst, src);
2300 replace_call_with_call_and_fold (gsi, repl);
2301 return true;
ad03a744
RB
2302}
2303
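/* An illustrative sketch of the folding above (hypothetical operands):
   when the bound is a constant at least as large as the constant
   source length,

     strncat (dst, "abc", 8);

   is emitted as

     strcat (dst, "abc");

   possibly after diagnosing a bound that equals the source length or
   that reaches the size of the destination.  */
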
745583f9
RB
2304/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2305 LEN, and SIZE. */
2306
2307static bool
2308gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
2309{
355fe088 2310 gimple *stmt = gsi_stmt (*gsi);
745583f9
RB
2311 tree dest = gimple_call_arg (stmt, 0);
2312 tree src = gimple_call_arg (stmt, 1);
2313 tree len = gimple_call_arg (stmt, 2);
2314 tree size = gimple_call_arg (stmt, 3);
2315 tree fn;
2316 const char *p;
2317
2318 p = c_getstr (src);
2319 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2320 if ((p && *p == '\0')
2321 || integer_zerop (len))
2322 {
2323 replace_call_with_value (gsi, dest);
2324 return true;
2325 }
2326
2327 if (! tree_fits_uhwi_p (size))
2328 return false;
2329
2330 if (! integer_all_onesp (size))
2331 {
2332 tree src_len = c_strlen (src, 1);
2333 if (src_len
2334 && tree_fits_uhwi_p (src_len)
2335 && tree_fits_uhwi_p (len)
2336 && ! tree_int_cst_lt (len, src_len))
2337 {
2338 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2339 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
2340 if (!fn)
2341 return false;
2342
355fe088 2343 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
745583f9
RB
2344 replace_call_with_call_and_fold (gsi, repl);
2345 return true;
2346 }
2347 return false;
2348 }
2349
2350 /* If __builtin_strncat_chk is used, assume strncat is available. */
2351 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2352 if (!fn)
2353 return false;
2354
355fe088 2355 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
745583f9
RB
2356 replace_call_with_call_and_fold (gsi, repl);
2357 return true;
2358}
2359
a918bfbf
ML
2360/* Build and append gimple statements to STMTS that would load the first
 2361 character of the memory location identified by STR. LOC is the location
 2362 of the statement. */
2363
2364static tree
2365gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2366{
2367 tree var;
2368
2369 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2370 tree cst_uchar_ptr_node
2371 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2372 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2373
2374 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2375 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2376 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2377
2378 gimple_assign_set_lhs (stmt, var);
2379 gimple_seq_add_stmt_without_update (stmts, stmt);
2380
2381 return var;
2382}
2383
d2f8402a 2384/* Fold a call to the str{n}{case}cmp builtin pointed by GSI iterator. */
a918bfbf
ML
2385
2386static bool
2387gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
2388{
2389 gimple *stmt = gsi_stmt (*gsi);
2390 tree callee = gimple_call_fndecl (stmt);
2391 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2392
2393 tree type = integer_type_node;
2394 tree str1 = gimple_call_arg (stmt, 0);
2395 tree str2 = gimple_call_arg (stmt, 1);
2396 tree lhs = gimple_call_lhs (stmt);
d86d8b35
MS
2397
2398 tree bound_node = NULL_TREE;
d2f8402a 2399 unsigned HOST_WIDE_INT bound = HOST_WIDE_INT_M1U;
a918bfbf
ML
2400
2401 /* Handle strncmp and strncasecmp functions. */
2402 if (gimple_call_num_args (stmt) == 3)
2403 {
d86d8b35
MS
2404 bound_node = gimple_call_arg (stmt, 2);
2405 if (tree_fits_uhwi_p (bound_node))
2406 bound = tree_to_uhwi (bound_node);
a918bfbf
ML
2407 }
2408
d86d8b35 2409 /* If the BOUND parameter is zero, return zero. */
d2f8402a 2410 if (bound == 0)
a918bfbf
ML
2411 {
2412 replace_call_with_value (gsi, integer_zero_node);
2413 return true;
2414 }
2415
2416 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
2417 if (operand_equal_p (str1, str2, 0))
2418 {
2419 replace_call_with_value (gsi, integer_zero_node);
2420 return true;
2421 }
2422
d2f8402a
MS
2423 /* Initially set to the number of characters, including the terminating
2424 nul if each array has one. LENx == strnlen (Sx, LENx) implies that
2425 the array Sx is not terminated by a nul.
2426 For nul-terminated strings, LENx is then adjusted down to the string
 2427 length so that LENx == NULPOSx holds. */
2428 unsigned HOST_WIDE_INT len1 = HOST_WIDE_INT_MAX, len2 = len1;
2429 const char *p1 = c_getstr (str1, &len1);
2430 const char *p2 = c_getstr (str2, &len2);
2431
2432 /* The position of the terminating nul character if one exists, otherwise
2433 a value greater than LENx. */
2434 unsigned HOST_WIDE_INT nulpos1 = HOST_WIDE_INT_MAX, nulpos2 = nulpos1;
2435
2436 if (p1)
2437 {
2438 size_t n = strnlen (p1, len1);
2439 if (n < len1)
2440 len1 = nulpos1 = n;
2441 }
2442
2443 if (p2)
2444 {
2445 size_t n = strnlen (p2, len2);
2446 if (n < len2)
2447 len2 = nulpos2 = n;
2448 }
a918bfbf
ML
2449
2450 /* For known strings, return an immediate value. */
2451 if (p1 && p2)
2452 {
2453 int r = 0;
2454 bool known_result = false;
2455
2456 switch (fcode)
2457 {
2458 case BUILT_IN_STRCMP:
8b0b334a 2459 case BUILT_IN_STRCMP_EQ:
d2f8402a 2460 if (len1 != nulpos1 || len2 != nulpos2)
a918bfbf 2461 break;
d2f8402a
MS
2462
2463 r = strcmp (p1, p2);
2464 known_result = true;
2465 break;
2466
a918bfbf 2467 case BUILT_IN_STRNCMP:
8b0b334a 2468 case BUILT_IN_STRNCMP_EQ:
a918bfbf 2469 {
d86d8b35
MS
2470 if (bound == HOST_WIDE_INT_M1U)
2471 break;
2472
d2f8402a
MS
2473 /* Reduce the bound to be no more than the length
2474 of the shorter of the two strings, or the sizes
2475 of the unterminated arrays. */
2476 unsigned HOST_WIDE_INT n = bound;
2477
2478 if (len1 == nulpos1 && len1 < n)
2479 n = len1 + 1;
2480 if (len2 == nulpos2 && len2 < n)
2481 n = len2 + 1;
2482
2483 if (MIN (nulpos1, nulpos2) + 1 < n)
a918bfbf 2484 break;
d2f8402a
MS
2485
2486 r = strncmp (p1, p2, n);
a918bfbf
ML
2487 known_result = true;
2488 break;
2489 }
2490 /* The only situation we can handle is when the strings are equal
 2491 (result 0), which is already handled by the operand_equal_p case above. */
2492 case BUILT_IN_STRCASECMP:
2493 break;
2494 case BUILT_IN_STRNCASECMP:
2495 {
d2f8402a 2496 if (bound == HOST_WIDE_INT_M1U)
a918bfbf 2497 break;
d2f8402a 2498 r = strncmp (p1, p2, bound);
a918bfbf
ML
2499 if (r == 0)
2500 known_result = true;
5de73c05 2501 break;
a918bfbf
ML
2502 }
2503 default:
2504 gcc_unreachable ();
2505 }
2506
2507 if (known_result)
2508 {
2509 replace_call_with_value (gsi, build_cmp_result (type, r));
2510 return true;
2511 }
2512 }
2513
d2f8402a 2514 bool nonzero_bound = (bound >= 1 && bound < HOST_WIDE_INT_M1U)
a918bfbf 2515 || fcode == BUILT_IN_STRCMP
8b0b334a 2516 || fcode == BUILT_IN_STRCMP_EQ
a918bfbf
ML
2517 || fcode == BUILT_IN_STRCASECMP;
2518
2519 location_t loc = gimple_location (stmt);
2520
2521 /* If the second arg is "", return *(const unsigned char*)arg1. */
d2f8402a 2522 if (p2 && *p2 == '\0' && nonzero_bound)
a918bfbf
ML
2523 {
2524 gimple_seq stmts = NULL;
2525 tree var = gimple_load_first_char (loc, str1, &stmts);
2526 if (lhs)
2527 {
2528 stmt = gimple_build_assign (lhs, NOP_EXPR, var);
2529 gimple_seq_add_stmt_without_update (&stmts, stmt);
2530 }
2531
2532 gsi_replace_with_seq_vops (gsi, stmts);
2533 return true;
2534 }
2535
2536 /* If the first arg is "", return -*(const unsigned char*)arg2. */
d2f8402a 2537 if (p1 && *p1 == '\0' && nonzero_bound)
a918bfbf
ML
2538 {
2539 gimple_seq stmts = NULL;
2540 tree var = gimple_load_first_char (loc, str2, &stmts);
2541
2542 if (lhs)
2543 {
2544 tree c = create_tmp_reg_or_ssa_name (integer_type_node);
2545 stmt = gimple_build_assign (c, NOP_EXPR, var);
2546 gimple_seq_add_stmt_without_update (&stmts, stmt);
2547
2548 stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
2549 gimple_seq_add_stmt_without_update (&stmts, stmt);
2550 }
2551
2552 gsi_replace_with_seq_vops (gsi, stmts);
2553 return true;
2554 }
2555
d2f8402a 2556 /* If BOUND is one, return an expression corresponding to
a918bfbf 2557 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
d2f8402a 2558 if (fcode == BUILT_IN_STRNCMP && bound == 1)
a918bfbf
ML
2559 {
2560 gimple_seq stmts = NULL;
2561 tree temp1 = gimple_load_first_char (loc, str1, &stmts);
2562 tree temp2 = gimple_load_first_char (loc, str2, &stmts);
2563
2564 if (lhs)
2565 {
2566 tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
2567 gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
2568 gimple_seq_add_stmt_without_update (&stmts, convert1);
2569
2570 tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
2571 gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
2572 gimple_seq_add_stmt_without_update (&stmts, convert2);
2573
2574 stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
2575 gimple_seq_add_stmt_without_update (&stmts, stmt);
2576 }
2577
2578 gsi_replace_with_seq_vops (gsi, stmts);
2579 return true;
2580 }
2581
d2f8402a
MS
2582 /* If BOUND is greater than the length of one constant string,
2583 and the other argument is also a nul-terminated string, replace
2584 strncmp with strcmp. */
2585 if (fcode == BUILT_IN_STRNCMP
2586 && bound > 0 && bound < HOST_WIDE_INT_M1U
2587 && ((p2 && len2 < bound && len2 == nulpos2)
2588 || (p1 && len1 < bound && len1 == nulpos1)))
caed5c92
QZ
2589 {
2590 tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
2591 if (!fn)
2592 return false;
2593 gimple *repl = gimple_build_call (fn, 2, str1, str2);
2594 replace_call_with_call_and_fold (gsi, repl);
2595 return true;
2596 }
2597
a918bfbf
ML
2598 return false;
2599}
2600
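/* Illustrative sketches of the foldings above (hypothetical operands):

     strncmp (a, b, 0)     folds to   0
     strcmp (a, a)         folds to   0
     strcmp ("ab", "ac")   folds to   a negative constant
     strcmp (a, "")        folds to   *(const unsigned char *) a
     strncmp (a, b, 1)     folds to   the difference of the first bytes
     strncmp (a, "ab", 5)  folds to   strcmp (a, "ab")

   where the last form applies because the bound exceeds the length of
   the constant nul-terminated argument.  */
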
488c6247
ML
2601/* Fold a call to the memchr pointed by GSI iterator. */
2602
2603static bool
2604gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
2605{
2606 gimple *stmt = gsi_stmt (*gsi);
2607 tree lhs = gimple_call_lhs (stmt);
2608 tree arg1 = gimple_call_arg (stmt, 0);
2609 tree arg2 = gimple_call_arg (stmt, 1);
2610 tree len = gimple_call_arg (stmt, 2);
2611
2612 /* If the LEN parameter is zero, return zero. */
2613 if (integer_zerop (len))
2614 {
2615 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2616 return true;
2617 }
2618
2619 char c;
2620 if (TREE_CODE (arg2) != INTEGER_CST
2621 || !tree_fits_uhwi_p (len)
2622 || !target_char_cst_p (arg2, &c))
2623 return false;
2624
2625 unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
2626 unsigned HOST_WIDE_INT string_length;
2627 const char *p1 = c_getstr (arg1, &string_length);
2628
2629 if (p1)
2630 {
2631 const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
2632 if (r == NULL)
2633 {
5fd336bb
JM
2634 tree mem_size, offset_node;
2635 string_constant (arg1, &offset_node, &mem_size, NULL);
2636 unsigned HOST_WIDE_INT offset = (offset_node == NULL_TREE)
2637 ? 0 : tree_to_uhwi (offset_node);
2638 /* MEM_SIZE is the size of the array the string literal
2639 is stored in. */
2640 unsigned HOST_WIDE_INT string_size = tree_to_uhwi (mem_size) - offset;
2641 gcc_checking_assert (string_length <= string_size);
2642 if (length <= string_size)
488c6247
ML
2643 {
2644 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2645 return true;
2646 }
2647 }
2648 else
2649 {
2650 unsigned HOST_WIDE_INT offset = r - p1;
2651 gimple_seq stmts = NULL;
2652 if (lhs != NULL_TREE)
2653 {
2654 tree offset_cst = build_int_cst (TREE_TYPE (len), offset);
2655 gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
2656 arg1, offset_cst);
2657 gimple_seq_add_stmt_without_update (&stmts, stmt);
2658 }
2659 else
2660 gimple_seq_add_stmt_without_update (&stmts,
2661 gimple_build_nop ());
2662
2663 gsi_replace_with_seq_vops (gsi, stmts);
2664 return true;
2665 }
2666 }
2667
2668 return false;
2669}
a918bfbf 2670
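/* Illustrative sketches of the foldings above (hypothetical operands):

     memchr (p, c, 0)          folds to   a null pointer
     memchr ("abcd", 'c', 4)   folds to   "abcd" + 2
     memchr ("abcd", 'e', 4)   folds to   a null pointer

   while a search whose length could run past the object holding the
   constant string is left alone.  */
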
fef5a0d9
RB
2671/* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
 2672 to the call. UNLOCKED is true if this is actually a call to
 2673 fputs_unlocked. If the length of the string ARG0 points to can be
 2674 determined, the call may be turned into a call to fputc or fwrite.
 2675 Return false if no simplification
 2676 was possible. */
2677
2678static bool
2679gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
fef5a0d9 2680 tree arg0, tree arg1,
dcb7fae2 2681 bool unlocked)
fef5a0d9 2682{
355fe088 2683 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2 2684
fef5a0d9
RB
2685 /* If we're using an unlocked function, assume the other unlocked
2686 functions exist explicitly. */
2687 tree const fn_fputc = (unlocked
2688 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
2689 : builtin_decl_implicit (BUILT_IN_FPUTC));
2690 tree const fn_fwrite = (unlocked
2691 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
2692 : builtin_decl_implicit (BUILT_IN_FWRITE));
2693
2694 /* If the return value is used, don't do the transformation. */
dcb7fae2 2695 if (gimple_call_lhs (stmt))
fef5a0d9
RB
2696 return false;
2697
fef5a0d9
RB
2698 /* Get the length of the string passed to fputs. If the length
2699 can't be determined, punt. */
598f7235 2700 tree len = get_maxval_strlen (arg0, SRK_STRLEN);
fef5a0d9
RB
2701 if (!len
2702 || TREE_CODE (len) != INTEGER_CST)
2703 return false;
2704
2705 switch (compare_tree_int (len, 1))
2706 {
2707 case -1: /* length is 0, delete the call entirely. */
2708 replace_call_with_value (gsi, integer_zero_node);
2709 return true;
2710
2711 case 0: /* length is 1, call fputc. */
2712 {
2713 const char *p = c_getstr (arg0);
2714 if (p != NULL)
2715 {
2716 if (!fn_fputc)
2717 return false;
2718
355fe088 2719 gimple *repl = gimple_build_call (fn_fputc, 2,
fef5a0d9
RB
2720 build_int_cst
2721 (integer_type_node, p[0]), arg1);
2722 replace_call_with_call_and_fold (gsi, repl);
2723 return true;
2724 }
2725 }
2726 /* FALLTHROUGH */
2727 case 1: /* length is greater than 1, call fwrite. */
2728 {
2729 /* If optimizing for size keep fputs. */
2730 if (optimize_function_for_size_p (cfun))
2731 return false;
2732 /* New argument list transforming fputs(string, stream) to
2733 fwrite(string, 1, len, stream). */
2734 if (!fn_fwrite)
2735 return false;
2736
355fe088 2737 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
fef5a0d9
RB
2738 size_one_node, len, arg1);
2739 replace_call_with_call_and_fold (gsi, repl);
2740 return true;
2741 }
2742 default:
2743 gcc_unreachable ();
2744 }
2745 return false;
2746}
2747
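/* Illustrative sketches of the foldings above (hypothetical operands,
   with the return value of fputs unused):

     fputs ("", f)      is removed entirely
     fputs ("a", f)     becomes   fputc ('a', f)
     fputs ("abc", f)   becomes   fwrite ("abc", 1, 3, f)

   where the fwrite form is used only when not optimizing for size.  */
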
2748/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
2749 DEST, SRC, LEN, and SIZE are the arguments to the call.
2750 IGNORE is true if the return value can be ignored. FCODE is the BUILT_IN_*
 2751 code of the builtin. If MAXLEN is not NULL, it is the maximum length
 2752 passed as the third argument. */
2753
2754static bool
2755gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
fef5a0d9 2756 tree dest, tree src, tree len, tree size,
fef5a0d9
RB
2757 enum built_in_function fcode)
2758{
355fe088 2759 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2
RB
2760 location_t loc = gimple_location (stmt);
2761 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
fef5a0d9
RB
2762 tree fn;
2763
2764 /* If SRC and DEST are the same (and not volatile), return DEST
2765 (resp. DEST+LEN for __mempcpy_chk). */
2766 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
2767 {
2768 if (fcode != BUILT_IN_MEMPCPY_CHK)
2769 {
2770 replace_call_with_value (gsi, dest);
2771 return true;
2772 }
2773 else
2774 {
74e3c262
RB
2775 gimple_seq stmts = NULL;
2776 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
22518428
JJ
2777 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
2778 TREE_TYPE (dest), dest, len);
74e3c262 2779 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
fef5a0d9
RB
2780 replace_call_with_value (gsi, temp);
2781 return true;
2782 }
2783 }
2784
2785 if (! tree_fits_uhwi_p (size))
2786 return false;
2787
598f7235 2788 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
fef5a0d9
RB
2789 if (! integer_all_onesp (size))
2790 {
2791 if (! tree_fits_uhwi_p (len))
2792 {
2793 /* If LEN is not constant, try MAXLEN too.
2794 For MAXLEN only allow optimizing into non-_ocs function
2795 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2796 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2797 {
2798 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
2799 {
2800 /* (void) __mempcpy_chk () can be optimized into
2801 (void) __memcpy_chk (). */
2802 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2803 if (!fn)
2804 return false;
2805
355fe088 2806 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
fef5a0d9
RB
2807 replace_call_with_call_and_fold (gsi, repl);
2808 return true;
2809 }
2810 return false;
2811 }
2812 }
2813 else
2814 maxlen = len;
2815
2816 if (tree_int_cst_lt (size, maxlen))
2817 return false;
2818 }
2819
2820 fn = NULL_TREE;
2821 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
2822 mem{cpy,pcpy,move,set} is available. */
2823 switch (fcode)
2824 {
2825 case BUILT_IN_MEMCPY_CHK:
2826 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
2827 break;
2828 case BUILT_IN_MEMPCPY_CHK:
2829 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
2830 break;
2831 case BUILT_IN_MEMMOVE_CHK:
2832 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
2833 break;
2834 case BUILT_IN_MEMSET_CHK:
2835 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
2836 break;
2837 default:
2838 break;
2839 }
2840
2841 if (!fn)
2842 return false;
2843
355fe088 2844 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
fef5a0d9
RB
2845 replace_call_with_call_and_fold (gsi, repl);
2846 return true;
2847}
2848
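/* An illustrative sketch of the folding above (hypothetical operands):
   when the object size argument is unknown (all ones) or at least as
   large as the copied length,

     __builtin___memcpy_chk (d, s, n, sz);

   becomes the ordinary

     memcpy (d, s, n);

   and likewise for the mempcpy, memmove and memset variants; a
   __mempcpy_chk whose result is unused may first be demoted to
   __memcpy_chk.  */
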
2849/* Fold a call to the __st[rp]cpy_chk builtin.
2850 DEST, SRC, and SIZE are the arguments to the call.
2851 IGNORE is true if the return value can be ignored. FCODE is the BUILT_IN_*
 2852 code of the builtin. If MAXLEN is not NULL, it is the maximum length of
 2853 the strings passed as the second argument. */
2854
2855static bool
2856gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
dcb7fae2 2857 tree dest,
fef5a0d9 2858 tree src, tree size,
fef5a0d9
RB
2859 enum built_in_function fcode)
2860{
355fe088 2861 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2
RB
2862 location_t loc = gimple_location (stmt);
2863 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
fef5a0d9
RB
2864 tree len, fn;
2865
2866 /* If SRC and DEST are the same (and not volatile), return DEST. */
2867 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
2868 {
8cd95cec
MS
2869 /* Issue -Wrestrict unless the pointers are null (those do
2870 not point to objects and so do not indicate an overlap;
2871 such calls could be the result of sanitization and jump
2872 threading). */
2873 if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
e9b9fa4c
MS
2874 {
2875 tree func = gimple_call_fndecl (stmt);
cc8bea0a 2876
e9b9fa4c
MS
2877 warning_at (loc, OPT_Wrestrict,
2878 "%qD source argument is the same as destination",
2879 func);
2880 }
cc8bea0a 2881
fef5a0d9
RB
2882 replace_call_with_value (gsi, dest);
2883 return true;
2884 }
2885
2886 if (! tree_fits_uhwi_p (size))
2887 return false;
2888
598f7235 2889 tree maxlen = get_maxval_strlen (src, SRK_STRLENMAX);
fef5a0d9
RB
2890 if (! integer_all_onesp (size))
2891 {
2892 len = c_strlen (src, 1);
2893 if (! len || ! tree_fits_uhwi_p (len))
2894 {
2895 /* If LEN is not constant, try MAXLEN too.
2896 For MAXLEN only allow optimizing into non-_ocs function
2897 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2898 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2899 {
2900 if (fcode == BUILT_IN_STPCPY_CHK)
2901 {
2902 if (! ignore)
2903 return false;
2904
2905 /* If return value of __stpcpy_chk is ignored,
2906 optimize into __strcpy_chk. */
2907 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
2908 if (!fn)
2909 return false;
2910
355fe088 2911 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
fef5a0d9
RB
2912 replace_call_with_call_and_fold (gsi, repl);
2913 return true;
2914 }
2915
2916 if (! len || TREE_SIDE_EFFECTS (len))
2917 return false;
2918
2919 /* If c_strlen returned something, but not a constant,
2920 transform __strcpy_chk into __memcpy_chk. */
2921 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2922 if (!fn)
2923 return false;
2924
74e3c262 2925 gimple_seq stmts = NULL;
770fe3a3 2926 len = force_gimple_operand (len, &stmts, true, NULL_TREE);
74e3c262
RB
2927 len = gimple_convert (&stmts, loc, size_type_node, len);
2928 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
2929 build_int_cst (size_type_node, 1));
2930 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
355fe088 2931 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
fef5a0d9
RB
2932 replace_call_with_call_and_fold (gsi, repl);
2933 return true;
2934 }
e256dfce 2935 }
fef5a0d9
RB
2936 else
2937 maxlen = len;
2938
2939 if (! tree_int_cst_lt (maxlen, size))
2940 return false;
e256dfce
RG
2941 }
2942
fef5a0d9
RB
2943 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
2944 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
2945 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
2946 if (!fn)
2947 return false;
2948
355fe088 2949 gimple *repl = gimple_build_call (fn, 2, dest, src);
fef5a0d9
RB
2950 replace_call_with_call_and_fold (gsi, repl);
2951 return true;
2952}
2953
2954/* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
2955 are the arguments to the call. If MAXLEN is not NULL, it is the maximum
 2956 length passed as the third argument. IGNORE is true if the return value
 2957 can be ignored. FCODE is the BUILT_IN_* code of the builtin. */
2958
2959static bool
2960gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
2961 tree dest, tree src,
dcb7fae2 2962 tree len, tree size,
fef5a0d9
RB
2963 enum built_in_function fcode)
2964{
355fe088 2965 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2 2966 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
fef5a0d9
RB
2967 tree fn;
2968
2969 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
cbdd87d4 2970 {
fef5a0d9
RB
2971 /* If return value of __stpncpy_chk is ignored,
2972 optimize into __strncpy_chk. */
2973 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
2974 if (fn)
2975 {
355fe088 2976 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
fef5a0d9
RB
2977 replace_call_with_call_and_fold (gsi, repl);
2978 return true;
2979 }
cbdd87d4
RG
2980 }
2981
fef5a0d9
RB
2982 if (! tree_fits_uhwi_p (size))
2983 return false;
2984
598f7235 2985 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
fef5a0d9 2986 if (! integer_all_onesp (size))
cbdd87d4 2987 {
fef5a0d9 2988 if (! tree_fits_uhwi_p (len))
fe2ef088 2989 {
fef5a0d9
RB
2990 /* If LEN is not constant, try MAXLEN too.
2991 For MAXLEN only allow optimizing into non-_ocs function
2992 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2993 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2994 return false;
8a1561bc 2995 }
fef5a0d9
RB
2996 else
2997 maxlen = len;
2998
2999 if (tree_int_cst_lt (size, maxlen))
3000 return false;
cbdd87d4
RG
3001 }
3002
fef5a0d9
RB
3003 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
3004 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
3005 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
3006 if (!fn)
3007 return false;
3008
355fe088 3009 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
fef5a0d9
RB
3010 replace_call_with_call_and_fold (gsi, repl);
3011 return true;
cbdd87d4
RG
3012}
3013
2625bb5d
RB
3014/* Fold function call to builtin stpcpy with arguments DEST and SRC.
3015 Return false if no simplification can be made. */
3016
3017static bool
3018gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
3019{
3020 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3021 location_t loc = gimple_location (stmt);
3022 tree dest = gimple_call_arg (stmt, 0);
3023 tree src = gimple_call_arg (stmt, 1);
01b0acb7 3024 tree fn, lenp1;
2625bb5d
RB
3025
3026 /* If the result is unused, replace stpcpy with strcpy. */
3027 if (gimple_call_lhs (stmt) == NULL_TREE)
3028 {
3029 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3030 if (!fn)
3031 return false;
3032 gimple_call_set_fndecl (stmt, fn);
3033 fold_stmt (gsi);
3034 return true;
3035 }
3036
01b0acb7 3037 /* Set to non-null if ARG refers to an unterminated array. */
3f46ef1f 3038 c_strlen_data data = { };
7d583f42 3039 tree len = c_strlen (src, 1, &data, 1);
2625bb5d
RB
3040 if (!len
3041 || TREE_CODE (len) != INTEGER_CST)
01b0acb7 3042 {
7d583f42
JL
3043 data.decl = unterminated_array (src);
3044 if (!data.decl)
01b0acb7
MS
3045 return false;
3046 }
3047
7d583f42 3048 if (data.decl)
01b0acb7
MS
3049 {
3050 /* Avoid folding calls with unterminated arrays. */
3051 if (!gimple_no_warning_p (stmt))
7d583f42 3052 warn_string_no_nul (loc, "stpcpy", src, data.decl);
01b0acb7
MS
3053 gimple_set_no_warning (stmt, true);
3054 return false;
3055 }
2625bb5d
RB
3056
3057 if (optimize_function_for_size_p (cfun)
3058 /* If length is zero it's small enough. */
3059 && !integer_zerop (len))
3060 return false;
3061
3062 /* If the source has a known length replace stpcpy with memcpy. */
3063 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3064 if (!fn)
3065 return false;
3066
3067 gimple_seq stmts = NULL;
3068 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
3069 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
3070 tem, build_int_cst (size_type_node, 1));
3071 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3072 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
779724a5 3073 gimple_move_vops (repl, stmt);
2625bb5d
RB
3074 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
3075 /* Replace the result with dest + len. */
3076 stmts = NULL;
3077 tem = gimple_convert (&stmts, loc, sizetype, len);
3078 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3079 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
3080 POINTER_PLUS_EXPR, dest, tem);
f6b4dc28 3081 gsi_replace (gsi, ret, false);
2625bb5d
RB
3082 /* Finally fold the memcpy call. */
3083 gimple_stmt_iterator gsi2 = *gsi;
3084 gsi_prev (&gsi2);
3085 fold_stmt (&gsi2);
3086 return true;
3087}
3088
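/* An illustrative sketch of the folding above (hypothetical operands,
   assuming a known source length and a used result):

     p = stpcpy (dst, "abc");

   becomes the equivalent of

     memcpy (dst, "abc", 4);
     p = dst + 3;

   while a stpcpy whose result is unused is simply demoted to strcpy.  */
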
fef5a0d9
RB
3089/* Fold a call to __{,v}snprintf_chk pointed to by the iterator GSI.
 3090 Return false if a normal call should be emitted rather than
 3091 transforming the call. FCODE is either BUILT_IN_SNPRINTF_CHK or
 3092 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is the maximum length
 3093 passed as the second argument. */
cbdd87d4
RG
3094
3095static bool
fef5a0d9 3096gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
dcb7fae2 3097 enum built_in_function fcode)
cbdd87d4 3098{
538dd0b7 3099 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
fef5a0d9
RB
3100 tree dest, size, len, fn, fmt, flag;
3101 const char *fmt_str;
cbdd87d4 3102
fef5a0d9
RB
3103 /* Verify the required arguments in the original call. */
3104 if (gimple_call_num_args (stmt) < 5)
3105 return false;
cbdd87d4 3106
fef5a0d9
RB
3107 dest = gimple_call_arg (stmt, 0);
3108 len = gimple_call_arg (stmt, 1);
3109 flag = gimple_call_arg (stmt, 2);
3110 size = gimple_call_arg (stmt, 3);
3111 fmt = gimple_call_arg (stmt, 4);
3112
3113 if (! tree_fits_uhwi_p (size))
3114 return false;
3115
3116 if (! integer_all_onesp (size))
3117 {
598f7235 3118 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
fef5a0d9 3119 if (! tree_fits_uhwi_p (len))
cbdd87d4 3120 {
fef5a0d9
RB
3121 /* If LEN is not constant, try MAXLEN too.
3122 For MAXLEN only allow optimizing into non-_ocs function
3123 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
3124 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
cbdd87d4
RG
3125 return false;
3126 }
3127 else
fef5a0d9 3128 maxlen = len;
cbdd87d4 3129
fef5a0d9
RB
3130 if (tree_int_cst_lt (size, maxlen))
3131 return false;
3132 }
cbdd87d4 3133
fef5a0d9
RB
3134 if (!init_target_chars ())
3135 return false;
cbdd87d4 3136
fef5a0d9
RB
3137 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
3138 or if format doesn't contain % chars or is "%s". */
3139 if (! integer_zerop (flag))
3140 {
3141 fmt_str = c_getstr (fmt);
3142 if (fmt_str == NULL)
3143 return false;
3144 if (strchr (fmt_str, target_percent) != NULL
3145 && strcmp (fmt_str, target_percent_s))
3146 return false;
cbdd87d4
RG
3147 }
3148
fef5a0d9
RB
3149 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
3150 available. */
3151 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
3152 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
3153 if (!fn)
491e0b9b
RG
3154 return false;
3155
fef5a0d9
RB
3156 /* Replace the called function and the first 5 argument by 3 retaining
3157 trailing varargs. */
3158 gimple_call_set_fndecl (stmt, fn);
3159 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
3160 gimple_call_set_arg (stmt, 0, dest);
3161 gimple_call_set_arg (stmt, 1, len);
3162 gimple_call_set_arg (stmt, 2, fmt);
3163 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
3164 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
3165 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
3166 fold_stmt (gsi);
3167 return true;
3168}
cbdd87d4 3169
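/* An illustrative sketch of the folding above (hypothetical operands):
   when the object size is unknown (all ones) or at least the length
   argument, and either FLAG is zero or the format contains no %
   directives (or is just "%s"),

     __builtin___snprintf_chk (buf, n, flag, sz, "%s", s);

   becomes

     snprintf (buf, n, "%s", s);

   and similarly for the vsnprintf variant.  */
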
fef5a0d9
RB
3170/* Fold a call to __{,v}sprintf_chk pointed to by the iterator GSI.
 3171 Return false if a normal call should be emitted rather than
 3172 transforming the call. FCODE is either BUILT_IN_SPRINTF_CHK
 3173 or BUILT_IN_VSPRINTF_CHK. */
cbdd87d4 3174
fef5a0d9
RB
3175static bool
3176gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
3177 enum built_in_function fcode)
3178{
538dd0b7 3179 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
fef5a0d9
RB
3180 tree dest, size, len, fn, fmt, flag;
3181 const char *fmt_str;
3182 unsigned nargs = gimple_call_num_args (stmt);
cbdd87d4 3183
fef5a0d9
RB
3184 /* Verify the required arguments in the original call. */
3185 if (nargs < 4)
3186 return false;
3187 dest = gimple_call_arg (stmt, 0);
3188 flag = gimple_call_arg (stmt, 1);
3189 size = gimple_call_arg (stmt, 2);
3190 fmt = gimple_call_arg (stmt, 3);
3191
3192 if (! tree_fits_uhwi_p (size))
3193 return false;
3194
3195 len = NULL_TREE;
3196
3197 if (!init_target_chars ())
3198 return false;
3199
3200 /* Check whether the format is a literal string constant. */
3201 fmt_str = c_getstr (fmt);
3202 if (fmt_str != NULL)
3203 {
3204 /* If the format doesn't contain % args or %%, we know the size. */
3205 if (strchr (fmt_str, target_percent) == 0)
cbdd87d4 3206 {
fef5a0d9
RB
3207 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
3208 len = build_int_cstu (size_type_node, strlen (fmt_str));
3209 }
3210 /* If the format is "%s" and first ... argument is a string literal,
3211 we know the size too. */
3212 else if (fcode == BUILT_IN_SPRINTF_CHK
3213 && strcmp (fmt_str, target_percent_s) == 0)
3214 {
3215 tree arg;
cbdd87d4 3216
fef5a0d9
RB
3217 if (nargs == 5)
3218 {
3219 arg = gimple_call_arg (stmt, 4);
3220 if (POINTER_TYPE_P (TREE_TYPE (arg)))
3221 {
3222 len = c_strlen (arg, 1);
3223 if (! len || ! tree_fits_uhwi_p (len))
3224 len = NULL_TREE;
3225 }
3226 }
3227 }
3228 }
cbdd87d4 3229
3230 if (! integer_all_onesp (size))
3231 {
3232 if (! len || ! tree_int_cst_lt (len, size))
3233 return false;
3234 }
cbdd87d4 3235
3236 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
3237 or if format doesn't contain % chars or is "%s". */
3238 if (! integer_zerop (flag))
3239 {
3240 if (fmt_str == NULL)
3241 return false;
3242 if (strchr (fmt_str, target_percent) != NULL
3243 && strcmp (fmt_str, target_percent_s))
3244 return false;
3245 }
cbdd87d4 3246
3247 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
3248 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
3249 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
3250 if (!fn)
3251 return false;
3252
3253   /* Replace the called function and the first 4 arguments by 2, retaining
3254      the trailing varargs.  */
3255 gimple_call_set_fndecl (stmt, fn);
3256 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
3257 gimple_call_set_arg (stmt, 0, dest);
3258 gimple_call_set_arg (stmt, 1, fmt);
3259 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
3260 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
3261 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
3262 fold_stmt (gsi);
3263 return true;
3264}
3265
3266/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
3267 ORIG may be null if this is a 2-argument call. We don't attempt to
3268 simplify calls with more than 3 arguments.
3269
a104bd88 3270 Return true if simplification was possible, otherwise false. */
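/* For example (illustrative only):

     sprintf (d, "hello");   =>  strcpy (d, "hello");  [result, if used, is 5]
     sprintf (d, "%s", s);   =>  strcpy (d, s);        [needs strlen (s) known
						         when the result is used]
     sprintf (d, "%d", i);   =>  not folded  */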
35770bb2 3271
a104bd88 3272bool
dcb7fae2 3273gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
35770bb2 3274{
355fe088 3275 gimple *stmt = gsi_stmt (*gsi);
3276 tree dest = gimple_call_arg (stmt, 0);
3277 tree fmt = gimple_call_arg (stmt, 1);
3278 tree orig = NULL_TREE;
3279 const char *fmt_str = NULL;
3280
3281 /* Verify the required arguments in the original call. We deal with two
3282 types of sprintf() calls: 'sprintf (str, fmt)' and
3283 'sprintf (dest, "%s", orig)'. */
3284 if (gimple_call_num_args (stmt) > 3)
3285 return false;
3286
3287 if (gimple_call_num_args (stmt) == 3)
3288 orig = gimple_call_arg (stmt, 2);
3289
3290 /* Check whether the format is a literal string constant. */
3291 fmt_str = c_getstr (fmt);
3292 if (fmt_str == NULL)
3293 return false;
3294
3295 if (!init_target_chars ())
3296 return false;
3297
3298 /* If the format doesn't contain % args or %%, use strcpy. */
3299 if (strchr (fmt_str, target_percent) == NULL)
3300 {
3301 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3302
3303 if (!fn)
3304 return false;
3305
3306 /* Don't optimize sprintf (buf, "abc", ptr++). */
3307 if (orig)
3308 return false;
3309
3310 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
3311 'format' is known to contain no % formats. */
3312 gimple_seq stmts = NULL;
355fe088 3313 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3314
3315 /* Propagate the NO_WARNING bit to avoid issuing the same
3316 warning more than once. */
3317 if (gimple_no_warning_p (stmt))
3318 gimple_set_no_warning (repl, true);
3319
35770bb2 3320 gimple_seq_add_stmt_without_update (&stmts, repl);
a73468e8 3321 if (tree lhs = gimple_call_lhs (stmt))
35770bb2 3322 {
3323 repl = gimple_build_assign (lhs, build_int_cst (TREE_TYPE (lhs),
3324 strlen (fmt_str)));
3325 gimple_seq_add_stmt_without_update (&stmts, repl);
3326 gsi_replace_with_seq_vops (gsi, stmts);
3327 /* gsi now points at the assignment to the lhs, get a
3328 stmt iterator to the memcpy call.
3329 ??? We can't use gsi_for_stmt as that doesn't work when the
3330 CFG isn't built yet. */
3331 gimple_stmt_iterator gsi2 = *gsi;
3332 gsi_prev (&gsi2);
3333 fold_stmt (&gsi2);
3334 }
3335 else
3336 {
3337 gsi_replace_with_seq_vops (gsi, stmts);
3338 fold_stmt (gsi);
3339 }
3340 return true;
3341 }
3342
3343   /* If the format is "%s", convert the call to strcpy.  If the result is
	 used we also need to know the length of the source string.  */
3344 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3345 {
3346 tree fn;
3347 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3348
3349 if (!fn)
3350 return false;
3351
3352 /* Don't crash on sprintf (str1, "%s"). */
3353 if (!orig)
3354 return false;
3355
3356 tree orig_len = NULL_TREE;
3357 if (gimple_call_lhs (stmt))
35770bb2 3358 {
598f7235 3359 orig_len = get_maxval_strlen (orig, SRK_STRLEN);
d7e78447 3360 if (!orig_len)
3361 return false;
3362 }
3363
3364 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
3365 gimple_seq stmts = NULL;
355fe088 3366 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3367
3368 /* Propagate the NO_WARNING bit to avoid issuing the same
3369 warning more than once. */
3370 if (gimple_no_warning_p (stmt))
3371 gimple_set_no_warning (repl, true);
3372
35770bb2 3373 gimple_seq_add_stmt_without_update (&stmts, repl);
a73468e8 3374 if (tree lhs = gimple_call_lhs (stmt))
35770bb2 3375 {
a73468e8 3376 if (!useless_type_conversion_p (TREE_TYPE (lhs),
d7e78447 3377 TREE_TYPE (orig_len)))
3378 orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
3379 repl = gimple_build_assign (lhs, orig_len);
3380 gimple_seq_add_stmt_without_update (&stmts, repl);
3381 gsi_replace_with_seq_vops (gsi, stmts);
3382 /* gsi now points at the assignment to the lhs, get a
3383 stmt iterator to the memcpy call.
3384 ??? We can't use gsi_for_stmt as that doesn't work when the
3385 CFG isn't built yet. */
3386 gimple_stmt_iterator gsi2 = *gsi;
3387 gsi_prev (&gsi2);
3388 fold_stmt (&gsi2);
3389 }
3390 else
3391 {
3392 gsi_replace_with_seq_vops (gsi, stmts);
3393 fold_stmt (gsi);
3394 }
3395 return true;
3396 }
3397 return false;
3398}
3399
3400/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
3401 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
3402 attempt to simplify calls with more than 4 arguments.
35770bb2 3403
a104bd88 3404 Return true if simplification was possible, otherwise false. */
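/* For example (illustrative, with constant destination sizes):

     snprintf (d, 32, "hello");      =>  strcpy (d, "hello");   [5 < 32]
     snprintf (d, 32, "%s", "abc");  =>  strcpy (d, "abc");     [3 < 32]
     snprintf (d, 4, "hello");       =>  not folded (would truncate)  */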
d7e78447 3405
a104bd88 3406bool
dcb7fae2 3407gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
d7e78447 3408{
538dd0b7 3409 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3410 tree dest = gimple_call_arg (stmt, 0);
3411 tree destsize = gimple_call_arg (stmt, 1);
3412 tree fmt = gimple_call_arg (stmt, 2);
3413 tree orig = NULL_TREE;
3414 const char *fmt_str = NULL;
3415
3416 if (gimple_call_num_args (stmt) > 4)
3417 return false;
3418
3419 if (gimple_call_num_args (stmt) == 4)
3420 orig = gimple_call_arg (stmt, 3);
3421
3422 if (!tree_fits_uhwi_p (destsize))
3423 return false;
3424 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
3425
3426 /* Check whether the format is a literal string constant. */
3427 fmt_str = c_getstr (fmt);
3428 if (fmt_str == NULL)
3429 return false;
3430
3431 if (!init_target_chars ())
3432 return false;
3433
3434 /* If the format doesn't contain % args or %%, use strcpy. */
3435 if (strchr (fmt_str, target_percent) == NULL)
3436 {
3437 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3438 if (!fn)
3439 return false;
3440
3441 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
3442 if (orig)
3443 return false;
3444
3445 /* We could expand this as
3446 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
3447 or to
3448 memcpy (str, fmt_with_nul_at_cstm1, cst);
3449 but in the former case that might increase code size
3450 and in the latter case grow .rodata section too much.
3451 So punt for now. */
3452 size_t len = strlen (fmt_str);
3453 if (len >= destlen)
3454 return false;
3455
3456 gimple_seq stmts = NULL;
355fe088 3457 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
d7e78447 3458 gimple_seq_add_stmt_without_update (&stmts, repl);
a73468e8 3459 if (tree lhs = gimple_call_lhs (stmt))
d7e78447 3460 {
3461 repl = gimple_build_assign (lhs,
3462 build_int_cst (TREE_TYPE (lhs), len));
3463 gimple_seq_add_stmt_without_update (&stmts, repl);
3464 gsi_replace_with_seq_vops (gsi, stmts);
3465 /* gsi now points at the assignment to the lhs, get a
3466 stmt iterator to the memcpy call.
3467 ??? We can't use gsi_for_stmt as that doesn't work when the
3468 CFG isn't built yet. */
3469 gimple_stmt_iterator gsi2 = *gsi;
3470 gsi_prev (&gsi2);
3471 fold_stmt (&gsi2);
3472 }
3473 else
3474 {
3475 gsi_replace_with_seq_vops (gsi, stmts);
3476 fold_stmt (gsi);
3477 }
3478 return true;
3479 }
3480
3481   /* If the format is "%s", convert the call to strcpy.  If the result is
	 used we also need the constant length of the source string.  */
3482 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3483 {
3484 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3485 if (!fn)
3486 return false;
3487
3488 /* Don't crash on snprintf (str1, cst, "%s"). */
3489 if (!orig)
3490 return false;
3491
598f7235 3492 tree orig_len = get_maxval_strlen (orig, SRK_STRLEN);
af9db3a7 3493 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
dcb7fae2 3494 return false;
3495
3496 /* We could expand this as
3497 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
3498 or to
3499 memcpy (str1, str2_with_nul_at_cstm1, cst);
3500 but in the former case that might increase code size
3501 and in the latter case grow .rodata section too much.
3502 So punt for now. */
3503 if (compare_tree_int (orig_len, destlen) >= 0)
3504 return false;
3505
3506 /* Convert snprintf (str1, cst, "%s", str2) into
3507 strcpy (str1, str2) if strlen (str2) < cst. */
3508 gimple_seq stmts = NULL;
355fe088 3509 gimple *repl = gimple_build_call (fn, 2, dest, orig);
d7e78447 3510 gimple_seq_add_stmt_without_update (&stmts, repl);
a73468e8 3511 if (tree lhs = gimple_call_lhs (stmt))
d7e78447 3512 {
a73468e8 3513 if (!useless_type_conversion_p (TREE_TYPE (lhs),
d7e78447 3514 TREE_TYPE (orig_len)))
3515 orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
3516 repl = gimple_build_assign (lhs, orig_len);
3517 gimple_seq_add_stmt_without_update (&stmts, repl);
3518 gsi_replace_with_seq_vops (gsi, stmts);
3519 /* gsi now points at the assignment to the lhs, get a
3520 stmt iterator to the memcpy call.
3521 ??? We can't use gsi_for_stmt as that doesn't work when the
3522 CFG isn't built yet. */
3523 gimple_stmt_iterator gsi2 = *gsi;
3524 gsi_prev (&gsi2);
3525 fold_stmt (&gsi2);
3526 }
3527 else
3528 {
3529 gsi_replace_with_seq_vops (gsi, stmts);
3530 fold_stmt (gsi);
3531 }
3532 return true;
3533 }
3534 return false;
3535}
35770bb2 3536
3537/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
3538   FP, FMT, and ARG are the arguments to the call.  We don't fold calls with
3539   more than 3 arguments, and ARG may be null in the 2-argument case.
3540
3541   Return true if the call was simplified into an fputs/fputc call or was
3542   removed entirely, otherwise false.  FCODE is the BUILT_IN_* code of the
3543   function to be simplified.  */
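/* For example (illustrative, and only when the result of the call is unused):

     fprintf (fp, "hello");    =>  fputs ("hello", fp);
     fprintf (fp, "%s", str);  =>  fputs (str, fp);
     fprintf (fp, "%c", c);    =>  fputc (c, fp);  */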
3544
3545static bool
3546gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
3547 tree fp, tree fmt, tree arg,
3548 enum built_in_function fcode)
3549{
3550 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3551 tree fn_fputc, fn_fputs;
3552 const char *fmt_str = NULL;
3553
3554 /* If the return value is used, don't do the transformation. */
3555 if (gimple_call_lhs (stmt) != NULL_TREE)
3556 return false;
3557
3558 /* Check whether the format is a literal string constant. */
3559 fmt_str = c_getstr (fmt);
3560 if (fmt_str == NULL)
3561 return false;
3562
3563 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
3564 {
3565 /* If we're using an unlocked function, assume the other
3566 unlocked functions exist explicitly. */
3567 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
3568 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
3569 }
3570 else
3571 {
3572 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
3573 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
3574 }
3575
3576 if (!init_target_chars ())
3577 return false;
3578
3579   /* If the format doesn't contain % args or %%, use fputs.  */
3580 if (strchr (fmt_str, target_percent) == NULL)
3581 {
3582 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
3583 && arg)
3584 return false;
3585
3586 /* If the format specifier was "", fprintf does nothing. */
3587 if (fmt_str[0] == '\0')
3588 {
3589 replace_call_with_value (gsi, NULL_TREE);
3590 return true;
3591 }
3592
3593 /* When "string" doesn't contain %, replace all cases of
3594 fprintf (fp, string) with fputs (string, fp). The fputs
3595 builtin will take care of special cases like length == 1. */
3596 if (fn_fputs)
3597 {
3598 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
3599 replace_call_with_call_and_fold (gsi, repl);
3600 return true;
3601 }
3602 }
3603
3604 /* The other optimizations can be done only on the non-va_list variants. */
3605 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
3606 return false;
3607
3608 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3609 else if (strcmp (fmt_str, target_percent_s) == 0)
3610 {
3611 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3612 return false;
3613 if (fn_fputs)
3614 {
3615 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
3616 replace_call_with_call_and_fold (gsi, repl);
3617 return true;
3618 }
3619 }
3620
3621 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3622 else if (strcmp (fmt_str, target_percent_c) == 0)
3623 {
3624 if (!arg
3625 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
3626 return false;
3627 if (fn_fputc)
3628 {
3629 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
3630 replace_call_with_call_and_fold (gsi, repl);
3631 return true;
3632 }
3633 }
3634
3635 return false;
3636}
3637
3638/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3639 FMT and ARG are the arguments to the call; we don't fold cases with
3640 more than 2 arguments, and ARG may be null if this is a 1-argument case.
3641
3642   Return true if the call was simplified into a puts/putchar call or was
3643   removed entirely, otherwise false.  FCODE is the BUILT_IN_* code of the
3644   function to be simplified.  */
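/* For example (illustrative, and only when the result of the call is unused):

     printf ("x");        =>  putchar ('x');
     printf ("hello\n");  =>  puts ("hello");
     printf ("%s\n", s);  =>  puts (s);
     printf ("%c", c);    =>  putchar (c);  */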
3645
3646static bool
3647gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
3648 tree arg, enum built_in_function fcode)
3649{
3650 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3651 tree fn_putchar, fn_puts, newarg;
3652 const char *fmt_str = NULL;
3653
3654 /* If the return value is used, don't do the transformation. */
3655 if (gimple_call_lhs (stmt) != NULL_TREE)
3656 return false;
3657
3658 /* Check whether the format is a literal string constant. */
3659 fmt_str = c_getstr (fmt);
3660 if (fmt_str == NULL)
3661 return false;
3662
3663 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
3664 {
3665 /* If we're using an unlocked function, assume the other
3666 unlocked functions exist explicitly. */
3667 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
3668 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
3669 }
3670 else
3671 {
3672 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
3673 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
3674 }
3675
3676 if (!init_target_chars ())
3677 return false;
3678
3679 if (strcmp (fmt_str, target_percent_s) == 0
3680 || strchr (fmt_str, target_percent) == NULL)
3681 {
3682 const char *str;
3683
3684 if (strcmp (fmt_str, target_percent_s) == 0)
3685 {
3686 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3687 return false;
3688
3689 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3690 return false;
3691
3692 str = c_getstr (arg);
3693 if (str == NULL)
3694 return false;
3695 }
3696 else
3697 {
3698 /* The format specifier doesn't contain any '%' characters. */
3699 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
3700 && arg)
3701 return false;
3702 str = fmt_str;
3703 }
3704
3705 /* If the string was "", printf does nothing. */
3706 if (str[0] == '\0')
3707 {
3708 replace_call_with_value (gsi, NULL_TREE);
3709 return true;
3710 }
3711
3712 /* If the string has length of 1, call putchar. */
3713 if (str[1] == '\0')
3714 {
3715	      /* Given printf ("c"), where c is any single character,
3716		 convert "c"[0] to an int and pass that to putchar,
3717		 the replacement function.  */
3718 newarg = build_int_cst (integer_type_node, str[0]);
3719 if (fn_putchar)
3720 {
3721 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
3722 replace_call_with_call_and_fold (gsi, repl);
3723 return true;
3724 }
3725 }
3726 else
3727 {
3728 /* If the string was "string\n", call puts("string"). */
3729 size_t len = strlen (str);
3730 if ((unsigned char)str[len - 1] == target_newline
3731 && (size_t) (int) len == len
3732 && (int) len > 0)
3733 {
3734 char *newstr;
3735
3736 /* Create a NUL-terminated string that's one char shorter
3737 than the original, stripping off the trailing '\n'. */
a353fec4 3738 newstr = xstrdup (str);
ad03a744 3739 newstr[len - 1] = '\0';
3740 newarg = build_string_literal (len, newstr);
3741 free (newstr);
3742 if (fn_puts)
3743 {
3744 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
3745 replace_call_with_call_and_fold (gsi, repl);
3746 return true;
3747 }
3748 }
3749 else
3750 /* We'd like to arrange to call fputs(string,stdout) here,
3751 but we need stdout and don't have a way to get it yet. */
3752 return false;
3753 }
3754 }
3755
3756 /* The other optimizations can be done only on the non-va_list variants. */
3757 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3758 return false;
3759
3760 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
3761 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
3762 {
3763 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3764 return false;
3765 if (fn_puts)
3766 {
3767 gcall *repl = gimple_build_call (fn_puts, 1, arg);
3768 replace_call_with_call_and_fold (gsi, repl);
3769 return true;
3770 }
3771 }
3772
3773 /* If the format specifier was "%c", call __builtin_putchar(arg). */
3774 else if (strcmp (fmt_str, target_percent_c) == 0)
3775 {
3776 if (!arg || ! useless_type_conversion_p (integer_type_node,
3777 TREE_TYPE (arg)))
3778 return false;
3779 if (fn_putchar)
3780 {
3781 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
3782 replace_call_with_call_and_fold (gsi, repl);
3783 return true;
3784 }
3785 }
3786
3787 return false;
3788}
3789
edd7ae68 3790
3791
3792/* Fold a call to __builtin_strlen at *GSI.  Replace the call with a constant
     if the argument's length is known exactly, otherwise record the computed
     range of lengths on the call's result.  */
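/* For instance, strlen (p) where p is known to point to the literal "hello"
   folds to the constant 5; if all that is known is that p points into a
   char[8] array, the call is kept but the range of its result is narrowed
   (illustrative).  */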
3793
3794static bool
dcb7fae2 3795gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
fef5a0d9 3796{
355fe088 3797 gimple *stmt = gsi_stmt (*gsi);
e08341bb 3798 tree arg = gimple_call_arg (stmt, 0);
3799
3800 wide_int minlen;
3801 wide_int maxlen;
3802
5d6655eb 3803 c_strlen_data lendata = { };
03c4a945 3804 if (get_range_strlen (arg, &lendata, /* eltsize = */ 1)
3805 && !lendata.decl
3806 && lendata.minlen && TREE_CODE (lendata.minlen) == INTEGER_CST
3807 && lendata.maxlen && TREE_CODE (lendata.maxlen) == INTEGER_CST)
3808 {
3809 /* The range of lengths refers to either a single constant
3810 string or to the longest and shortest constant string
3811 referenced by the argument of the strlen() call, or to
3812 the strings that can possibly be stored in the arrays
3813 the argument refers to. */
3814 minlen = wi::to_wide (lendata.minlen);
3815 maxlen = wi::to_wide (lendata.maxlen);
3816 }
3817 else
3818 {
3819 unsigned prec = TYPE_PRECISION (sizetype);
3820
3821 minlen = wi::shwi (0, prec);
3822 maxlen = wi::to_wide (max_object_size (), prec) - 2;
3823 }
3824
3825 if (minlen == maxlen)
3826 {
3827 /* Fold the strlen call to a constant. */
3828 tree type = TREE_TYPE (lendata.minlen);
3829 tree len = force_gimple_operand_gsi (gsi,
3830 wide_int_to_tree (type, minlen),
3831 true, NULL, true, GSI_SAME_STMT);
3832 replace_call_with_value (gsi, len);
3833 return true;
3834 }
3835
d4bf6975 3836 /* Set the strlen() range to [0, MAXLEN]. */
a7bf6c08 3837 if (tree lhs = gimple_call_lhs (stmt))
34fcf41e 3838 set_strlen_range (lhs, minlen, maxlen);
3839
3840 return false;
3841}
3842
3843/* Fold a call to __builtin_acc_on_device. */
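/* For the host compiler this amounts to folding acc_on_device (dev) into the
   boolean expression (dev == GOMP_DEVICE_HOST) | (dev == GOMP_DEVICE_NONE);
   an accelerator compiler substitutes its own device identifier instead
   (illustrative summary of the expansion below).  */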
3844
3845static bool
3846gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
3847{
3848 /* Defer folding until we know which compiler we're in. */
3849 if (symtab->state != EXPANSION)
3850 return false;
3851
3852 unsigned val_host = GOMP_DEVICE_HOST;
3853 unsigned val_dev = GOMP_DEVICE_NONE;
3854
3855#ifdef ACCEL_COMPILER
3856 val_host = GOMP_DEVICE_NOT_HOST;
3857 val_dev = ACCEL_COMPILER_acc_device;
3858#endif
3859
3860 location_t loc = gimple_location (gsi_stmt (*gsi));
3861
3862 tree host_eq = make_ssa_name (boolean_type_node);
3863 gimple *host_ass = gimple_build_assign
3864 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
3865 gimple_set_location (host_ass, loc);
3866 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
3867
3868 tree dev_eq = make_ssa_name (boolean_type_node);
3869 gimple *dev_ass = gimple_build_assign
3870 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
3871 gimple_set_location (dev_ass, loc);
3872 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
3873
3874 tree result = make_ssa_name (boolean_type_node);
3875 gimple *result_ass = gimple_build_assign
3876 (result, BIT_IOR_EXPR, host_eq, dev_eq);
3877 gimple_set_location (result_ass, loc);
3878 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
3879
3880 replace_call_with_value (gsi, result);
3881
3882 return true;
3883}
cbdd87d4 3884
3885/* Fold realloc (0, n) -> malloc (n). */
3886
3887static bool
3888gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
3889{
3890 gimple *stmt = gsi_stmt (*gsi);
3891 tree arg = gimple_call_arg (stmt, 0);
3892 tree size = gimple_call_arg (stmt, 1);
3893
3894 if (operand_equal_p (arg, null_pointer_node, 0))
3895 {
3896 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
3897 if (fn_malloc)
3898 {
3899 gcall *repl = gimple_build_call (fn_malloc, 1, size);
3900 replace_call_with_call_and_fold (gsi, repl);
3901 return true;
3902 }
3903 }
3904 return false;
3905}
3906
3907/* Fold the non-target builtin at *GSI and return whether any simplification
3908 was made. */
cbdd87d4 3909
fef5a0d9 3910static bool
dcb7fae2 3911gimple_fold_builtin (gimple_stmt_iterator *gsi)
cbdd87d4 3912{
538dd0b7 3913 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
fef5a0d9 3914 tree callee = gimple_call_fndecl (stmt);
cbdd87d4 3915
dcb7fae2
RB
3916 /* Give up for always_inline inline builtins until they are
3917 inlined. */
3918 if (avoid_folding_inline_builtin (callee))
3919 return false;
cbdd87d4 3920
edd7ae68
RB
3921 unsigned n = gimple_call_num_args (stmt);
3922 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
3923 switch (fcode)
cbdd87d4 3924 {
b3d8d88e
MS
3925 case BUILT_IN_BCMP:
3926 return gimple_fold_builtin_bcmp (gsi);
3927 case BUILT_IN_BCOPY:
3928 return gimple_fold_builtin_bcopy (gsi);
dcb7fae2 3929 case BUILT_IN_BZERO:
b3d8d88e
MS
3930 return gimple_fold_builtin_bzero (gsi);
3931
dcb7fae2
RB
3932 case BUILT_IN_MEMSET:
3933 return gimple_fold_builtin_memset (gsi,
3934 gimple_call_arg (stmt, 1),
3935 gimple_call_arg (stmt, 2));
dcb7fae2 3936 case BUILT_IN_MEMCPY:
dcb7fae2 3937 case BUILT_IN_MEMPCPY:
dcb7fae2
RB
3938 case BUILT_IN_MEMMOVE:
3939 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
0d67a510 3940 gimple_call_arg (stmt, 1), fcode);
dcb7fae2
RB
3941 case BUILT_IN_SPRINTF_CHK:
3942 case BUILT_IN_VSPRINTF_CHK:
edd7ae68 3943 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
dcb7fae2
RB
3944 case BUILT_IN_STRCAT_CHK:
3945 return gimple_fold_builtin_strcat_chk (gsi);
745583f9
RB
3946 case BUILT_IN_STRNCAT_CHK:
3947 return gimple_fold_builtin_strncat_chk (gsi);
cbdd87d4 3948 case BUILT_IN_STRLEN:
dcb7fae2 3949 return gimple_fold_builtin_strlen (gsi);
cbdd87d4 3950 case BUILT_IN_STRCPY:
dcb7fae2 3951 return gimple_fold_builtin_strcpy (gsi,
fef5a0d9 3952 gimple_call_arg (stmt, 0),
dcb7fae2 3953 gimple_call_arg (stmt, 1));
cbdd87d4 3954 case BUILT_IN_STRNCPY:
dcb7fae2 3955 return gimple_fold_builtin_strncpy (gsi,
fef5a0d9
RB
3956 gimple_call_arg (stmt, 0),
3957 gimple_call_arg (stmt, 1),
dcb7fae2 3958 gimple_call_arg (stmt, 2));
9a7eefec 3959 case BUILT_IN_STRCAT:
dcb7fae2
RB
3960 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
3961 gimple_call_arg (stmt, 1));
ad03a744
RB
3962 case BUILT_IN_STRNCAT:
3963 return gimple_fold_builtin_strncat (gsi);
71dea1dd 3964 case BUILT_IN_INDEX:
912d9ec3 3965 case BUILT_IN_STRCHR:
71dea1dd
WD
3966 return gimple_fold_builtin_strchr (gsi, false);
3967 case BUILT_IN_RINDEX:
3968 case BUILT_IN_STRRCHR:
3969 return gimple_fold_builtin_strchr (gsi, true);
c8952930
JJ
3970 case BUILT_IN_STRSTR:
3971 return gimple_fold_builtin_strstr (gsi);
a918bfbf 3972 case BUILT_IN_STRCMP:
8b0b334a 3973 case BUILT_IN_STRCMP_EQ:
a918bfbf
ML
3974 case BUILT_IN_STRCASECMP:
3975 case BUILT_IN_STRNCMP:
8b0b334a 3976 case BUILT_IN_STRNCMP_EQ:
a918bfbf
ML
3977 case BUILT_IN_STRNCASECMP:
3978 return gimple_fold_builtin_string_compare (gsi);
488c6247
ML
3979 case BUILT_IN_MEMCHR:
3980 return gimple_fold_builtin_memchr (gsi);
cbdd87d4 3981 case BUILT_IN_FPUTS:
dcb7fae2
RB
3982 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3983 gimple_call_arg (stmt, 1), false);
cbdd87d4 3984 case BUILT_IN_FPUTS_UNLOCKED:
dcb7fae2
RB
3985 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3986 gimple_call_arg (stmt, 1), true);
cbdd87d4
RG
3987 case BUILT_IN_MEMCPY_CHK:
3988 case BUILT_IN_MEMPCPY_CHK:
3989 case BUILT_IN_MEMMOVE_CHK:
3990 case BUILT_IN_MEMSET_CHK:
dcb7fae2 3991 return gimple_fold_builtin_memory_chk (gsi,
fef5a0d9
RB
3992 gimple_call_arg (stmt, 0),
3993 gimple_call_arg (stmt, 1),
3994 gimple_call_arg (stmt, 2),
3995 gimple_call_arg (stmt, 3),
edd7ae68 3996 fcode);
2625bb5d
RB
3997 case BUILT_IN_STPCPY:
3998 return gimple_fold_builtin_stpcpy (gsi);
cbdd87d4
RG
3999 case BUILT_IN_STRCPY_CHK:
4000 case BUILT_IN_STPCPY_CHK:
dcb7fae2 4001 return gimple_fold_builtin_stxcpy_chk (gsi,
fef5a0d9
RB
4002 gimple_call_arg (stmt, 0),
4003 gimple_call_arg (stmt, 1),
4004 gimple_call_arg (stmt, 2),
edd7ae68 4005 fcode);
cbdd87d4 4006 case BUILT_IN_STRNCPY_CHK:
f3fc9b80 4007 case BUILT_IN_STPNCPY_CHK:
fef5a0d9
RB
4008 return gimple_fold_builtin_stxncpy_chk (gsi,
4009 gimple_call_arg (stmt, 0),
4010 gimple_call_arg (stmt, 1),
4011 gimple_call_arg (stmt, 2),
4012 gimple_call_arg (stmt, 3),
edd7ae68 4013 fcode);
cbdd87d4
RG
4014 case BUILT_IN_SNPRINTF_CHK:
4015 case BUILT_IN_VSNPRINTF_CHK:
edd7ae68 4016 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
a104bd88 4017
edd7ae68
RB
4018 case BUILT_IN_FPRINTF:
4019 case BUILT_IN_FPRINTF_UNLOCKED:
4020 case BUILT_IN_VFPRINTF:
4021 if (n == 2 || n == 3)
4022 return gimple_fold_builtin_fprintf (gsi,
4023 gimple_call_arg (stmt, 0),
4024 gimple_call_arg (stmt, 1),
4025 n == 3
4026 ? gimple_call_arg (stmt, 2)
4027 : NULL_TREE,
4028 fcode);
4029 break;
4030 case BUILT_IN_FPRINTF_CHK:
4031 case BUILT_IN_VFPRINTF_CHK:
4032 if (n == 3 || n == 4)
4033 return gimple_fold_builtin_fprintf (gsi,
4034 gimple_call_arg (stmt, 0),
4035 gimple_call_arg (stmt, 2),
4036 n == 4
4037 ? gimple_call_arg (stmt, 3)
4038 : NULL_TREE,
4039 fcode);
4040 break;
ad03a744
RB
4041 case BUILT_IN_PRINTF:
4042 case BUILT_IN_PRINTF_UNLOCKED:
4043 case BUILT_IN_VPRINTF:
4044 if (n == 1 || n == 2)
4045 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
4046 n == 2
4047 ? gimple_call_arg (stmt, 1)
4048 : NULL_TREE, fcode);
4049 break;
4050 case BUILT_IN_PRINTF_CHK:
4051 case BUILT_IN_VPRINTF_CHK:
4052 if (n == 2 || n == 3)
4053 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
4054 n == 3
4055 ? gimple_call_arg (stmt, 2)
4056 : NULL_TREE, fcode);
242a37f1 4057 break;
48126138
NS
4058 case BUILT_IN_ACC_ON_DEVICE:
4059 return gimple_fold_builtin_acc_on_device (gsi,
4060 gimple_call_arg (stmt, 0));
fe75f732
PK
4061 case BUILT_IN_REALLOC:
4062 return gimple_fold_builtin_realloc (gsi);
4063
fef5a0d9
RB
4064 default:;
4065 }
4066
4067 /* Try the generic builtin folder. */
4068 bool ignore = (gimple_call_lhs (stmt) == NULL);
4069 tree result = fold_call_stmt (stmt, ignore);
4070 if (result)
4071 {
4072 if (ignore)
4073 STRIP_NOPS (result);
4074 else
4075 result = fold_convert (gimple_call_return_type (stmt), result);
4076 if (!update_call_from_tree (gsi, result))
4077 gimplify_and_update_call_from_tree (gsi, result);
4078 return true;
4079 }
4080
4081 return false;
4082}
4083
451e8dae
NS
4084/* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
4085 function calls to constants, where possible. */
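/* For instance, if the worker dimension was fixed at 32, GOACC_DIM_SIZE for
   that axis folds to 32, and GOACC_DIM_POS folds to 0 for any axis whose
   size is 1 (illustrative).  */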
4086
4087static tree
4088fold_internal_goacc_dim (const gimple *call)
4089{
629b3d75
MJ
4090 int axis = oacc_get_ifn_dim_arg (call);
4091 int size = oacc_get_fn_dim_size (current_function_decl, axis);
451e8dae 4092 tree result = NULL_TREE;
67d2229e 4093 tree type = TREE_TYPE (gimple_call_lhs (call));
451e8dae 4094
67d2229e 4095 switch (gimple_call_internal_fn (call))
451e8dae 4096 {
67d2229e
TV
4097 case IFN_GOACC_DIM_POS:
4098 /* If the size is 1, we know the answer. */
4099 if (size == 1)
4100 result = build_int_cst (type, 0);
4101 break;
4102 case IFN_GOACC_DIM_SIZE:
4103 /* If the size is not dynamic, we know the answer. */
4104 if (size)
4105 result = build_int_cst (type, size);
4106 break;
4107 default:
4108 break;
451e8dae
NS
4109 }
4110
4111 return result;
4112}
4113
849a76a5
JJ
4114/* Return true if STMT is an __atomic_compare_exchange_N call that is
4115   suitable for conversion into IFN_ATOMIC_COMPARE_EXCHANGE, i.e. when its
4116   second argument is &VAR and VAR is only addressable because of such calls.  */
4117
4118bool
4119optimize_atomic_compare_exchange_p (gimple *stmt)
4120{
4121 if (gimple_call_num_args (stmt) != 6
4122 || !flag_inline_atomics
4123 || !optimize
45b2222a 4124 || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
849a76a5
JJ
4125 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
4126 || !gimple_vdef (stmt)
4127 || !gimple_vuse (stmt))
4128 return false;
4129
4130 tree fndecl = gimple_call_fndecl (stmt);
4131 switch (DECL_FUNCTION_CODE (fndecl))
4132 {
4133 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
4134 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
4135 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
4136 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
4137 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
4138 break;
4139 default:
4140 return false;
4141 }
4142
4143 tree expected = gimple_call_arg (stmt, 1);
4144 if (TREE_CODE (expected) != ADDR_EXPR
1de3c940
JJ
4145 || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
4146 return false;
4147
4148 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
4149 if (!is_gimple_reg_type (etype)
849a76a5 4150 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
1de3c940
JJ
4151 || TREE_THIS_VOLATILE (etype)
4152 || VECTOR_TYPE_P (etype)
4153 || TREE_CODE (etype) == COMPLEX_TYPE
4154 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
4155 might not preserve all the bits. See PR71716. */
4156 || SCALAR_FLOAT_TYPE_P (etype)
73a699ae
RS
4157 || maybe_ne (TYPE_PRECISION (etype),
4158 GET_MODE_BITSIZE (TYPE_MODE (etype))))
849a76a5
JJ
4159 return false;
4160
4161 tree weak = gimple_call_arg (stmt, 3);
4162 if (!integer_zerop (weak) && !integer_onep (weak))
4163 return false;
4164
4165 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
4166 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
4167 machine_mode mode = TYPE_MODE (itype);
4168
4169 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
4170 == CODE_FOR_nothing
4171 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
4172 return false;
4173
cf098191 4174 if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
849a76a5
JJ
4175 return false;
4176
4177 return true;
4178}
4179
4180/* Fold
4181 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
4182 into
4183 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
4184 i = IMAGPART_EXPR <t>;
4185 r = (_Bool) i;
4186 e = REALPART_EXPR <t>; */
4187
4188void
4189fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
4190{
4191 gimple *stmt = gsi_stmt (*gsi);
4192 tree fndecl = gimple_call_fndecl (stmt);
4193 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
4194 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
4195 tree ctype = build_complex_type (itype);
4196 tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
cc195d46
JJ
4197 bool throws = false;
4198 edge e = NULL;
849a76a5
JJ
4199 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
4200 expected);
4201 gsi_insert_before (gsi, g, GSI_SAME_STMT);
4202 gimple_stmt_iterator gsiret = gsi_for_stmt (g);
4203 if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
4204 {
4205 g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
4206 build1 (VIEW_CONVERT_EXPR, itype,
4207 gimple_assign_lhs (g)));
4208 gsi_insert_before (gsi, g, GSI_SAME_STMT);
4209 }
4210 int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
4211 + int_size_in_bytes (itype);
4212 g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
4213 gimple_call_arg (stmt, 0),
4214 gimple_assign_lhs (g),
4215 gimple_call_arg (stmt, 2),
4216 build_int_cst (integer_type_node, flag),
4217 gimple_call_arg (stmt, 4),
4218 gimple_call_arg (stmt, 5));
4219 tree lhs = make_ssa_name (ctype);
4220 gimple_call_set_lhs (g, lhs);
779724a5 4221 gimple_move_vops (g, stmt);
cc195d46 4222 tree oldlhs = gimple_call_lhs (stmt);
36bbc05d 4223 if (stmt_can_throw_internal (cfun, stmt))
cc195d46
JJ
4224 {
4225 throws = true;
4226 e = find_fallthru_edge (gsi_bb (*gsi)->succs);
4227 }
4228 gimple_call_set_nothrow (as_a <gcall *> (g),
4229 gimple_call_nothrow_p (as_a <gcall *> (stmt)));
4230 gimple_call_set_lhs (stmt, NULL_TREE);
4231 gsi_replace (gsi, g, true);
4232 if (oldlhs)
849a76a5 4233 {
849a76a5
JJ
4234 g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
4235 build1 (IMAGPART_EXPR, itype, lhs));
cc195d46
JJ
4236 if (throws)
4237 {
4238 gsi_insert_on_edge_immediate (e, g);
4239 *gsi = gsi_for_stmt (g);
4240 }
4241 else
4242 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4243 g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
4244 gsi_insert_after (gsi, g, GSI_NEW_STMT);
849a76a5 4245 }
849a76a5
JJ
4246 g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
4247 build1 (REALPART_EXPR, itype, lhs));
cc195d46
JJ
4248 if (throws && oldlhs == NULL_TREE)
4249 {
4250 gsi_insert_on_edge_immediate (e, g);
4251 *gsi = gsi_for_stmt (g);
4252 }
4253 else
4254 gsi_insert_after (gsi, g, GSI_NEW_STMT);
849a76a5
JJ
4255 if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
4256 {
4257 g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
4258 VIEW_CONVERT_EXPR,
4259 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
4260 gimple_assign_lhs (g)));
4261 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4262 }
4263 g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
4264 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4265 *gsi = gsiret;
4266}
4267
1304953e
JJ
4268/* Return true if the result of ARG0 CODE ARG1, computed in infinite signed
4269   precision, does not fit into TYPE.  The overflow check is performed
4270   regardless of -fwrapv, and also for unsigned types.  */
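/* For example, for an 8-bit unsigned TYPE, 200 + 100 = 300 does not fit in
   [0, 255] and is reported as overflowing; for an 8-bit signed TYPE,
   100 + 100 = 200 > 127 overflows as well, even though both computations
   would silently wrap at run time (illustrative).  */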
4271
4272bool
4273arith_overflowed_p (enum tree_code code, const_tree type,
4274 const_tree arg0, const_tree arg1)
4275{
1304953e
JJ
4276 widest2_int warg0 = widest2_int_cst (arg0);
4277 widest2_int warg1 = widest2_int_cst (arg1);
4278 widest2_int wres;
4279 switch (code)
4280 {
4281 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
4282 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
4283 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
4284 default: gcc_unreachable ();
4285 }
4286 signop sign = TYPE_SIGN (type);
4287 if (sign == UNSIGNED && wi::neg_p (wres))
4288 return true;
4289 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
4290}
4291
868363d4
RS
4292/* If IFN_MASK_LOAD/STORE call CALL is unconditional, return a MEM_REF
4293 for the memory it references, otherwise return null. VECTYPE is the
4294 type of the memory vector. */
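/* For instance, an IFN_MASK_LOAD whose mask argument is the all-ones constant
   { -1, -1, -1, -1 } is really an unconditional load and can be rewritten as
   a plain MEM_REF of the (suitably aligned) vector type (illustrative).  */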
4295
4296static tree
4297gimple_fold_mask_load_store_mem_ref (gcall *call, tree vectype)
4298{
4299 tree ptr = gimple_call_arg (call, 0);
4300 tree alias_align = gimple_call_arg (call, 1);
4301 tree mask = gimple_call_arg (call, 2);
4302 if (!tree_fits_uhwi_p (alias_align) || !integer_all_onesp (mask))
4303 return NULL_TREE;
4304
4305 unsigned HOST_WIDE_INT align = tree_to_uhwi (alias_align) * BITS_PER_UNIT;
4306 if (TYPE_ALIGN (vectype) != align)
4307 vectype = build_aligned_type (vectype, align);
4308 tree offset = build_zero_cst (TREE_TYPE (alias_align));
4309 return fold_build2 (MEM_REF, vectype, ptr, offset);
4310}
4311
4312/* Try to fold IFN_MASK_LOAD call CALL. Return true on success. */
4313
4314static bool
4315gimple_fold_mask_load (gimple_stmt_iterator *gsi, gcall *call)
4316{
4317 tree lhs = gimple_call_lhs (call);
4318 if (!lhs)
4319 return false;
4320
4321 if (tree rhs = gimple_fold_mask_load_store_mem_ref (call, TREE_TYPE (lhs)))
4322 {
4323 gassign *new_stmt = gimple_build_assign (lhs, rhs);
4324 gimple_set_location (new_stmt, gimple_location (call));
4325 gimple_move_vops (new_stmt, call);
4326 gsi_replace (gsi, new_stmt, false);
4327 return true;
4328 }
4329 return false;
4330}
4331
4332/* Try to fold IFN_MASK_STORE call CALL. Return true on success. */
4333
4334static bool
4335gimple_fold_mask_store (gimple_stmt_iterator *gsi, gcall *call)
4336{
4337 tree rhs = gimple_call_arg (call, 3);
4338 if (tree lhs = gimple_fold_mask_load_store_mem_ref (call, TREE_TYPE (rhs)))
4339 {
4340 gassign *new_stmt = gimple_build_assign (lhs, rhs);
4341 gimple_set_location (new_stmt, gimple_location (call));
4342 gimple_move_vops (new_stmt, call);
4343 gsi_replace (gsi, new_stmt, false);
4344 return true;
4345 }
4346 return false;
4347}
4348
cbdd87d4
RG
4349/* Attempt to fold a call statement referenced by the statement iterator GSI.
4350 The statement may be replaced by another statement, e.g., if the call
4351 simplifies to a constant value. Return true if any changes were made.
4352 It is assumed that the operands have been previously folded. */
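/* For example, a call through an OBJ_TYPE_REF with a single possible target
   is turned into a direct call to that target, and a call to a builtin such
   as strlen ("abc") is folded to the constant 3 (illustrative).  */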
4353
e021c122 4354static bool
ceeffab0 4355gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
cbdd87d4 4356{
538dd0b7 4357 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3b45a007 4358 tree callee;
e021c122
RG
4359 bool changed = false;
4360 unsigned i;
cbdd87d4 4361
e021c122
RG
4362 /* Fold *& in call arguments. */
4363 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4364 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
4365 {
4366 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
4367 if (tmp)
4368 {
4369 gimple_call_set_arg (stmt, i, tmp);
4370 changed = true;
4371 }
4372 }
3b45a007
RG
4373
4374 /* Check for virtual calls that became direct calls. */
4375 callee = gimple_call_fn (stmt);
25583c4f 4376 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
3b45a007 4377 {
49c471e3
MJ
4378 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
4379 {
450ad0cd
JH
4380 if (dump_file && virtual_method_call_p (callee)
4381 && !possible_polymorphic_call_target_p
6f8091fc
JH
4382 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
4383 (OBJ_TYPE_REF_EXPR (callee)))))
450ad0cd
JH
4384 {
4385 fprintf (dump_file,
a70e9985 4386 "Type inheritance inconsistent devirtualization of ");
450ad0cd
JH
4387 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
4388 fprintf (dump_file, " to ");
4389 print_generic_expr (dump_file, callee, TDF_SLIM);
4390 fprintf (dump_file, "\n");
4391 }
4392
49c471e3 4393 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
e021c122
RG
4394 changed = true;
4395 }
a70e9985 4396 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
e021c122 4397 {
61dd6a2e
JH
4398 bool final;
4399 vec <cgraph_node *>targets
058d0a90 4400 = possible_polymorphic_call_targets (callee, stmt, &final);
2b5f0895 4401 if (final && targets.length () <= 1 && dbg_cnt (devirt))
e021c122 4402 {
a70e9985 4403 tree lhs = gimple_call_lhs (stmt);
2b5f0895
XDL
4404 if (dump_enabled_p ())
4405 {
4f5b9c80 4406 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
2b5f0895
XDL
4407 "folding virtual function call to %s\n",
4408 targets.length () == 1
4409 ? targets[0]->name ()
4410 : "__builtin_unreachable");
4411 }
61dd6a2e 4412 if (targets.length () == 1)
cf3e5a89 4413 {
18954840
JJ
4414 tree fndecl = targets[0]->decl;
4415 gimple_call_set_fndecl (stmt, fndecl);
cf3e5a89 4416 changed = true;
18954840
JJ
4417 /* If changing the call to __cxa_pure_virtual
4418 or similar noreturn function, adjust gimple_call_fntype
4419 too. */
865f7046 4420 if (gimple_call_noreturn_p (stmt)
18954840
JJ
4421 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
4422 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
4423 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
4424 == void_type_node))
4425 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
a70e9985 4426 /* If the call becomes noreturn, remove the lhs. */
3cee7e4e
MP
4427 if (lhs
4428 && gimple_call_noreturn_p (stmt)
18954840 4429 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
abd3a68c 4430 || should_remove_lhs_p (lhs)))
a70e9985
JJ
4431 {
4432 if (TREE_CODE (lhs) == SSA_NAME)
4433 {
b731b390 4434 tree var = create_tmp_var (TREE_TYPE (lhs));
a70e9985 4435 tree def = get_or_create_ssa_default_def (cfun, var);
355fe088 4436 gimple *new_stmt = gimple_build_assign (lhs, def);
a70e9985
JJ
4437 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
4438 }
4439 gimple_call_set_lhs (stmt, NULL_TREE);
4440 }
0b986c6a 4441 maybe_remove_unused_call_args (cfun, stmt);
cf3e5a89 4442 }
a70e9985 4443 else
cf3e5a89
JJ
4444 {
4445 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
355fe088 4446 gimple *new_stmt = gimple_build_call (fndecl, 0);
cf3e5a89 4447 gimple_set_location (new_stmt, gimple_location (stmt));
2da6996c
RB
4448 /* If the call had a SSA name as lhs morph that into
4449 an uninitialized value. */
a70e9985
JJ
4450 if (lhs && TREE_CODE (lhs) == SSA_NAME)
4451 {
b731b390 4452 tree var = create_tmp_var (TREE_TYPE (lhs));
2da6996c
RB
4453 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
4454 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
4455 set_ssa_default_def (cfun, var, lhs);
42e52a51 4456 }
779724a5 4457 gimple_move_vops (new_stmt, stmt);
2da6996c 4458 gsi_replace (gsi, new_stmt, false);
cf3e5a89
JJ
4459 return true;
4460 }
e021c122 4461 }
49c471e3 4462 }
e021c122 4463 }
49c471e3 4464
f2d3d07e
RH
4465 /* Check for indirect calls that became direct calls, and then
4466 no longer require a static chain. */
4467 if (gimple_call_chain (stmt))
4468 {
4469 tree fn = gimple_call_fndecl (stmt);
4470 if (fn && !DECL_STATIC_CHAIN (fn))
4471 {
4472 gimple_call_set_chain (stmt, NULL);
4473 changed = true;
4474 }
4475 else
4476 {
4477 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
4478 if (tmp)
4479 {
4480 gimple_call_set_chain (stmt, tmp);
4481 changed = true;
4482 }
4483 }
4484 }
4485
e021c122
RG
4486 if (inplace)
4487 return changed;
4488
4489 /* Check for builtins that CCP can handle using information not
4490 available in the generic fold routines. */
fef5a0d9
RB
4491 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4492 {
4493 if (gimple_fold_builtin (gsi))
4494 changed = true;
4495 }
4496 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
e021c122 4497 {
ea679d55 4498 changed |= targetm.gimple_fold_builtin (gsi);
3b45a007 4499 }
368b454d 4500 else if (gimple_call_internal_p (stmt))
ed9c79e1 4501 {
368b454d
JJ
4502 enum tree_code subcode = ERROR_MARK;
4503 tree result = NULL_TREE;
1304953e
JJ
4504 bool cplx_result = false;
4505 tree overflow = NULL_TREE;
368b454d
JJ
4506 switch (gimple_call_internal_fn (stmt))
4507 {
4508 case IFN_BUILTIN_EXPECT:
4509 result = fold_builtin_expect (gimple_location (stmt),
4510 gimple_call_arg (stmt, 0),
4511 gimple_call_arg (stmt, 1),
1e9168b2
ML
4512 gimple_call_arg (stmt, 2),
4513 NULL_TREE);
368b454d 4514 break;
0e82f089 4515 case IFN_UBSAN_OBJECT_SIZE:
ca1150f0
JJ
4516 {
4517 tree offset = gimple_call_arg (stmt, 1);
4518 tree objsize = gimple_call_arg (stmt, 2);
4519 if (integer_all_onesp (objsize)
4520 || (TREE_CODE (offset) == INTEGER_CST
4521 && TREE_CODE (objsize) == INTEGER_CST
4522 && tree_int_cst_le (offset, objsize)))
4523 {
4524 replace_call_with_value (gsi, NULL_TREE);
4525 return true;
4526 }
4527 }
4528 break;
4529 case IFN_UBSAN_PTR:
4530 if (integer_zerop (gimple_call_arg (stmt, 1)))
0e82f089 4531 {
ca1150f0 4532 replace_call_with_value (gsi, NULL_TREE);
0e82f089
MP
4533 return true;
4534 }
4535 break;
ca1150f0
JJ
4536 case IFN_UBSAN_BOUNDS:
4537 {
4538 tree index = gimple_call_arg (stmt, 1);
4539 tree bound = gimple_call_arg (stmt, 2);
4540 if (TREE_CODE (index) == INTEGER_CST
4541 && TREE_CODE (bound) == INTEGER_CST)
4542 {
4543 index = fold_convert (TREE_TYPE (bound), index);
4544 if (TREE_CODE (index) == INTEGER_CST
4545 && tree_int_cst_le (index, bound))
4546 {
4547 replace_call_with_value (gsi, NULL_TREE);
4548 return true;
4549 }
4550 }
4551 }
4552 break;
451e8dae
NS
4553 case IFN_GOACC_DIM_SIZE:
4554 case IFN_GOACC_DIM_POS:
4555 result = fold_internal_goacc_dim (stmt);
4556 break;
368b454d
JJ
4557 case IFN_UBSAN_CHECK_ADD:
4558 subcode = PLUS_EXPR;
4559 break;
4560 case IFN_UBSAN_CHECK_SUB:
4561 subcode = MINUS_EXPR;
4562 break;
4563 case IFN_UBSAN_CHECK_MUL:
4564 subcode = MULT_EXPR;
4565 break;
1304953e
JJ
4566 case IFN_ADD_OVERFLOW:
4567 subcode = PLUS_EXPR;
4568 cplx_result = true;
4569 break;
4570 case IFN_SUB_OVERFLOW:
4571 subcode = MINUS_EXPR;
4572 cplx_result = true;
4573 break;
4574 case IFN_MUL_OVERFLOW:
4575 subcode = MULT_EXPR;
4576 cplx_result = true;
4577 break;
868363d4
RS
4578 case IFN_MASK_LOAD:
4579 changed |= gimple_fold_mask_load (gsi, stmt);
4580 break;
4581 case IFN_MASK_STORE:
4582 changed |= gimple_fold_mask_store (gsi, stmt);
4583 break;
368b454d
JJ
4584 default:
4585 break;
4586 }
4587 if (subcode != ERROR_MARK)
4588 {
4589 tree arg0 = gimple_call_arg (stmt, 0);
4590 tree arg1 = gimple_call_arg (stmt, 1);
1304953e
JJ
4591 tree type = TREE_TYPE (arg0);
4592 if (cplx_result)
4593 {
4594 tree lhs = gimple_call_lhs (stmt);
4595 if (lhs == NULL_TREE)
4596 type = NULL_TREE;
4597 else
4598 type = TREE_TYPE (TREE_TYPE (lhs));
4599 }
4600 if (type == NULL_TREE)
4601 ;
368b454d 4602 /* x = y + 0; x = y - 0; x = y * 0; */
1304953e
JJ
4603 else if (integer_zerop (arg1))
4604 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
368b454d
JJ
4605 /* x = 0 + y; x = 0 * y; */
4606 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
1304953e 4607 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
368b454d
JJ
4608 /* x = y - y; */
4609 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
1304953e 4610 result = integer_zero_node;
368b454d 4611 /* x = y * 1; x = 1 * y; */
1304953e
JJ
4612 else if (subcode == MULT_EXPR && integer_onep (arg1))
4613 result = arg0;
4614 else if (subcode == MULT_EXPR && integer_onep (arg0))
4615 result = arg1;
4616 else if (TREE_CODE (arg0) == INTEGER_CST
4617 && TREE_CODE (arg1) == INTEGER_CST)
368b454d 4618 {
1304953e
JJ
4619 if (cplx_result)
4620 result = int_const_binop (subcode, fold_convert (type, arg0),
4621 fold_convert (type, arg1));
4622 else
4623 result = int_const_binop (subcode, arg0, arg1);
4624 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
4625 {
4626 if (cplx_result)
4627 overflow = build_one_cst (type);
4628 else
4629 result = NULL_TREE;
4630 }
4631 }
4632 if (result)
4633 {
4634 if (result == integer_zero_node)
4635 result = build_zero_cst (type);
4636 else if (cplx_result && TREE_TYPE (result) != type)
4637 {
4638 if (TREE_CODE (result) == INTEGER_CST)
4639 {
4640 if (arith_overflowed_p (PLUS_EXPR, type, result,
4641 integer_zero_node))
4642 overflow = build_one_cst (type);
4643 }
4644 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
4645 && TYPE_UNSIGNED (type))
4646 || (TYPE_PRECISION (type)
4647 < (TYPE_PRECISION (TREE_TYPE (result))
4648 + (TYPE_UNSIGNED (TREE_TYPE (result))
4649 && !TYPE_UNSIGNED (type)))))
4650 result = NULL_TREE;
4651 if (result)
4652 result = fold_convert (type, result);
4653 }
368b454d
JJ
4654 }
4655 }
1304953e 4656
ed9c79e1
JJ
4657 if (result)
4658 {
1304953e
JJ
4659 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
4660 result = drop_tree_overflow (result);
4661 if (cplx_result)
4662 {
4663 if (overflow == NULL_TREE)
4664 overflow = build_zero_cst (TREE_TYPE (result));
4665 tree ctype = build_complex_type (TREE_TYPE (result));
4666 if (TREE_CODE (result) == INTEGER_CST
4667 && TREE_CODE (overflow) == INTEGER_CST)
4668 result = build_complex (ctype, result, overflow);
4669 else
4670 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
4671 ctype, result, overflow);
4672 }
ed9c79e1
JJ
4673 if (!update_call_from_tree (gsi, result))
4674 gimplify_and_update_call_from_tree (gsi, result);
4675 changed = true;
4676 }
4677 }
3b45a007 4678
e021c122 4679 return changed;
cbdd87d4
RG
4680}
4681
e0ee10ed 4682
89a79e96
RB
4683/* Return true if the SSA name NAME has a use on statement STMT.  */
4684
4685static bool
355fe088 4686has_use_on_stmt (tree name, gimple *stmt)
89a79e96
RB
4687{
4688 imm_use_iterator iter;
4689 use_operand_p use_p;
4690 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
4691 if (USE_STMT (use_p) == stmt)
4692 return true;
4693 return false;
4694}
4695
e0ee10ed
RB
4696/* Worker for fold_stmt_1 dispatch to pattern based folding with
4697 gimple_simplify.
4698
4699   Replaces the statement at *GSI with the simplification result in RES_OP
4700   and the associated statements in *SEQ.  Does the replacement
4701   according to INPLACE and returns true if the operation succeeded.  */
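/* For example, if gimple_simplify reduced the condition of
   "if (x_1 != x_1)" to the integer constant 0, the GIMPLE_COND is simply
   rewritten via gimple_cond_make_false below (illustrative).  */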
4702
4703static bool
4704replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
5d75ad95 4705 gimple_match_op *res_op,
e0ee10ed
RB
4706 gimple_seq *seq, bool inplace)
4707{
355fe088 4708 gimple *stmt = gsi_stmt (*gsi);
5d75ad95
RS
4709 tree *ops = res_op->ops;
4710 unsigned int num_ops = res_op->num_ops;
e0ee10ed
RB
4711
4712 /* Play safe and do not allow abnormals to be mentioned in
89a79e96
RB
4713 newly created statements. See also maybe_push_res_to_seq.
4714 As an exception allow such uses if there was a use of the
4715 same SSA name on the old stmt. */
5d75ad95
RS
4716 for (unsigned int i = 0; i < num_ops; ++i)
4717 if (TREE_CODE (ops[i]) == SSA_NAME
4718 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])
4719 && !has_use_on_stmt (ops[i], stmt))
4720 return false;
4721
4722 if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
4723 for (unsigned int i = 0; i < 2; ++i)
4724 if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
4725 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))
4726 && !has_use_on_stmt (TREE_OPERAND (ops[0], i), stmt))
4727 return false;
e0ee10ed 4728
fec40d06
RS
4729 /* Don't insert new statements when INPLACE is true, even if we could
4730 reuse STMT for the final statement. */
4731 if (inplace && !gimple_seq_empty_p (*seq))
4732 return false;
4733
538dd0b7 4734 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
e0ee10ed 4735 {
5d75ad95
RS
4736 gcc_assert (res_op->code.is_tree_code ());
4737 if (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison
e0ee10ed
RB
4738 /* GIMPLE_CONDs condition may not throw. */
4739 && (!flag_exceptions
4740 || !cfun->can_throw_non_call_exceptions
5d75ad95 4741 || !operation_could_trap_p (res_op->code,
e0ee10ed
RB
4742 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
4743 false, NULL_TREE)))
5d75ad95
RS
4744 gimple_cond_set_condition (cond_stmt, res_op->code, ops[0], ops[1]);
4745 else if (res_op->code == SSA_NAME)
538dd0b7 4746 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
e0ee10ed 4747 build_zero_cst (TREE_TYPE (ops[0])));
5d75ad95 4748 else if (res_op->code == INTEGER_CST)
e0ee10ed
RB
4749 {
4750 if (integer_zerop (ops[0]))
538dd0b7 4751 gimple_cond_make_false (cond_stmt);
e0ee10ed 4752 else
538dd0b7 4753 gimple_cond_make_true (cond_stmt);
e0ee10ed
RB
4754 }
4755 else if (!inplace)
4756 {
5d75ad95 4757 tree res = maybe_push_res_to_seq (res_op, seq);
e0ee10ed
RB
4758 if (!res)
4759 return false;
538dd0b7 4760 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
e0ee10ed
RB
4761 build_zero_cst (TREE_TYPE (res)));
4762 }
4763 else
4764 return false;
4765 if (dump_file && (dump_flags & TDF_DETAILS))
4766 {
4767 fprintf (dump_file, "gimple_simplified to ");
4768 if (!gimple_seq_empty_p (*seq))
4769 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4770 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4771 0, TDF_SLIM);
4772 }
4773 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4774 return true;
4775 }
4776 else if (is_gimple_assign (stmt)
5d75ad95 4777 && res_op->code.is_tree_code ())
e0ee10ed
RB
4778 {
4779 if (!inplace
5d75ad95 4780 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (res_op->code))
e0ee10ed 4781 {
5d75ad95
RS
4782 maybe_build_generic_op (res_op);
4783 gimple_assign_set_rhs_with_ops (gsi, res_op->code,
4784 res_op->op_or_null (0),
4785 res_op->op_or_null (1),
4786 res_op->op_or_null (2));
e0ee10ed
RB
4787 if (dump_file && (dump_flags & TDF_DETAILS))
4788 {
4789 fprintf (dump_file, "gimple_simplified to ");
4790 if (!gimple_seq_empty_p (*seq))
4791 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4792 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4793 0, TDF_SLIM);
4794 }
4795 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4796 return true;
4797 }
4798 }
5d75ad95
RS
4799 else if (res_op->code.is_fn_code ()
4800 && gimple_call_combined_fn (stmt) == res_op->code)
37d486ab 4801 {
5d75ad95
RS
4802 gcc_assert (num_ops == gimple_call_num_args (stmt));
4803 for (unsigned int i = 0; i < num_ops; ++i)
4804 gimple_call_set_arg (stmt, i, ops[i]);
fec40d06
RS
4805 if (dump_file && (dump_flags & TDF_DETAILS))
4806 {
4807 fprintf (dump_file, "gimple_simplified to ");
4808 if (!gimple_seq_empty_p (*seq))
4809 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4810 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
4811 }
4812 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
37d486ab
RB
4813 return true;
4814 }
e0ee10ed
RB
4815 else if (!inplace)
4816 {
4817 if (gimple_has_lhs (stmt))
4818 {
4819 tree lhs = gimple_get_lhs (stmt);
5d75ad95 4820 if (!maybe_push_res_to_seq (res_op, seq, lhs))
de665bbd 4821 return false;
e0ee10ed
RB
4822 if (dump_file && (dump_flags & TDF_DETAILS))
4823 {
4824 fprintf (dump_file, "gimple_simplified to ");
4825 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4826 }
4827 gsi_replace_with_seq_vops (gsi, *seq);
4828 return true;
4829 }
4830 else
4831 gcc_unreachable ();
4832 }
4833
4834 return false;
4835}
4836
040292e7
RB
4837/* Canonicalize MEM_REFs invariant address operand after propagation. */
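/* For instance, MEM[&a.b, 4] is rewritten in terms of &a and the constant
   byte offset of member b plus 4, and MEM[&x, 0] of a scalar decl "x" with
   the same access semantics becomes plain "x" (illustrative of the cases
   handled below).  */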
4838
4839static bool
4840maybe_canonicalize_mem_ref_addr (tree *t)
4841{
4842 bool res = false;
fe8c8f1e 4843 tree *orig_t = t;
040292e7
RB
4844
4845 if (TREE_CODE (*t) == ADDR_EXPR)
4846 t = &TREE_OPERAND (*t, 0);
4847
f17a223d
RB
4848 /* The C and C++ frontends use an ARRAY_REF for indexing with their
4849 generic vector extension. The actual vector referenced is
4850 view-converted to an array type for this purpose. If the index
4851 is constant the canonical representation in the middle-end is a
4852 BIT_FIELD_REF so re-write the former to the latter here. */
4853 if (TREE_CODE (*t) == ARRAY_REF
4854 && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
4855 && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
4856 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
4857 {
4858 tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
4859 if (VECTOR_TYPE_P (vtype))
4860 {
4861 tree low = array_ref_low_bound (*t);
4862 if (TREE_CODE (low) == INTEGER_CST)
4863 {
4864 if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
4865 {
4866 widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
4867 wi::to_widest (low));
4868 idx = wi::mul (idx, wi::to_widest
4869 (TYPE_SIZE (TREE_TYPE (*t))));
4870 widest_int ext
4871 = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
4872 if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
4873 {
4874 *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
4875 TREE_TYPE (*t),
4876 TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
4877 TYPE_SIZE (TREE_TYPE (*t)),
92e29a5e 4878 wide_int_to_tree (bitsizetype, idx));
f17a223d
RB
4879 res = true;
4880 }
4881 }
4882 }
4883 }
4884 }
4885
040292e7
RB
4886 while (handled_component_p (*t))
4887 t = &TREE_OPERAND (*t, 0);
4888
 4889 /* Canonicalize MEM [&foo.bar, 0] which appears after propagation
 4890 of invariant addresses into an SSA name MEM_REF address. */
4891 if (TREE_CODE (*t) == MEM_REF
4892 || TREE_CODE (*t) == TARGET_MEM_REF)
4893 {
4894 tree addr = TREE_OPERAND (*t, 0);
4895 if (TREE_CODE (addr) == ADDR_EXPR
4896 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
4897 || handled_component_p (TREE_OPERAND (addr, 0))))
4898 {
4899 tree base;
a90c8804 4900 poly_int64 coffset;
040292e7
RB
4901 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
4902 &coffset);
4903 if (!base)
4904 gcc_unreachable ();
4905
4906 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
4907 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
4908 TREE_OPERAND (*t, 1),
4909 size_int (coffset));
4910 res = true;
4911 }
4912 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
4913 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
4914 }
4915
4916 /* Canonicalize back MEM_REFs to plain reference trees if the object
4917 accessed is a decl that has the same access semantics as the MEM_REF. */
4918 if (TREE_CODE (*t) == MEM_REF
4919 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
f3dccf50
RB
4920 && integer_zerop (TREE_OPERAND (*t, 1))
4921 && MR_DEPENDENCE_CLIQUE (*t) == 0)
040292e7
RB
4922 {
4923 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4924 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
4925 if (/* Same volatile qualification. */
4926 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
4927 /* Same TBAA behavior with -fstrict-aliasing. */
4928 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
4929 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
4930 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
4931 /* Same alignment. */
4932 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
 4933 /* We have to be careful here not to drop a required conversion
 4934 from the rhs to the lhs if *t appears on the lhs, or vice-versa
 4935 if it appears on the rhs. Thus require strict type
 4936 compatibility. */
4937 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
4938 {
4939 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4940 res = true;
4941 }
4942 }
4943
fe8c8f1e
RB
4944 else if (TREE_CODE (*orig_t) == ADDR_EXPR
4945 && TREE_CODE (*t) == MEM_REF
4946 && TREE_CODE (TREE_OPERAND (*t, 0)) == INTEGER_CST)
4947 {
4948 tree base;
4949 poly_int64 coffset;
4950 base = get_addr_base_and_unit_offset (TREE_OPERAND (*orig_t, 0),
4951 &coffset);
4952 if (base)
4953 {
4954 gcc_assert (TREE_CODE (base) == MEM_REF);
4955 poly_int64 moffset;
4956 if (mem_ref_offset (base).to_shwi (&moffset))
4957 {
4958 coffset += moffset;
4959 if (wi::to_poly_wide (TREE_OPERAND (base, 0)).to_shwi (&moffset))
4960 {
4961 coffset += moffset;
4962 *orig_t = build_int_cst (TREE_TYPE (*orig_t), coffset);
4963 return true;
4964 }
4965 }
4966 }
4967 }
4968
040292e7
RB
4969 /* Canonicalize TARGET_MEM_REF in particular with respect to
4970 the indexes becoming constant. */
4971 else if (TREE_CODE (*t) == TARGET_MEM_REF)
4972 {
4973 tree tem = maybe_fold_tmr (*t);
4974 if (tem)
4975 {
4976 *t = tem;
4977 res = true;
4978 }
4979 }
4980
4981 return res;
4982}
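/* Illustrative sketch (hypothetical GIMPLE, offsets made up): two of the
   canonicalizations done by this function.  Assuming a struct s whose
   field f sits at byte offset 4, and an int variable x:

     MEM[&s.f, 0]   =>   MEM[&s, 4]   (address re-based onto the decl)
     MEM[&x, 0]     =>   x            (same type, volatility and alignment)

   The second form only triggers when the strict type-compatibility,
   volatility and alignment checks above all pass.  */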
4983
cbdd87d4
RG
4984/* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
4985 distinguishes both cases. */
4986
4987static bool
e0ee10ed 4988fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
cbdd87d4
RG
4989{
4990 bool changed = false;
355fe088 4991 gimple *stmt = gsi_stmt (*gsi);
a8b85ce9 4992 bool nowarning = gimple_no_warning_p (stmt);
cbdd87d4 4993 unsigned i;
a8b85ce9 4994 fold_defer_overflow_warnings ();
cbdd87d4 4995
040292e7
RB
4996 /* First do required canonicalization of [TARGET_]MEM_REF addresses
4997 after propagation.
4998 ??? This shouldn't be done in generic folding but in the
4999 propagation helpers which also know whether an address was
89a79e96
RB
5000 propagated.
5001 Also canonicalize operand order. */
040292e7
RB
5002 switch (gimple_code (stmt))
5003 {
5004 case GIMPLE_ASSIGN:
5005 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
5006 {
5007 tree *rhs = gimple_assign_rhs1_ptr (stmt);
5008 if ((REFERENCE_CLASS_P (*rhs)
5009 || TREE_CODE (*rhs) == ADDR_EXPR)
5010 && maybe_canonicalize_mem_ref_addr (rhs))
5011 changed = true;
5012 tree *lhs = gimple_assign_lhs_ptr (stmt);
5013 if (REFERENCE_CLASS_P (*lhs)
5014 && maybe_canonicalize_mem_ref_addr (lhs))
5015 changed = true;
5016 }
89a79e96
RB
5017 else
5018 {
5019 /* Canonicalize operand order. */
5020 enum tree_code code = gimple_assign_rhs_code (stmt);
5021 if (TREE_CODE_CLASS (code) == tcc_comparison
5022 || commutative_tree_code (code)
5023 || commutative_ternary_tree_code (code))
5024 {
5025 tree rhs1 = gimple_assign_rhs1 (stmt);
5026 tree rhs2 = gimple_assign_rhs2 (stmt);
14e72812 5027 if (tree_swap_operands_p (rhs1, rhs2))
89a79e96
RB
5028 {
5029 gimple_assign_set_rhs1 (stmt, rhs2);
5030 gimple_assign_set_rhs2 (stmt, rhs1);
5031 if (TREE_CODE_CLASS (code) == tcc_comparison)
5032 gimple_assign_set_rhs_code (stmt,
5033 swap_tree_comparison (code));
5034 changed = true;
5035 }
5036 }
5037 }
040292e7
RB
5038 break;
5039 case GIMPLE_CALL:
5040 {
5041 for (i = 0; i < gimple_call_num_args (stmt); ++i)
5042 {
5043 tree *arg = gimple_call_arg_ptr (stmt, i);
5044 if (REFERENCE_CLASS_P (*arg)
5045 && maybe_canonicalize_mem_ref_addr (arg))
5046 changed = true;
5047 }
5048 tree *lhs = gimple_call_lhs_ptr (stmt);
5049 if (*lhs
5050 && REFERENCE_CLASS_P (*lhs)
5051 && maybe_canonicalize_mem_ref_addr (lhs))
5052 changed = true;
5053 break;
5054 }
5055 case GIMPLE_ASM:
5056 {
538dd0b7
DM
5057 gasm *asm_stmt = as_a <gasm *> (stmt);
5058 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
040292e7 5059 {
538dd0b7 5060 tree link = gimple_asm_output_op (asm_stmt, i);
040292e7
RB
5061 tree op = TREE_VALUE (link);
5062 if (REFERENCE_CLASS_P (op)
5063 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
5064 changed = true;
5065 }
538dd0b7 5066 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
040292e7 5067 {
538dd0b7 5068 tree link = gimple_asm_input_op (asm_stmt, i);
040292e7
RB
5069 tree op = TREE_VALUE (link);
5070 if ((REFERENCE_CLASS_P (op)
5071 || TREE_CODE (op) == ADDR_EXPR)
5072 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
5073 changed = true;
5074 }
5075 }
5076 break;
5077 case GIMPLE_DEBUG:
5078 if (gimple_debug_bind_p (stmt))
5079 {
5080 tree *val = gimple_debug_bind_get_value_ptr (stmt);
5081 if (*val
5082 && (REFERENCE_CLASS_P (*val)
5083 || TREE_CODE (*val) == ADDR_EXPR)
5084 && maybe_canonicalize_mem_ref_addr (val))
5085 changed = true;
5086 }
5087 break;
89a79e96
RB
5088 case GIMPLE_COND:
5089 {
5090 /* Canonicalize operand order. */
5091 tree lhs = gimple_cond_lhs (stmt);
5092 tree rhs = gimple_cond_rhs (stmt);
14e72812 5093 if (tree_swap_operands_p (lhs, rhs))
89a79e96
RB
5094 {
5095 gcond *gc = as_a <gcond *> (stmt);
5096 gimple_cond_set_lhs (gc, rhs);
5097 gimple_cond_set_rhs (gc, lhs);
5098 gimple_cond_set_code (gc,
5099 swap_tree_comparison (gimple_cond_code (gc)));
5100 changed = true;
5101 }
5102 }
040292e7
RB
5103 default:;
5104 }
5105
e0ee10ed
RB
5106 /* Dispatch to pattern-based folding. */
5107 if (!inplace
5108 || is_gimple_assign (stmt)
5109 || gimple_code (stmt) == GIMPLE_COND)
5110 {
5111 gimple_seq seq = NULL;
5d75ad95
RS
5112 gimple_match_op res_op;
5113 if (gimple_simplify (stmt, &res_op, inplace ? NULL : &seq,
0ff093d8 5114 valueize, valueize))
e0ee10ed 5115 {
5d75ad95 5116 if (replace_stmt_with_simplification (gsi, &res_op, &seq, inplace))
e0ee10ed
RB
5117 changed = true;
5118 else
5119 gimple_seq_discard (seq);
5120 }
5121 }
5122
5123 stmt = gsi_stmt (*gsi);
5124
cbdd87d4
RG
5125 /* Fold the main computation performed by the statement. */
5126 switch (gimple_code (stmt))
5127 {
5128 case GIMPLE_ASSIGN:
5129 {
819ec64c
RB
5130 /* Try to canonicalize for boolean-typed X the comparisons
5131 X == 0, X == 1, X != 0, and X != 1. */
5132 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
5133 || gimple_assign_rhs_code (stmt) == NE_EXPR)
5fbcc0ed 5134 {
819ec64c
RB
5135 tree lhs = gimple_assign_lhs (stmt);
5136 tree op1 = gimple_assign_rhs1 (stmt);
5137 tree op2 = gimple_assign_rhs2 (stmt);
5138 tree type = TREE_TYPE (op1);
5139
 5140 /* Check whether the comparison operands are of the same boolean
 5141 type as the result type.
 5142 Check that the second operand is an integer constant with value
 5143 one or zero. */
5144 if (TREE_CODE (op2) == INTEGER_CST
5145 && (integer_zerop (op2) || integer_onep (op2))
5146 && useless_type_conversion_p (TREE_TYPE (lhs), type))
5147 {
5148 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
5149 bool is_logical_not = false;
5150
 5151 /* X == 0 and X != 1 is a logical-not of X
5152 X == 1 and X != 0 is X */
5153 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
5154 || (cmp_code == NE_EXPR && integer_onep (op2)))
5155 is_logical_not = true;
5156
5157 if (is_logical_not == false)
5158 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
 5159 /* Only for one-bit precision typed X is the transformation
 5160 !X -> ~X valid. */
5161 else if (TYPE_PRECISION (type) == 1)
5162 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
5163 /* Otherwise we use !X -> X ^ 1. */
5164 else
5165 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
5166 build_int_cst (type, 1));
5167 changed = true;
5168 break;
5169 }
5fbcc0ed 5170 }
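	/* Illustrative sketch (hypothetical SSA names): for boolean-typed
	   _1 and _2 of the same type, the canonicalization above yields

	     _2 = _1 != 0;   =>   _2 = _1;
	     _2 = _1 == 0;   =>   _2 = ~_1;      (one-bit precision type)
	     _2 = _1 == 0;   =>   _2 = _1 ^ 1;   (wider boolean precision)  */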
819ec64c
RB
5171
5172 unsigned old_num_ops = gimple_num_ops (stmt);
5173 tree lhs = gimple_assign_lhs (stmt);
5174 tree new_rhs = fold_gimple_assign (gsi);
cbdd87d4
RG
5175 if (new_rhs
5176 && !useless_type_conversion_p (TREE_TYPE (lhs),
5177 TREE_TYPE (new_rhs)))
5178 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
5179 if (new_rhs
5180 && (!inplace
5181 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
5182 {
5183 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
5184 changed = true;
5185 }
5186 break;
5187 }
5188
cbdd87d4 5189 case GIMPLE_CALL:
ceeffab0 5190 changed |= gimple_fold_call (gsi, inplace);
cbdd87d4
RG
5191 break;
5192
5193 case GIMPLE_ASM:
5194 /* Fold *& in asm operands. */
38384150 5195 {
538dd0b7 5196 gasm *asm_stmt = as_a <gasm *> (stmt);
38384150
JJ
5197 size_t noutputs;
5198 const char **oconstraints;
5199 const char *constraint;
5200 bool allows_mem, allows_reg;
5201
538dd0b7 5202 noutputs = gimple_asm_noutputs (asm_stmt);
38384150
JJ
5203 oconstraints = XALLOCAVEC (const char *, noutputs);
5204
538dd0b7 5205 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
38384150 5206 {
538dd0b7 5207 tree link = gimple_asm_output_op (asm_stmt, i);
38384150
JJ
5208 tree op = TREE_VALUE (link);
5209 oconstraints[i]
5210 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5211 if (REFERENCE_CLASS_P (op)
5212 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
5213 {
5214 TREE_VALUE (link) = op;
5215 changed = true;
5216 }
5217 }
538dd0b7 5218 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
38384150 5219 {
538dd0b7 5220 tree link = gimple_asm_input_op (asm_stmt, i);
38384150
JJ
5221 tree op = TREE_VALUE (link);
5222 constraint
5223 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5224 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
5225 oconstraints, &allows_mem, &allows_reg);
5226 if (REFERENCE_CLASS_P (op)
5227 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
5228 != NULL_TREE)
5229 {
5230 TREE_VALUE (link) = op;
5231 changed = true;
5232 }
5233 }
5234 }
cbdd87d4
RG
5235 break;
5236
bd422c4a
RG
5237 case GIMPLE_DEBUG:
5238 if (gimple_debug_bind_p (stmt))
5239 {
5240 tree val = gimple_debug_bind_get_value (stmt);
5241 if (val
5242 && REFERENCE_CLASS_P (val))
5243 {
5244 tree tem = maybe_fold_reference (val, false);
5245 if (tem)
5246 {
5247 gimple_debug_bind_set_value (stmt, tem);
5248 changed = true;
5249 }
5250 }
3e888a5e
RG
5251 else if (val
5252 && TREE_CODE (val) == ADDR_EXPR)
5253 {
5254 tree ref = TREE_OPERAND (val, 0);
5255 tree tem = maybe_fold_reference (ref, false);
5256 if (tem)
5257 {
5258 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
5259 gimple_debug_bind_set_value (stmt, tem);
5260 changed = true;
5261 }
5262 }
bd422c4a
RG
5263 }
5264 break;
5265
cfe3d653
PK
5266 case GIMPLE_RETURN:
5267 {
5268 greturn *ret_stmt = as_a<greturn *> (stmt);
5269 tree ret = gimple_return_retval(ret_stmt);
5270
5271 if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
5272 {
5273 tree val = valueize (ret);
1af928db
RB
5274 if (val && val != ret
5275 && may_propagate_copy (ret, val))
cfe3d653
PK
5276 {
5277 gimple_return_set_retval (ret_stmt, val);
5278 changed = true;
5279 }
5280 }
5281 }
5282 break;
5283
cbdd87d4
RG
5284 default:;
5285 }
5286
5287 stmt = gsi_stmt (*gsi);
5288
37376165
RB
5289 /* Fold *& on the lhs. */
5290 if (gimple_has_lhs (stmt))
cbdd87d4
RG
5291 {
5292 tree lhs = gimple_get_lhs (stmt);
5293 if (lhs && REFERENCE_CLASS_P (lhs))
5294 {
5295 tree new_lhs = maybe_fold_reference (lhs, true);
5296 if (new_lhs)
5297 {
5298 gimple_set_lhs (stmt, new_lhs);
5299 changed = true;
5300 }
5301 }
5302 }
5303
a8b85ce9 5304 fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
cbdd87d4
RG
5305 return changed;
5306}
5307
e0ee10ed
RB
5308/* Valueization callback that ends up not following SSA edges. */
5309
5310tree
5311no_follow_ssa_edges (tree)
5312{
5313 return NULL_TREE;
5314}
5315
45cc9f96
RB
5316/* Valueization callback that ends up following single-use SSA edges only. */
5317
5318tree
5319follow_single_use_edges (tree val)
5320{
5321 if (TREE_CODE (val) == SSA_NAME
5322 && !has_single_use (val))
5323 return NULL_TREE;
5324 return val;
5325}
5326
c566cc9f
RS
5327/* Valueization callback that follows all SSA edges. */
5328
5329tree
5330follow_all_ssa_edges (tree val)
5331{
5332 return val;
5333}
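/* Illustrative sketch (my_pass_valueize and lattice_constant_value are
   made-up names, not part of GCC): a pass can supply its own valueization
   callback to substitute values it has already proven constant, e.g.

     static tree
     my_pass_valueize (tree name)
     {
       if (TREE_CODE (name) == SSA_NAME)
         if (tree cst = lattice_constant_value (name))
           return cst;
       return name;
     }

   and then call fold_stmt (gsi, my_pass_valueize), in the same way
   no_follow_ssa_edges and follow_single_use_edges above are used.  */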
5334
cbdd87d4
RG
5335/* Fold the statement pointed to by GSI. In some cases, this function may
5336 replace the whole statement with a new one. Returns true iff folding
5337 makes any changes.
5338 The statement pointed to by GSI should be in valid gimple form but may
5339 be in unfolded state as resulting from for example constant propagation
5340 which can produce *&x = 0. */
5341
5342bool
5343fold_stmt (gimple_stmt_iterator *gsi)
5344{
e0ee10ed
RB
5345 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
5346}
5347
5348bool
5349fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
5350{
5351 return fold_stmt_1 (gsi, false, valueize);
cbdd87d4
RG
5352}
5353
59401b92 5354/* Perform the minimal folding on statement *GSI. Only operations like
cbdd87d4
RG
5355 *&x created by constant propagation are handled. The statement cannot
5356 be replaced with a new one. Return true if the statement was
5357 changed, false otherwise.
59401b92 5358 The statement *GSI should be in valid gimple form but may
cbdd87d4
RG
5359 be in unfolded state as resulting from for example constant propagation
5360 which can produce *&x = 0. */
5361
5362bool
59401b92 5363fold_stmt_inplace (gimple_stmt_iterator *gsi)
cbdd87d4 5364{
355fe088 5365 gimple *stmt = gsi_stmt (*gsi);
e0ee10ed 5366 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
59401b92 5367 gcc_assert (gsi_stmt (*gsi) == stmt);
cbdd87d4
RG
5368 return changed;
5369}
5370
e89065a1
SL
5371/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
5372 if EXPR is null or we don't know how.
5373 If non-null, the result always has boolean type. */
5374
5375static tree
5376canonicalize_bool (tree expr, bool invert)
5377{
5378 if (!expr)
5379 return NULL_TREE;
5380 else if (invert)
5381 {
5382 if (integer_nonzerop (expr))
5383 return boolean_false_node;
5384 else if (integer_zerop (expr))
5385 return boolean_true_node;
5386 else if (TREE_CODE (expr) == SSA_NAME)
5387 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
5388 build_int_cst (TREE_TYPE (expr), 0));
98209db3 5389 else if (COMPARISON_CLASS_P (expr))
e89065a1
SL
5390 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
5391 boolean_type_node,
5392 TREE_OPERAND (expr, 0),
5393 TREE_OPERAND (expr, 1));
5394 else
5395 return NULL_TREE;
5396 }
5397 else
5398 {
5399 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5400 return expr;
5401 if (integer_nonzerop (expr))
5402 return boolean_true_node;
5403 else if (integer_zerop (expr))
5404 return boolean_false_node;
5405 else if (TREE_CODE (expr) == SSA_NAME)
5406 return fold_build2 (NE_EXPR, boolean_type_node, expr,
5407 build_int_cst (TREE_TYPE (expr), 0));
98209db3 5408 else if (COMPARISON_CLASS_P (expr))
e89065a1
SL
5409 return fold_build2 (TREE_CODE (expr),
5410 boolean_type_node,
5411 TREE_OPERAND (expr, 0),
5412 TREE_OPERAND (expr, 1));
5413 else
5414 return NULL_TREE;
5415 }
5416}
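/* Illustrative results (assumed operands: integral a, b and a boolean-typed
   SSA name _1):

     canonicalize_bool (a < b, false)   =>   a < b
     canonicalize_bool (a < b, true)    =>   a >= b
     canonicalize_bool (_1, false)      =>   _1           (already boolean)
     canonicalize_bool (_1, true)       =>   _1 == 0
     canonicalize_bool (1, true)        =>   boolean_false_node  */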
5417
5418/* Check to see if a boolean expression EXPR is logically equivalent to the
5419 comparison (OP1 CODE OP2). Check for various identities involving
5420 SSA_NAMEs. */
5421
5422static bool
5423same_bool_comparison_p (const_tree expr, enum tree_code code,
5424 const_tree op1, const_tree op2)
5425{
355fe088 5426 gimple *s;
e89065a1
SL
5427
5428 /* The obvious case. */
5429 if (TREE_CODE (expr) == code
5430 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
5431 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
5432 return true;
5433
5434 /* Check for comparing (name, name != 0) and the case where expr
5435 is an SSA_NAME with a definition matching the comparison. */
5436 if (TREE_CODE (expr) == SSA_NAME
5437 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5438 {
5439 if (operand_equal_p (expr, op1, 0))
5440 return ((code == NE_EXPR && integer_zerop (op2))
5441 || (code == EQ_EXPR && integer_nonzerop (op2)));
5442 s = SSA_NAME_DEF_STMT (expr);
5443 if (is_gimple_assign (s)
5444 && gimple_assign_rhs_code (s) == code
5445 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
5446 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
5447 return true;
5448 }
5449
5450 /* If op1 is of the form (name != 0) or (name == 0), and the definition
5451 of name is a comparison, recurse. */
5452 if (TREE_CODE (op1) == SSA_NAME
5453 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
5454 {
5455 s = SSA_NAME_DEF_STMT (op1);
5456 if (is_gimple_assign (s)
5457 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
5458 {
5459 enum tree_code c = gimple_assign_rhs_code (s);
5460 if ((c == NE_EXPR && integer_zerop (op2))
5461 || (c == EQ_EXPR && integer_nonzerop (op2)))
5462 return same_bool_comparison_p (expr, c,
5463 gimple_assign_rhs1 (s),
5464 gimple_assign_rhs2 (s));
5465 if ((c == EQ_EXPR && integer_zerop (op2))
5466 || (c == NE_EXPR && integer_nonzerop (op2)))
5467 return same_bool_comparison_p (expr,
5468 invert_tree_comparison (c, false),
5469 gimple_assign_rhs1 (s),
5470 gimple_assign_rhs2 (s));
5471 }
5472 }
5473 return false;
5474}
5475
5476/* Check to see if two boolean expressions OP1 and OP2 are logically
5477 equivalent. */
5478
5479static bool
5480same_bool_result_p (const_tree op1, const_tree op2)
5481{
5482 /* Simple cases first. */
5483 if (operand_equal_p (op1, op2, 0))
5484 return true;
5485
5486 /* Check the cases where at least one of the operands is a comparison.
5487 These are a bit smarter than operand_equal_p in that they apply some
 5488 identities on SSA_NAMEs. */
98209db3 5489 if (COMPARISON_CLASS_P (op2)
e89065a1
SL
5490 && same_bool_comparison_p (op1, TREE_CODE (op2),
5491 TREE_OPERAND (op2, 0),
5492 TREE_OPERAND (op2, 1)))
5493 return true;
98209db3 5494 if (COMPARISON_CLASS_P (op1)
e89065a1
SL
5495 && same_bool_comparison_p (op2, TREE_CODE (op1),
5496 TREE_OPERAND (op1, 0),
5497 TREE_OPERAND (op1, 1)))
5498 return true;
5499
5500 /* Default case. */
5501 return false;
5502}
5503
5504/* Forward declarations for some mutually recursive functions. */
5505
5506static tree
5f487a34 5507and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
e89065a1
SL
5508 enum tree_code code2, tree op2a, tree op2b);
5509static tree
5f487a34 5510and_var_with_comparison (tree type, tree var, bool invert,
e89065a1
SL
5511 enum tree_code code2, tree op2a, tree op2b);
5512static tree
5f487a34 5513and_var_with_comparison_1 (tree type, gimple *stmt,
e89065a1
SL
5514 enum tree_code code2, tree op2a, tree op2b);
5515static tree
5f487a34 5516or_comparisons_1 (tree, enum tree_code code1, tree op1a, tree op1b,
e89065a1
SL
5517 enum tree_code code2, tree op2a, tree op2b);
5518static tree
5f487a34 5519or_var_with_comparison (tree, tree var, bool invert,
e89065a1
SL
5520 enum tree_code code2, tree op2a, tree op2b);
5521static tree
5f487a34 5522or_var_with_comparison_1 (tree, gimple *stmt,
e89065a1
SL
5523 enum tree_code code2, tree op2a, tree op2b);
5524
5525/* Helper function for and_comparisons_1: try to simplify the AND of the
5526 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5527 If INVERT is true, invert the value of the VAR before doing the AND.
5528 Return NULL_EXPR if we can't simplify this to a single expression. */
5529
5530static tree
5f487a34 5531and_var_with_comparison (tree type, tree var, bool invert,
e89065a1
SL
5532 enum tree_code code2, tree op2a, tree op2b)
5533{
5534 tree t;
355fe088 5535 gimple *stmt = SSA_NAME_DEF_STMT (var);
e89065a1
SL
5536
5537 /* We can only deal with variables whose definitions are assignments. */
5538 if (!is_gimple_assign (stmt))
5539 return NULL_TREE;
5540
5541 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5542 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
5543 Then we only have to consider the simpler non-inverted cases. */
5544 if (invert)
5f487a34 5545 t = or_var_with_comparison_1 (type, stmt,
e89065a1
SL
5546 invert_tree_comparison (code2, false),
5547 op2a, op2b);
5548 else
5f487a34 5549 t = and_var_with_comparison_1 (type, stmt, code2, op2a, op2b);
e89065a1
SL
5550 return canonicalize_bool (t, invert);
5551}
5552
5553/* Try to simplify the AND of the ssa variable defined by the assignment
5554 STMT with the comparison specified by (OP2A CODE2 OP2B).
5555 Return NULL_EXPR if we can't simplify this to a single expression. */
5556
5557static tree
5f487a34 5558and_var_with_comparison_1 (tree type, gimple *stmt,
e89065a1
SL
5559 enum tree_code code2, tree op2a, tree op2b)
5560{
5561 tree var = gimple_assign_lhs (stmt);
5562 tree true_test_var = NULL_TREE;
5563 tree false_test_var = NULL_TREE;
5564 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5565
5566 /* Check for identities like (var AND (var == 0)) => false. */
5567 if (TREE_CODE (op2a) == SSA_NAME
5568 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5569 {
5570 if ((code2 == NE_EXPR && integer_zerop (op2b))
5571 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5572 {
5573 true_test_var = op2a;
5574 if (var == true_test_var)
5575 return var;
5576 }
5577 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5578 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5579 {
5580 false_test_var = op2a;
5581 if (var == false_test_var)
5582 return boolean_false_node;
5583 }
5584 }
5585
5586 /* If the definition is a comparison, recurse on it. */
5587 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5588 {
5f487a34 5589 tree t = and_comparisons_1 (type, innercode,
e89065a1
SL
5590 gimple_assign_rhs1 (stmt),
5591 gimple_assign_rhs2 (stmt),
5592 code2,
5593 op2a,
5594 op2b);
5595 if (t)
5596 return t;
5597 }
5598
5599 /* If the definition is an AND or OR expression, we may be able to
5600 simplify by reassociating. */
eb9820c0
KT
5601 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5602 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
e89065a1
SL
5603 {
5604 tree inner1 = gimple_assign_rhs1 (stmt);
5605 tree inner2 = gimple_assign_rhs2 (stmt);
355fe088 5606 gimple *s;
e89065a1
SL
5607 tree t;
5608 tree partial = NULL_TREE;
eb9820c0 5609 bool is_and = (innercode == BIT_AND_EXPR);
e89065a1
SL
5610
5611 /* Check for boolean identities that don't require recursive examination
5612 of inner1/inner2:
5613 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
5614 inner1 AND (inner1 OR inner2) => inner1
5615 !inner1 AND (inner1 AND inner2) => false
5616 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
5617 Likewise for similar cases involving inner2. */
5618 if (inner1 == true_test_var)
5619 return (is_and ? var : inner1);
5620 else if (inner2 == true_test_var)
5621 return (is_and ? var : inner2);
5622 else if (inner1 == false_test_var)
5623 return (is_and
5624 ? boolean_false_node
5f487a34
LJH
5625 : and_var_with_comparison (type, inner2, false, code2, op2a,
5626 op2b));
e89065a1
SL
5627 else if (inner2 == false_test_var)
5628 return (is_and
5629 ? boolean_false_node
5f487a34
LJH
5630 : and_var_with_comparison (type, inner1, false, code2, op2a,
5631 op2b));
e89065a1
SL
5632
5633 /* Next, redistribute/reassociate the AND across the inner tests.
5634 Compute the first partial result, (inner1 AND (op2a code op2b)) */
5635 if (TREE_CODE (inner1) == SSA_NAME
5636 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5637 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5f487a34 5638 && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
e89065a1
SL
5639 gimple_assign_rhs1 (s),
5640 gimple_assign_rhs2 (s),
5641 code2, op2a, op2b)))
5642 {
5643 /* Handle the AND case, where we are reassociating:
5644 (inner1 AND inner2) AND (op2a code2 op2b)
5645 => (t AND inner2)
5646 If the partial result t is a constant, we win. Otherwise
5647 continue on to try reassociating with the other inner test. */
5648 if (is_and)
5649 {
5650 if (integer_onep (t))
5651 return inner2;
5652 else if (integer_zerop (t))
5653 return boolean_false_node;
5654 }
5655
5656 /* Handle the OR case, where we are redistributing:
5657 (inner1 OR inner2) AND (op2a code2 op2b)
5658 => (t OR (inner2 AND (op2a code2 op2b))) */
8236c8eb
JJ
5659 else if (integer_onep (t))
5660 return boolean_true_node;
5661
5662 /* Save partial result for later. */
5663 partial = t;
e89065a1
SL
5664 }
5665
5666 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
5667 if (TREE_CODE (inner2) == SSA_NAME
5668 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5669 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5f487a34 5670 && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
e89065a1
SL
5671 gimple_assign_rhs1 (s),
5672 gimple_assign_rhs2 (s),
5673 code2, op2a, op2b)))
5674 {
5675 /* Handle the AND case, where we are reassociating:
5676 (inner1 AND inner2) AND (op2a code2 op2b)
5677 => (inner1 AND t) */
5678 if (is_and)
5679 {
5680 if (integer_onep (t))
5681 return inner1;
5682 else if (integer_zerop (t))
5683 return boolean_false_node;
8236c8eb
JJ
5684 /* If both are the same, we can apply the identity
5685 (x AND x) == x. */
5686 else if (partial && same_bool_result_p (t, partial))
5687 return t;
e89065a1
SL
5688 }
5689
 5690 /* Handle the OR case, where we are redistributing:
5691 (inner1 OR inner2) AND (op2a code2 op2b)
5692 => (t OR (inner1 AND (op2a code2 op2b)))
5693 => (t OR partial) */
5694 else
5695 {
5696 if (integer_onep (t))
5697 return boolean_true_node;
5698 else if (partial)
5699 {
5700 /* We already got a simplification for the other
5701 operand to the redistributed OR expression. The
5702 interesting case is when at least one is false.
5703 Or, if both are the same, we can apply the identity
5704 (x OR x) == x. */
5705 if (integer_zerop (partial))
5706 return t;
5707 else if (integer_zerop (t))
5708 return partial;
5709 else if (same_bool_result_p (t, partial))
5710 return t;
5711 }
5712 }
5713 }
5714 }
5715 return NULL_TREE;
5716}
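/* Illustrative sketch (hypothetical GIMPLE, integral x, y and z): one case
   the reassociation above catches.  Given

     _1 = x < y;
     _2 = x != z;
     var = _1 & _2;

   and the second comparison (OP2A CODE2 OP2B) being x >= y, the inner test
   _1 (x < y) ANDed with x >= y simplifies to false, so the whole expression
   folds to boolean_false_node.  */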
5717
5718/* Try to simplify the AND of two comparisons defined by
5719 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5720 If this can be done without constructing an intermediate value,
5721 return the resulting tree; otherwise NULL_TREE is returned.
5722 This function is deliberately asymmetric as it recurses on SSA_DEFs
5723 in the first comparison but not the second. */
5724
5725static tree
5f487a34 5726and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
e89065a1
SL
5727 enum tree_code code2, tree op2a, tree op2b)
5728{
ae22ac3c 5729 tree truth_type = truth_type_for (TREE_TYPE (op1a));
31ed6226 5730
e89065a1
SL
5731 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
5732 if (operand_equal_p (op1a, op2a, 0)
5733 && operand_equal_p (op1b, op2b, 0))
5734 {
eb9820c0 5735 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
5736 tree t = combine_comparisons (UNKNOWN_LOCATION,
5737 TRUTH_ANDIF_EXPR, code1, code2,
31ed6226 5738 truth_type, op1a, op1b);
e89065a1
SL
5739 if (t)
5740 return t;
5741 }
5742
5743 /* Likewise the swapped case of the above. */
5744 if (operand_equal_p (op1a, op2b, 0)
5745 && operand_equal_p (op1b, op2a, 0))
5746 {
eb9820c0 5747 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
5748 tree t = combine_comparisons (UNKNOWN_LOCATION,
5749 TRUTH_ANDIF_EXPR, code1,
5750 swap_tree_comparison (code2),
31ed6226 5751 truth_type, op1a, op1b);
e89065a1
SL
5752 if (t)
5753 return t;
5754 }
5755
e89065a1
SL
5756 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5757 NAME's definition is a truth value. See if there are any simplifications
5758 that can be done against the NAME's definition. */
5759 if (TREE_CODE (op1a) == SSA_NAME
5760 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5761 && (integer_zerop (op1b) || integer_onep (op1b)))
5762 {
5763 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5764 || (code1 == NE_EXPR && integer_onep (op1b)));
355fe088 5765 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
e89065a1
SL
5766 switch (gimple_code (stmt))
5767 {
5768 case GIMPLE_ASSIGN:
5769 /* Try to simplify by copy-propagating the definition. */
5f487a34
LJH
5770 return and_var_with_comparison (type, op1a, invert, code2, op2a,
5771 op2b);
e89065a1
SL
5772
5773 case GIMPLE_PHI:
5774 /* If every argument to the PHI produces the same result when
5775 ANDed with the second comparison, we win.
5776 Do not do this unless the type is bool since we need a bool
5777 result here anyway. */
5778 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5779 {
5780 tree result = NULL_TREE;
5781 unsigned i;
5782 for (i = 0; i < gimple_phi_num_args (stmt); i++)
5783 {
5784 tree arg = gimple_phi_arg_def (stmt, i);
5785
5786 /* If this PHI has itself as an argument, ignore it.
5787 If all the other args produce the same result,
5788 we're still OK. */
5789 if (arg == gimple_phi_result (stmt))
5790 continue;
5791 else if (TREE_CODE (arg) == INTEGER_CST)
5792 {
5793 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
5794 {
5795 if (!result)
5796 result = boolean_false_node;
5797 else if (!integer_zerop (result))
5798 return NULL_TREE;
5799 }
5800 else if (!result)
5801 result = fold_build2 (code2, boolean_type_node,
5802 op2a, op2b);
5803 else if (!same_bool_comparison_p (result,
5804 code2, op2a, op2b))
5805 return NULL_TREE;
5806 }
0e8b84ec
JJ
5807 else if (TREE_CODE (arg) == SSA_NAME
5808 && !SSA_NAME_IS_DEFAULT_DEF (arg))
e89065a1 5809 {
6c66f733 5810 tree temp;
355fe088 5811 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6c66f733
JJ
5812 /* In simple cases we can look through PHI nodes,
5813 but we have to be careful with loops.
5814 See PR49073. */
5815 if (! dom_info_available_p (CDI_DOMINATORS)
5816 || gimple_bb (def_stmt) == gimple_bb (stmt)
5817 || dominated_by_p (CDI_DOMINATORS,
5818 gimple_bb (def_stmt),
5819 gimple_bb (stmt)))
5820 return NULL_TREE;
5f487a34 5821 temp = and_var_with_comparison (type, arg, invert, code2,
6c66f733 5822 op2a, op2b);
e89065a1
SL
5823 if (!temp)
5824 return NULL_TREE;
5825 else if (!result)
5826 result = temp;
5827 else if (!same_bool_result_p (result, temp))
5828 return NULL_TREE;
5829 }
5830 else
5831 return NULL_TREE;
5832 }
5833 return result;
5834 }
5835
5836 default:
5837 break;
5838 }
5839 }
5840 return NULL_TREE;
5841}
5842
5f487a34
LJH
5843/* Helper function for maybe_fold_and_comparisons and maybe_fold_or_comparisons:
5844   try to simplify the AND/OR of the two comparisons specified by
5845   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B) using match.pd. Return NULL_EXPR
5846   if we can't simplify this to a single expression. As we are going to
5847   lower the cost of building SSA names / gimple stmts significantly, we
5848   allocate them on the stack. This will cause the code to be a bit ugly. */
5849
5850static tree
5851maybe_fold_comparisons_from_match_pd (tree type, enum tree_code code,
5852 enum tree_code code1,
5853 tree op1a, tree op1b,
5854 enum tree_code code2, tree op2a,
5855 tree op2b)
5856{
5857 /* Allocate gimple stmt1 on the stack. */
5858 gassign *stmt1
5859 = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
5860 gimple_init (stmt1, GIMPLE_ASSIGN, 3);
5861 gimple_assign_set_rhs_code (stmt1, code1);
5862 gimple_assign_set_rhs1 (stmt1, op1a);
5863 gimple_assign_set_rhs2 (stmt1, op1b);
5864
5865 /* Allocate gimple stmt2 on the stack. */
5866 gassign *stmt2
5867 = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
5868 gimple_init (stmt2, GIMPLE_ASSIGN, 3);
5869 gimple_assign_set_rhs_code (stmt2, code2);
5870 gimple_assign_set_rhs1 (stmt2, op2a);
5871 gimple_assign_set_rhs2 (stmt2, op2b);
5872
5873 /* Allocate SSA names(lhs1) on the stack. */
5874 tree lhs1 = (tree)XALLOCA (tree_ssa_name);
5875 memset (lhs1, 0, sizeof (tree_ssa_name));
5876 TREE_SET_CODE (lhs1, SSA_NAME);
5877 TREE_TYPE (lhs1) = type;
5878 init_ssa_name_imm_use (lhs1);
5879
5880 /* Allocate SSA names(lhs2) on the stack. */
5881 tree lhs2 = (tree)XALLOCA (tree_ssa_name);
5882 memset (lhs2, 0, sizeof (tree_ssa_name));
5883 TREE_SET_CODE (lhs2, SSA_NAME);
5884 TREE_TYPE (lhs2) = type;
5885 init_ssa_name_imm_use (lhs2);
5886
5887 gimple_assign_set_lhs (stmt1, lhs1);
5888 gimple_assign_set_lhs (stmt2, lhs2);
5889
5890 gimple_match_op op (gimple_match_cond::UNCOND, code,
5891 type, gimple_assign_lhs (stmt1),
5892 gimple_assign_lhs (stmt2));
5893 if (op.resimplify (NULL, follow_all_ssa_edges))
5894 {
5895 if (gimple_simplified_result_is_gimple_val (&op))
5896 {
5897 tree res = op.ops[0];
5898 if (res == lhs1)
5899 return build2 (code1, type, op1a, op1b);
5900 else if (res == lhs2)
5901 return build2 (code2, type, op2a, op2b);
5902 else
5903 return res;
5904 }
ae9c3507
ML
5905 else if (op.code.is_tree_code ()
5906 && TREE_CODE_CLASS ((tree_code)op.code) == tcc_comparison)
5907 {
5908 tree op0 = op.ops[0];
5909 tree op1 = op.ops[1];
5910 if (op0 == lhs1 || op0 == lhs2 || op1 == lhs1 || op1 == lhs2)
5911 return NULL_TREE; /* not simple */
5912
5913 return build2 ((enum tree_code)op.code, op.type, op0, op1);
5914 }
5f487a34
LJH
5915 }
5916
5917 return NULL_TREE;
5918}
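/* Note on the scheme above (the mechanism is as written in the code; the
   example outcome is hedged): the two GIMPLE_ASSIGNs and the two SSA names
   are carved out of stack memory with XALLOCAVEC / XALLOCA instead of the
   garbage-collected allocators, and the combined expression "lhs1 CODE lhs2"
   is then offered to the match.pd machinery through
   gimple_match_op::resimplify.  A query that combines, say, x != y with
   x < y might come back as the single comparison x < y, without any SSA
   name or statement ever touching the GC heap.  */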
5919
e89065a1
SL
5920/* Try to simplify the AND of two comparisons, specified by
 5921 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5922 If this can be simplified to a single expression (without requiring
5923 introducing more SSA variables to hold intermediate values),
5924 return the resulting tree. Otherwise return NULL_TREE.
5925 If the result expression is non-null, it has boolean type. */
5926
5927tree
5f487a34
LJH
5928maybe_fold_and_comparisons (tree type,
5929 enum tree_code code1, tree op1a, tree op1b,
e89065a1
SL
5930 enum tree_code code2, tree op2a, tree op2b)
5931{
5f487a34 5932 if (tree t = and_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b))
e89065a1 5933 return t;
5f487a34
LJH
5934
5935 if (tree t = and_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b))
5936 return t;
5937
5938 if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_AND_EXPR, code1,
5939 op1a, op1b, code2, op2a,
5940 op2b))
5941 return t;
5942
5943 return NULL_TREE;
e89065a1
SL
5944}
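/* Illustrative results (assumed integral operands x and y, so NaN handling
   does not get in the way):

     maybe_fold_and_comparisons (boolean_type_node,
                                 LE_EXPR, x, y, GE_EXPR, x, y)
       => x == y
     maybe_fold_and_comparisons (boolean_type_node,
                                 LT_EXPR, x, y, GT_EXPR, x, y)
       => boolean_false_node  */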
5945
5946/* Helper function for or_comparisons_1: try to simplify the OR of the
5947 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5948 If INVERT is true, invert the value of VAR before doing the OR.
5949 Return NULL_EXPR if we can't simplify this to a single expression. */
5950
5951static tree
5f487a34 5952or_var_with_comparison (tree type, tree var, bool invert,
e89065a1
SL
5953 enum tree_code code2, tree op2a, tree op2b)
5954{
5955 tree t;
355fe088 5956 gimple *stmt = SSA_NAME_DEF_STMT (var);
e89065a1
SL
5957
5958 /* We can only deal with variables whose definitions are assignments. */
5959 if (!is_gimple_assign (stmt))
5960 return NULL_TREE;
5961
5962 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5963 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
5964 Then we only have to consider the simpler non-inverted cases. */
5965 if (invert)
5f487a34 5966 t = and_var_with_comparison_1 (type, stmt,
e89065a1
SL
5967 invert_tree_comparison (code2, false),
5968 op2a, op2b);
5969 else
5f487a34 5970 t = or_var_with_comparison_1 (type, stmt, code2, op2a, op2b);
e89065a1
SL
5971 return canonicalize_bool (t, invert);
5972}
5973
5974/* Try to simplify the OR of the ssa variable defined by the assignment
5975 STMT with the comparison specified by (OP2A CODE2 OP2B).
5976 Return NULL_EXPR if we can't simplify this to a single expression. */
5977
5978static tree
5f487a34 5979or_var_with_comparison_1 (tree type, gimple *stmt,
e89065a1
SL
5980 enum tree_code code2, tree op2a, tree op2b)
5981{
5982 tree var = gimple_assign_lhs (stmt);
5983 tree true_test_var = NULL_TREE;
5984 tree false_test_var = NULL_TREE;
5985 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5986
5987 /* Check for identities like (var OR (var != 0)) => true . */
5988 if (TREE_CODE (op2a) == SSA_NAME
5989 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5990 {
5991 if ((code2 == NE_EXPR && integer_zerop (op2b))
5992 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5993 {
5994 true_test_var = op2a;
5995 if (var == true_test_var)
5996 return var;
5997 }
5998 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5999 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
6000 {
6001 false_test_var = op2a;
6002 if (var == false_test_var)
6003 return boolean_true_node;
6004 }
6005 }
6006
6007 /* If the definition is a comparison, recurse on it. */
6008 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
6009 {
5f487a34 6010 tree t = or_comparisons_1 (type, innercode,
e89065a1
SL
6011 gimple_assign_rhs1 (stmt),
6012 gimple_assign_rhs2 (stmt),
6013 code2,
6014 op2a,
6015 op2b);
6016 if (t)
6017 return t;
6018 }
6019
6020 /* If the definition is an AND or OR expression, we may be able to
6021 simplify by reassociating. */
eb9820c0
KT
6022 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
6023 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
e89065a1
SL
6024 {
6025 tree inner1 = gimple_assign_rhs1 (stmt);
6026 tree inner2 = gimple_assign_rhs2 (stmt);
355fe088 6027 gimple *s;
e89065a1
SL
6028 tree t;
6029 tree partial = NULL_TREE;
eb9820c0 6030 bool is_or = (innercode == BIT_IOR_EXPR);
e89065a1
SL
6031
6032 /* Check for boolean identities that don't require recursive examination
6033 of inner1/inner2:
6034 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
6035 inner1 OR (inner1 AND inner2) => inner1
6036 !inner1 OR (inner1 OR inner2) => true
6037 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
6038 */
6039 if (inner1 == true_test_var)
6040 return (is_or ? var : inner1);
6041 else if (inner2 == true_test_var)
6042 return (is_or ? var : inner2);
6043 else if (inner1 == false_test_var)
6044 return (is_or
6045 ? boolean_true_node
5f487a34
LJH
6046 : or_var_with_comparison (type, inner2, false, code2, op2a,
6047 op2b));
e89065a1
SL
6048 else if (inner2 == false_test_var)
6049 return (is_or
6050 ? boolean_true_node
5f487a34
LJH
6051 : or_var_with_comparison (type, inner1, false, code2, op2a,
6052 op2b));
e89065a1
SL
6053
6054 /* Next, redistribute/reassociate the OR across the inner tests.
6055 Compute the first partial result, (inner1 OR (op2a code op2b)) */
6056 if (TREE_CODE (inner1) == SSA_NAME
6057 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
6058 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5f487a34 6059 && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
e89065a1
SL
6060 gimple_assign_rhs1 (s),
6061 gimple_assign_rhs2 (s),
6062 code2, op2a, op2b)))
6063 {
6064 /* Handle the OR case, where we are reassociating:
6065 (inner1 OR inner2) OR (op2a code2 op2b)
6066 => (t OR inner2)
6067 If the partial result t is a constant, we win. Otherwise
6068 continue on to try reassociating with the other inner test. */
8236c8eb 6069 if (is_or)
e89065a1
SL
6070 {
6071 if (integer_onep (t))
6072 return boolean_true_node;
6073 else if (integer_zerop (t))
6074 return inner2;
6075 }
6076
6077 /* Handle the AND case, where we are redistributing:
6078 (inner1 AND inner2) OR (op2a code2 op2b)
6079 => (t AND (inner2 OR (op2a code op2b))) */
8236c8eb
JJ
6080 else if (integer_zerop (t))
6081 return boolean_false_node;
6082
6083 /* Save partial result for later. */
6084 partial = t;
e89065a1
SL
6085 }
6086
6087 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
6088 if (TREE_CODE (inner2) == SSA_NAME
6089 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
6090 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5f487a34 6091 && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
e89065a1
SL
6092 gimple_assign_rhs1 (s),
6093 gimple_assign_rhs2 (s),
6094 code2, op2a, op2b)))
6095 {
6096 /* Handle the OR case, where we are reassociating:
6097 (inner1 OR inner2) OR (op2a code2 op2b)
8236c8eb
JJ
6098 => (inner1 OR t)
6099 => (t OR partial) */
6100 if (is_or)
e89065a1
SL
6101 {
6102 if (integer_zerop (t))
6103 return inner1;
6104 else if (integer_onep (t))
6105 return boolean_true_node;
8236c8eb
JJ
6106 /* If both are the same, we can apply the identity
6107 (x OR x) == x. */
6108 else if (partial && same_bool_result_p (t, partial))
6109 return t;
e89065a1
SL
6110 }
6111
6112 /* Handle the AND case, where we are redistributing:
6113 (inner1 AND inner2) OR (op2a code2 op2b)
6114 => (t AND (inner1 OR (op2a code2 op2b)))
6115 => (t AND partial) */
6116 else
6117 {
6118 if (integer_zerop (t))
6119 return boolean_false_node;
6120 else if (partial)
6121 {
6122 /* We already got a simplification for the other
6123 operand to the redistributed AND expression. The
6124 interesting case is when at least one is true.
6125 Or, if both are the same, we can apply the identity
8236c8eb 6126 (x AND x) == x. */
e89065a1
SL
6127 if (integer_onep (partial))
6128 return t;
6129 else if (integer_onep (t))
6130 return partial;
6131 else if (same_bool_result_p (t, partial))
8236c8eb 6132 return t;
e89065a1
SL
6133 }
6134 }
6135 }
6136 }
6137 return NULL_TREE;
6138}
6139
6140/* Try to simplify the OR of two comparisons defined by
6141 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
6142 If this can be done without constructing an intermediate value,
6143 return the resulting tree; otherwise NULL_TREE is returned.
6144 This function is deliberately asymmetric as it recurses on SSA_DEFs
6145 in the first comparison but not the second. */
6146
6147static tree
5f487a34 6148or_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
e89065a1
SL
6149 enum tree_code code2, tree op2a, tree op2b)
6150{
ae22ac3c 6151 tree truth_type = truth_type_for (TREE_TYPE (op1a));
31ed6226 6152
e89065a1
SL
6153 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
6154 if (operand_equal_p (op1a, op2a, 0)
6155 && operand_equal_p (op1b, op2b, 0))
6156 {
eb9820c0 6157 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
6158 tree t = combine_comparisons (UNKNOWN_LOCATION,
6159 TRUTH_ORIF_EXPR, code1, code2,
31ed6226 6160 truth_type, op1a, op1b);
e89065a1
SL
6161 if (t)
6162 return t;
6163 }
6164
6165 /* Likewise the swapped case of the above. */
6166 if (operand_equal_p (op1a, op2b, 0)
6167 && operand_equal_p (op1b, op2a, 0))
6168 {
eb9820c0 6169 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
6170 tree t = combine_comparisons (UNKNOWN_LOCATION,
6171 TRUTH_ORIF_EXPR, code1,
6172 swap_tree_comparison (code2),
31ed6226 6173 truth_type, op1a, op1b);
e89065a1
SL
6174 if (t)
6175 return t;
6176 }
6177
e89065a1
SL
6178 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
6179 NAME's definition is a truth value. See if there are any simplifications
6180 that can be done against the NAME's definition. */
6181 if (TREE_CODE (op1a) == SSA_NAME
6182 && (code1 == NE_EXPR || code1 == EQ_EXPR)
6183 && (integer_zerop (op1b) || integer_onep (op1b)))
6184 {
6185 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
6186 || (code1 == NE_EXPR && integer_onep (op1b)));
355fe088 6187 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
e89065a1
SL
6188 switch (gimple_code (stmt))
6189 {
6190 case GIMPLE_ASSIGN:
6191 /* Try to simplify by copy-propagating the definition. */
5f487a34
LJH
6192 return or_var_with_comparison (type, op1a, invert, code2, op2a,
6193 op2b);
e89065a1
SL
6194
6195 case GIMPLE_PHI:
6196 /* If every argument to the PHI produces the same result when
6197 ORed with the second comparison, we win.
6198 Do not do this unless the type is bool since we need a bool
6199 result here anyway. */
6200 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
6201 {
6202 tree result = NULL_TREE;
6203 unsigned i;
6204 for (i = 0; i < gimple_phi_num_args (stmt); i++)
6205 {
6206 tree arg = gimple_phi_arg_def (stmt, i);
6207
6208 /* If this PHI has itself as an argument, ignore it.
6209 If all the other args produce the same result,
6210 we're still OK. */
6211 if (arg == gimple_phi_result (stmt))
6212 continue;
6213 else if (TREE_CODE (arg) == INTEGER_CST)
6214 {
6215 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
6216 {
6217 if (!result)
6218 result = boolean_true_node;
6219 else if (!integer_onep (result))
6220 return NULL_TREE;
6221 }
6222 else if (!result)
6223 result = fold_build2 (code2, boolean_type_node,
6224 op2a, op2b);
6225 else if (!same_bool_comparison_p (result,
6226 code2, op2a, op2b))
6227 return NULL_TREE;
6228 }
0e8b84ec
JJ
6229 else if (TREE_CODE (arg) == SSA_NAME
6230 && !SSA_NAME_IS_DEFAULT_DEF (arg))
e89065a1 6231 {
6c66f733 6232 tree temp;
355fe088 6233 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6c66f733
JJ
6234 /* In simple cases we can look through PHI nodes,
6235 but we have to be careful with loops.
6236 See PR49073. */
6237 if (! dom_info_available_p (CDI_DOMINATORS)
6238 || gimple_bb (def_stmt) == gimple_bb (stmt)
6239 || dominated_by_p (CDI_DOMINATORS,
6240 gimple_bb (def_stmt),
6241 gimple_bb (stmt)))
6242 return NULL_TREE;
5f487a34 6243 temp = or_var_with_comparison (type, arg, invert, code2,
6c66f733 6244 op2a, op2b);
e89065a1
SL
6245 if (!temp)
6246 return NULL_TREE;
6247 else if (!result)
6248 result = temp;
6249 else if (!same_bool_result_p (result, temp))
6250 return NULL_TREE;
6251 }
6252 else
6253 return NULL_TREE;
6254 }
6255 return result;
6256 }
6257
6258 default:
6259 break;
6260 }
6261 }
6262 return NULL_TREE;
6263}
6264
6265/* Try to simplify the OR of two comparisons, specified by
 6266 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
6267 If this can be simplified to a single expression (without requiring
6268 introducing more SSA variables to hold intermediate values),
6269 return the resulting tree. Otherwise return NULL_TREE.
6270 If the result expression is non-null, it has boolean type. */
6271
6272tree
5f487a34
LJH
6273maybe_fold_or_comparisons (tree type,
6274 enum tree_code code1, tree op1a, tree op1b,
e89065a1
SL
6275 enum tree_code code2, tree op2a, tree op2b)
6276{
5f487a34 6277 if (tree t = or_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b))
e89065a1 6278 return t;
cfef45c8 6279
5f487a34
LJH
6280 if (tree t = or_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b))
6281 return t;
6282
6283 if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_IOR_EXPR, code1,
6284 op1a, op1b, code2, op2a,
6285 op2b))
6286 return t;
6287
6288 return NULL_TREE;
6289}
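/* Illustrative results (assumed integral operands x and y):

     maybe_fold_or_comparisons (boolean_type_node,
                                LT_EXPR, x, y, EQ_EXPR, x, y)
       => x <= y
     maybe_fold_or_comparisons (boolean_type_node,
                                LT_EXPR, x, y, GE_EXPR, x, y)
       => boolean_true_node  */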
cfef45c8
RG
6290
6291/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6292
6293 Either NULL_TREE, a simplified but non-constant or a constant
6294 is returned.
6295
6296 ??? This should go into a gimple-fold-inline.h file to be eventually
6297 privatized with the single valueize function used in the various TUs
6298 to avoid the indirect function call overhead. */
6299
6300tree
355fe088 6301gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
d2a85801 6302 tree (*gvalueize) (tree))
cfef45c8 6303{
5d75ad95 6304 gimple_match_op res_op;
45cc9f96
RB
6305 /* ??? The SSA propagators do not correctly deal with following SSA use-def
6306 edges if there are intermediate VARYING defs. For this reason
6307 do not follow SSA edges here even though SCCVN can technically
6308 just deal fine with that. */
5d75ad95 6309 if (gimple_simplify (stmt, &res_op, NULL, gvalueize, valueize))
45cc9f96 6310 {
34050b6b 6311 tree res = NULL_TREE;
5d75ad95
RS
6312 if (gimple_simplified_result_is_gimple_val (&res_op))
6313 res = res_op.ops[0];
34050b6b 6314 else if (mprts_hook)
5d75ad95 6315 res = mprts_hook (&res_op);
34050b6b 6316 if (res)
45cc9f96 6317 {
34050b6b
RB
6318 if (dump_file && dump_flags & TDF_DETAILS)
6319 {
6320 fprintf (dump_file, "Match-and-simplified ");
6321 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
6322 fprintf (dump_file, " to ");
ef6cb4c7 6323 print_generic_expr (dump_file, res);
34050b6b
RB
6324 fprintf (dump_file, "\n");
6325 }
6326 return res;
45cc9f96 6327 }
45cc9f96
RB
6328 }
6329
cfef45c8
RG
6330 location_t loc = gimple_location (stmt);
6331 switch (gimple_code (stmt))
6332 {
6333 case GIMPLE_ASSIGN:
6334 {
6335 enum tree_code subcode = gimple_assign_rhs_code (stmt);
6336
6337 switch (get_gimple_rhs_class (subcode))
6338 {
6339 case GIMPLE_SINGLE_RHS:
6340 {
6341 tree rhs = gimple_assign_rhs1 (stmt);
6342 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
6343
6344 if (TREE_CODE (rhs) == SSA_NAME)
6345 {
6346 /* If the RHS is an SSA_NAME, return its known constant value,
6347 if any. */
6348 return (*valueize) (rhs);
6349 }
6350 /* Handle propagating invariant addresses into address
6351 operations. */
6352 else if (TREE_CODE (rhs) == ADDR_EXPR
6353 && !is_gimple_min_invariant (rhs))
6354 {
a90c8804 6355 poly_int64 offset = 0;
cfef45c8
RG
6356 tree base;
6357 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
6358 &offset,
6359 valueize);
6360 if (base
6361 && (CONSTANT_CLASS_P (base)
6362 || decl_address_invariant_p (base)))
6363 return build_invariant_address (TREE_TYPE (rhs),
6364 base, offset);
6365 }
6366 else if (TREE_CODE (rhs) == CONSTRUCTOR
6367 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
928686b1
RS
6368 && known_eq (CONSTRUCTOR_NELTS (rhs),
6369 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
cfef45c8 6370 {
794e3180
RS
6371 unsigned i, nelts;
6372 tree val;
cfef45c8 6373
928686b1 6374 nelts = CONSTRUCTOR_NELTS (rhs);
5ebaa477 6375 tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
cfef45c8
RG
6376 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
6377 {
6378 val = (*valueize) (val);
6379 if (TREE_CODE (val) == INTEGER_CST
6380 || TREE_CODE (val) == REAL_CST
6381 || TREE_CODE (val) == FIXED_CST)
794e3180 6382 vec.quick_push (val);
cfef45c8
RG
6383 else
6384 return NULL_TREE;
6385 }
6386
5ebaa477 6387 return vec.build ();
cfef45c8 6388 }
bdf37f7a
JH
6389 if (subcode == OBJ_TYPE_REF)
6390 {
6391 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
6392 /* If callee is constant, we can fold away the wrapper. */
6393 if (is_gimple_min_invariant (val))
6394 return val;
6395 }
cfef45c8
RG
6396
6397 if (kind == tcc_reference)
6398 {
6399 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
6400 || TREE_CODE (rhs) == REALPART_EXPR
6401 || TREE_CODE (rhs) == IMAGPART_EXPR)
6402 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6403 {
6404 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6405 return fold_unary_loc (EXPR_LOCATION (rhs),
6406 TREE_CODE (rhs),
6407 TREE_TYPE (rhs), val);
6408 }
6409 else if (TREE_CODE (rhs) == BIT_FIELD_REF
6410 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6411 {
6412 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6413 return fold_ternary_loc (EXPR_LOCATION (rhs),
6414 TREE_CODE (rhs),
6415 TREE_TYPE (rhs), val,
6416 TREE_OPERAND (rhs, 1),
6417 TREE_OPERAND (rhs, 2));
6418 }
6419 else if (TREE_CODE (rhs) == MEM_REF
6420 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6421 {
6422 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6423 if (TREE_CODE (val) == ADDR_EXPR
6424 && is_gimple_min_invariant (val))
6425 {
6426 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
6427 unshare_expr (val),
6428 TREE_OPERAND (rhs, 1));
6429 if (tem)
6430 rhs = tem;
6431 }
6432 }
6433 return fold_const_aggregate_ref_1 (rhs, valueize);
6434 }
6435 else if (kind == tcc_declaration)
6436 return get_symbol_constant_value (rhs);
6437 return rhs;
6438 }
6439
6440 case GIMPLE_UNARY_RHS:
f3582e54 6441 return NULL_TREE;
cfef45c8
RG
6442
6443 case GIMPLE_BINARY_RHS:
4b1b9e64
RB
6444 /* Translate &x + CST into an invariant form suitable for
6445 further propagation. */
6446 if (subcode == POINTER_PLUS_EXPR)
6447 {
4b1b9e64
RB
6448 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6449 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
4b1b9e64
RB
6450 if (TREE_CODE (op0) == ADDR_EXPR
6451 && TREE_CODE (op1) == INTEGER_CST)
6452 {
6453 tree off = fold_convert (ptr_type_node, op1);
4e3d3e40
RB
6454 return build1_loc
6455 (loc, ADDR_EXPR, TREE_TYPE (op0),
4b1b9e64
RB
6456 fold_build2 (MEM_REF,
6457 TREE_TYPE (TREE_TYPE (op0)),
6458 unshare_expr (op0), off));
6459 }
6460 }
59c20dc7
RB
6461 /* Canonicalize bool != 0 and bool == 0 appearing after
6462 valueization. While gimple_simplify handles this
6463 it can get confused by the ~X == 1 -> X == 0 transform
 6464 which we can't reduce to an SSA name or a constant
6465 (and we have no way to tell gimple_simplify to not
6466 consider those transforms in the first place). */
6467 else if (subcode == EQ_EXPR
6468 || subcode == NE_EXPR)
6469 {
6470 tree lhs = gimple_assign_lhs (stmt);
6471 tree op0 = gimple_assign_rhs1 (stmt);
6472 if (useless_type_conversion_p (TREE_TYPE (lhs),
6473 TREE_TYPE (op0)))
6474 {
6475 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6476 op0 = (*valueize) (op0);
8861704d
RB
6477 if (TREE_CODE (op0) == INTEGER_CST)
6478 std::swap (op0, op1);
6479 if (TREE_CODE (op1) == INTEGER_CST
6480 && ((subcode == NE_EXPR && integer_zerop (op1))
6481 || (subcode == EQ_EXPR && integer_onep (op1))))
6482 return op0;
59c20dc7
RB
6483 }
6484 }
4b1b9e64 6485 return NULL_TREE;
cfef45c8
RG
6486
6487 case GIMPLE_TERNARY_RHS:
6488 {
6489 /* Handle ternary operators that can appear in GIMPLE form. */
6490 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6491 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6492 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
cfef45c8
RG
6493 return fold_ternary_loc (loc, subcode,
6494 gimple_expr_type (stmt), op0, op1, op2);
6495 }
6496
6497 default:
6498 gcc_unreachable ();
6499 }
6500 }
6501
6502 case GIMPLE_CALL:
6503 {
25583c4f 6504 tree fn;
538dd0b7 6505 gcall *call_stmt = as_a <gcall *> (stmt);
25583c4f
RS
6506
6507 if (gimple_call_internal_p (stmt))
31e071ae
MP
6508 {
6509 enum tree_code subcode = ERROR_MARK;
6510 switch (gimple_call_internal_fn (stmt))
6511 {
6512 case IFN_UBSAN_CHECK_ADD:
6513 subcode = PLUS_EXPR;
6514 break;
6515 case IFN_UBSAN_CHECK_SUB:
6516 subcode = MINUS_EXPR;
6517 break;
6518 case IFN_UBSAN_CHECK_MUL:
6519 subcode = MULT_EXPR;
6520 break;
68fa96d6
ML
6521 case IFN_BUILTIN_EXPECT:
6522 {
6523 tree arg0 = gimple_call_arg (stmt, 0);
6524 tree op0 = (*valueize) (arg0);
6525 if (TREE_CODE (op0) == INTEGER_CST)
6526 return op0;
6527 return NULL_TREE;
6528 }
31e071ae
MP
6529 default:
6530 return NULL_TREE;
6531 }
368b454d
JJ
6532 tree arg0 = gimple_call_arg (stmt, 0);
6533 tree arg1 = gimple_call_arg (stmt, 1);
6534 tree op0 = (*valueize) (arg0);
6535 tree op1 = (*valueize) (arg1);
31e071ae
MP
6536
6537 if (TREE_CODE (op0) != INTEGER_CST
6538 || TREE_CODE (op1) != INTEGER_CST)
368b454d
JJ
6539 {
6540 switch (subcode)
6541 {
6542 case MULT_EXPR:
6543 /* x * 0 = 0 * x = 0 without overflow. */
6544 if (integer_zerop (op0) || integer_zerop (op1))
6545 return build_zero_cst (TREE_TYPE (arg0));
6546 break;
6547 case MINUS_EXPR:
6548 /* y - y = 0 without overflow. */
6549 if (operand_equal_p (op0, op1, 0))
6550 return build_zero_cst (TREE_TYPE (arg0));
6551 break;
6552 default:
6553 break;
6554 }
6555 }
6556 tree res
6557 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
31e071ae
MP
6558 if (res
6559 && TREE_CODE (res) == INTEGER_CST
6560 && !TREE_OVERFLOW (res))
6561 return res;
6562 return NULL_TREE;
6563 }
25583c4f
RS
6564
6565 fn = (*valueize) (gimple_call_fn (stmt));
cfef45c8 6566 if (TREE_CODE (fn) == ADDR_EXPR
6773658a 6567 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
3d78e008 6568 && fndecl_built_in_p (TREE_OPERAND (fn, 0))
5c944c6c
RB
6569 && gimple_builtin_call_types_compatible_p (stmt,
6570 TREE_OPERAND (fn, 0)))
cfef45c8
RG
6571 {
6572 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
a6a0570f 6573 tree retval;
cfef45c8
RG
6574 unsigned i;
6575 for (i = 0; i < gimple_call_num_args (stmt); ++i)
6576 args[i] = (*valueize) (gimple_call_arg (stmt, i));
a6a0570f 6577 retval = fold_builtin_call_array (loc,
538dd0b7 6578 gimple_call_return_type (call_stmt),
cfef45c8 6579 fn, gimple_call_num_args (stmt), args);
cfef45c8 6580 if (retval)
5c944c6c
RB
6581 {
6582 /* fold_call_expr wraps the result inside a NOP_EXPR. */
6583 STRIP_NOPS (retval);
538dd0b7
DM
6584 retval = fold_convert (gimple_call_return_type (call_stmt),
6585 retval);
5c944c6c 6586 }
cfef45c8
RG
6587 return retval;
6588 }
6589 return NULL_TREE;
6590 }
6591
6592 default:
6593 return NULL_TREE;
6594 }
6595}
6596
6597/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6598 Returns NULL_TREE if folding to a constant is not possible, otherwise
6599 returns a constant according to is_gimple_min_invariant. */
6600
6601tree
355fe088 6602gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
cfef45c8
RG
6603{
6604 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
6605 if (res && is_gimple_min_invariant (res))
6606 return res;
6607 return NULL_TREE;
6608}
6609
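/* Editor's note: an illustrative sketch, not part of gimple-fold.c, showing
   how a client of gimple_fold_stmt_to_constant might supply a VALUEIZE
   callback.  The lattice array "sketch_const_val" and its maintenance are
   hypothetical; only the shape of the callback and the call itself are the
   point.  */

static tree *sketch_const_val;	/* hypothetical: known constant per SSA version */

static tree
sketch_valueize (tree name)
{
  /* Return a known constant for NAME if we have one, otherwise NAME
     itself so folding simply uses the name unchanged.  */
  if (TREE_CODE (name) == SSA_NAME
      && sketch_const_val
      && sketch_const_val[SSA_NAME_VERSION (name)])
    return sketch_const_val[SSA_NAME_VERSION (name)];
  return name;
}

/* A pass would then fold a statement with
     tree val = gimple_fold_stmt_to_constant (stmt, sketch_valueize);
   and, when VAL is non-NULL, rely on it satisfying
   is_gimple_min_invariant.  */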
6610
6611/* The following set of functions are supposed to fold references using
6612 their constant initializers. */
6613
cfef45c8
RG
6614/* See if we can find a constructor defining the value of BASE.
6615 When the constructor is known with a constant offset (such as when
6616 BASE is array[40] and we know the constructor of the array), then
6617 BIT_OFFSET is adjusted accordingly.
6618
6619 As a special case, return error_mark_node when constructor
6620 is not explicitly available, but it is known to be zero
6621 such as 'static const int a;'. */
6622static tree
588db50c 6623get_base_constructor (tree base, poly_int64_pod *bit_offset,
cfef45c8
RG
6624 tree (*valueize)(tree))
6625{
588db50c 6626 poly_int64 bit_offset2, size, max_size;
ee45a32d
EB
6627 bool reverse;
6628
cfef45c8
RG
6629 if (TREE_CODE (base) == MEM_REF)
6630 {
6a5aca53
ML
6631 poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
6632 if (!boff.to_shwi (bit_offset))
6633 return NULL_TREE;
cfef45c8
RG
6634
6635 if (valueize
6636 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
6637 base = valueize (TREE_OPERAND (base, 0));
6638 if (!base || TREE_CODE (base) != ADDR_EXPR)
6639 return NULL_TREE;
6640 base = TREE_OPERAND (base, 0);
6641 }
13e88953
RB
6642 else if (valueize
6643 && TREE_CODE (base) == SSA_NAME)
6644 base = valueize (base);
cfef45c8
RG
6645
6646 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
6647 DECL_INITIAL. If BASE is a nested reference into another
6648 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
6649 the inner reference. */
6650 switch (TREE_CODE (base))
6651 {
6652 case VAR_DECL:
cfef45c8 6653 case CONST_DECL:
6a6dac52
JH
6654 {
6655 tree init = ctor_for_folding (base);
6656
688010ba 6657 /* Our semantics are the exact opposite of ctor_for_folding;
6a6dac52
JH
6658 NULL means unknown, while error_mark_node is 0. */
6659 if (init == error_mark_node)
6660 return NULL_TREE;
6661 if (!init)
6662 return error_mark_node;
6663 return init;
6664 }
cfef45c8 6665
13e88953
RB
6666 case VIEW_CONVERT_EXPR:
6667 return get_base_constructor (TREE_OPERAND (base, 0),
6668 bit_offset, valueize);
6669
cfef45c8
RG
6670 case ARRAY_REF:
6671 case COMPONENT_REF:
ee45a32d
EB
6672 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
6673 &reverse);
588db50c 6674 if (!known_size_p (max_size) || maybe_ne (size, max_size))
cfef45c8
RG
6675 return NULL_TREE;
6676 *bit_offset += bit_offset2;
6677 return get_base_constructor (base, bit_offset, valueize);
6678
cfef45c8
RG
6679 case CONSTRUCTOR:
6680 return base;
6681
6682 default:
13e88953
RB
6683 if (CONSTANT_CLASS_P (base))
6684 return base;
6685
cfef45c8
RG
6686 return NULL_TREE;
6687 }
6688}
6689
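/* Editor's note: an illustrative sketch, not part of gimple-fold.c, of how a
   caller is expected to interpret the three possible results of
   get_base_constructor.  The function name and parameters are hypothetical;
   the logic mirrors fold_const_aggregate_ref_1 below.  */

static tree
sketch_fold_load_from_base (tree type, tree base, poly_int64 bit_offset,
			    unsigned HOST_WIDE_INT bit_size)
{
  tree ctor = get_base_constructor (base, &bit_offset, NULL);
  if (ctor == error_mark_node)
    /* No explicit initializer, but known to be zero,
       e.g. 'static const int a;'.  */
    return build_zero_cst (type);
  if (!ctor)
    /* Constructor unknown; nothing to fold.  */
    return NULL_TREE;
  if (maybe_lt (bit_offset, 0))
    /* Out-of-bounds access; undefined, but do not fold.  */
    return NULL_TREE;
  return fold_ctor_reference (type, ctor, bit_offset, bit_size, base);
}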
35b4d3a6
MS
6690/* CTOR is CONSTRUCTOR of an array type. Fold a reference of SIZE bits
6691 to the memory at bit OFFSET. When non-null, TYPE is the expected
6692 type of the reference; otherwise the type of the referenced element
6693 is used instead. When SIZE is zero, attempt to fold a reference to
6694 the entire element which OFFSET refers to. Increment *SUBOFF by
6695 the bit offset of the accessed element. */
cfef45c8
RG
6696
6697static tree
6698fold_array_ctor_reference (tree type, tree ctor,
6699 unsigned HOST_WIDE_INT offset,
c44c2088 6700 unsigned HOST_WIDE_INT size,
35b4d3a6
MS
6701 tree from_decl,
6702 unsigned HOST_WIDE_INT *suboff)
cfef45c8 6703{
807e902e
KZ
6704 offset_int low_bound;
6705 offset_int elt_size;
807e902e 6706 offset_int access_index;
6a636014 6707 tree domain_type = NULL_TREE;
cfef45c8
RG
6708 HOST_WIDE_INT inner_offset;
6709
6710 /* Compute low bound and elt size. */
eb8f1123
RG
6711 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
6712 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
cfef45c8
RG
6713 if (domain_type && TYPE_MIN_VALUE (domain_type))
6714 {
6aa238a1 6715 /* Static constructors for variably sized objects make no sense. */
9ef2eff0
RB
6716 if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
6717 return NULL_TREE;
807e902e 6718 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
cfef45c8
RG
6719 }
6720 else
807e902e 6721 low_bound = 0;
6aa238a1 6722 /* Static constructors for variably sized objects make no sense. */
9ef2eff0
RB
6723 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
6724 return NULL_TREE;
807e902e 6725 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
cfef45c8 6726
35b4d3a6 6727 /* When TYPE is non-null, verify that it specifies a constant-sized
831e688a 6728 access of a multiple of the array element size. Avoid division
6aa238a1
MS
6729 by zero below when ELT_SIZE is zero, such as with the result of
6730 an initializer for a zero-length array or an empty struct. */
6731 if (elt_size == 0
6732 || (type
6733 && (!TYPE_SIZE_UNIT (type)
831e688a 6734 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST)))
cfef45c8
RG
6735 return NULL_TREE;
6736
6737 /* Compute the array index we look for. */
807e902e
KZ
6738 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
6739 elt_size);
27bcd47c 6740 access_index += low_bound;
cfef45c8
RG
6741
6742 /* And offset within the access. */
27bcd47c 6743 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
cfef45c8 6744
3c076c96
JJ
6745 unsigned HOST_WIDE_INT elt_sz = elt_size.to_uhwi ();
6746 if (size > elt_sz * BITS_PER_UNIT)
831e688a
RB
6747 {
6748 /* native_encode_expr constraints. */
6749 if (size > MAX_BITSIZE_MODE_ANY_MODE
6750 || size % BITS_PER_UNIT != 0
3c076c96
JJ
6751 || inner_offset % BITS_PER_UNIT != 0
6752 || elt_sz > MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT)
831e688a
RB
6753 return NULL_TREE;
6754
6755 unsigned ctor_idx;
6756 tree val = get_array_ctor_element_at_index (ctor, access_index,
6757 &ctor_idx);
6758 if (!val && ctor_idx >= CONSTRUCTOR_NELTS (ctor))
6759 return build_zero_cst (type);
6760
6761 /* native-encode adjacent ctor elements. */
6762 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
6763 unsigned bufoff = 0;
6764 offset_int index = 0;
6765 offset_int max_index = access_index;
6766 constructor_elt *elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
6767 if (!val)
6768 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
6769 else if (!CONSTANT_CLASS_P (val))
6770 return NULL_TREE;
6771 if (!elt->index)
6772 ;
6773 else if (TREE_CODE (elt->index) == RANGE_EXPR)
6774 {
6775 index = wi::to_offset (TREE_OPERAND (elt->index, 0));
6776 max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
6777 }
6778 else
6779 index = max_index = wi::to_offset (elt->index);
6780 index = wi::umax (index, access_index);
6781 do
6782 {
3c076c96
JJ
6783 if (bufoff + elt_sz > sizeof (buf))
6784 elt_sz = sizeof (buf) - bufoff;
6785 int len = native_encode_expr (val, buf + bufoff, elt_sz,
831e688a 6786 inner_offset / BITS_PER_UNIT);
3c076c96 6787 if (len != (int) elt_sz - inner_offset / BITS_PER_UNIT)
831e688a
RB
6788 return NULL_TREE;
6789 inner_offset = 0;
6790 bufoff += len;
6791
6792 access_index += 1;
6793 if (wi::cmpu (access_index, index) == 0)
6794 val = elt->value;
6795 else if (wi::cmpu (access_index, max_index) > 0)
6796 {
6797 ctor_idx++;
6798 if (ctor_idx >= CONSTRUCTOR_NELTS (ctor))
6799 {
6800 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
6801 ++max_index;
6802 }
6803 else
6804 {
6805 elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
6806 index = 0;
6807 max_index = access_index;
6808 if (!elt->index)
6809 ;
6810 else if (TREE_CODE (elt->index) == RANGE_EXPR)
6811 {
6812 index = wi::to_offset (TREE_OPERAND (elt->index, 0));
6813 max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
6814 }
6815 else
6816 index = max_index = wi::to_offset (elt->index);
6817 index = wi::umax (index, access_index);
6818 if (wi::cmpu (access_index, index) == 0)
6819 val = elt->value;
6820 else
6821 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
6822 }
6823 }
6824 }
6825 while (bufoff < size / BITS_PER_UNIT);
6826 *suboff += size;
6827 return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
6828 }
6829
6a636014 6830 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
35b4d3a6
MS
6831 {
6832 if (!size && TREE_CODE (val) != CONSTRUCTOR)
6833 {
6834 /* For the final reference to the entire accessed element
6835 (SIZE is zero), reset INNER_OFFSET, disregard TYPE (which
6836 may be null) in favor of the type of the element, and set
6837 SIZE to the size of the accessed element. */
6838 inner_offset = 0;
6839 type = TREE_TYPE (val);
6840 size = elt_size.to_uhwi () * BITS_PER_UNIT;
6841 }
6842
6843 *suboff += (access_index * elt_size * BITS_PER_UNIT).to_uhwi ();
6844 return fold_ctor_reference (type, val, inner_offset, size, from_decl,
6845 suboff);
6846 }
cfef45c8 6847
35b4d3a6
MS
6848 /* Memory not explicitly mentioned in constructor is 0 (or
6849 the reference is out of range). */
6850 return type ? build_zero_cst (type) : NULL_TREE;
cfef45c8
RG
6851}
6852
35b4d3a6
MS
6853/* CTOR is CONSTRUCTOR of an aggregate or vector. Fold a reference
6854 of SIZE bits to the memory at bit OFFSET. When non-null, TYPE
6855 is the expected type of the reference; otherwise the type of
6856 the referenced member is used instead. When SIZE is zero,
6857 attempt to fold a reference to the entire member which OFFSET
6858 refers to. Increment *SUBOFF by the bit offset
6859 of the accessed member. */
cfef45c8
RG
6860
6861static tree
6862fold_nonarray_ctor_reference (tree type, tree ctor,
6863 unsigned HOST_WIDE_INT offset,
c44c2088 6864 unsigned HOST_WIDE_INT size,
35b4d3a6
MS
6865 tree from_decl,
6866 unsigned HOST_WIDE_INT *suboff)
cfef45c8
RG
6867{
6868 unsigned HOST_WIDE_INT cnt;
6869 tree cfield, cval;
6870
6871 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
6872 cval)
6873 {
6874 tree byte_offset = DECL_FIELD_OFFSET (cfield);
6875 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
6876 tree field_size = DECL_SIZE (cfield);
35b4d3a6
MS
6877
6878 if (!field_size)
6879 {
6880 /* Determine the size of the flexible array member from
6881 the size of the initializer provided for it. */
6882 field_size = TYPE_SIZE (TREE_TYPE (cval));
6883 }
cfef45c8
RG
6884
6885 /* Variable sized objects in static constructors make no sense,
6886 but field_size can be NULL for flexible array members. */
6887 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
6888 && TREE_CODE (byte_offset) == INTEGER_CST
6889 && (field_size != NULL_TREE
6890 ? TREE_CODE (field_size) == INTEGER_CST
6891 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
6892
6893 /* Compute bit offset of the field. */
35b4d3a6
MS
6894 offset_int bitoffset
6895 = (wi::to_offset (field_offset)
6896 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
cfef45c8 6897 /* Compute bit offset where the field ends. */
35b4d3a6 6898 offset_int bitoffset_end;
cfef45c8 6899 if (field_size != NULL_TREE)
807e902e 6900 bitoffset_end = bitoffset + wi::to_offset (field_size);
cfef45c8 6901 else
807e902e 6902 bitoffset_end = 0;
cfef45c8 6903
35b4d3a6
MS
6904 /* Compute the bit offset of the end of the desired access.
6905 As a special case, if the size of the desired access is
6906 zero, assume the access is to the entire field (and let
6907 the caller make any necessary adjustments by storing
6908 the actual bounds of the field in FIELDBOUNDS). */
6909 offset_int access_end = offset_int (offset);
6910 if (size)
6911 access_end += size;
6912 else
6913 access_end = bitoffset_end;
b8b2b009 6914
35b4d3a6
MS
6915 /* Is there any overlap between the desired access at
6916 [OFFSET, OFFSET+SIZE) and the offset of the field within
6917 the object at [BITOFFSET, BITOFFSET_END)? */
807e902e 6918 if (wi::cmps (access_end, bitoffset) > 0
cfef45c8 6919 && (field_size == NULL_TREE
807e902e 6920 || wi::lts_p (offset, bitoffset_end)))
cfef45c8 6921 {
35b4d3a6
MS
6922 *suboff += bitoffset.to_uhwi ();
6923
6924 if (!size && TREE_CODE (cval) != CONSTRUCTOR)
6925 {
6926 /* For the final reference to the entire accessed member
6927 (SIZE is zero), reset OFFSET, disregard TYPE (which may
6928 be null) in favor of the type of the member, and set
6929 SIZE to the size of the accessed member. */
6930 offset = bitoffset.to_uhwi ();
6931 type = TREE_TYPE (cval);
6932 size = (bitoffset_end - bitoffset).to_uhwi ();
6933 }
6934
6935 /* We do have overlap. Now see if the field is large enough
6936 to cover the access. Give up for accesses that extend
6937 beyond the end of the object or that span multiple fields. */
807e902e 6938 if (wi::cmps (access_end, bitoffset_end) > 0)
cfef45c8 6939 return NULL_TREE;
032c80e9 6940 if (offset < bitoffset)
b8b2b009 6941 return NULL_TREE;
35b4d3a6
MS
6942
6943 offset_int inner_offset = offset_int (offset) - bitoffset;
cfef45c8 6944 return fold_ctor_reference (type, cval,
27bcd47c 6945 inner_offset.to_uhwi (), size,
35b4d3a6 6946 from_decl, suboff);
cfef45c8
RG
6947 }
6948 }
14b7950f
MS
6949
6950 if (!type)
6951 return NULL_TREE;
6952
6953 return build_zero_cst (type);
cfef45c8
RG
6954}
6955
35b4d3a6 6956/* CTOR is value initializing memory. Fold a reference of TYPE and
14b7950f 6957 bit size POLY_SIZE to the memory at bit POLY_OFFSET. When POLY_SIZE
35b4d3a6
MS
6958 is zero, attempt to fold a reference to the entire subobject
6959 which OFFSET refers to. This is used when folding accesses to
6960 string members of aggregates. When non-null, set *SUBOFF to
6961 the bit offset of the accessed subobject. */
cfef45c8 6962
8403c2cf 6963tree
35b4d3a6
MS
6964fold_ctor_reference (tree type, tree ctor, const poly_uint64 &poly_offset,
6965 const poly_uint64 &poly_size, tree from_decl,
6966 unsigned HOST_WIDE_INT *suboff /* = NULL */)
cfef45c8
RG
6967{
6968 tree ret;
6969
6970 /* We found the field with an exact match. */
35b4d3a6
MS
6971 if (type
6972 && useless_type_conversion_p (type, TREE_TYPE (ctor))
30acf282 6973 && known_eq (poly_offset, 0U))
9d60be38 6974 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
cfef45c8 6975
30acf282
RS
6976 /* The remaining optimizations need a constant size and offset. */
6977 unsigned HOST_WIDE_INT size, offset;
6978 if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
6979 return NULL_TREE;
6980
cfef45c8
RG
6981 /* We are at the end of the walk, see if we can view convert the
6982 result. */
6983 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
6984 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
3d8208ce
TP
6985 && !compare_tree_int (TYPE_SIZE (type), size)
6986 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
cfef45c8 6987 {
9d60be38 6988 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
cfef45c8 6989 if (ret)
672d9f8e
RB
6990 {
6991 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
6992 if (ret)
6993 STRIP_USELESS_TYPE_CONVERSION (ret);
6994 }
cfef45c8
RG
6995 return ret;
6996 }
b2505143
RB
6997 /* For constants and byte-aligned/sized reads try to go through
6998 native_encode/interpret. */
6999 if (CONSTANT_CLASS_P (ctor)
7000 && BITS_PER_UNIT == 8
7001 && offset % BITS_PER_UNIT == 0
ea69031c 7002 && offset / BITS_PER_UNIT <= INT_MAX
b2505143 7003 && size % BITS_PER_UNIT == 0
ea69031c
JJ
7004 && size <= MAX_BITSIZE_MODE_ANY_MODE
7005 && can_native_interpret_type_p (type))
b2505143
RB
7006 {
7007 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1ff0a84c
JJ
7008 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
7009 offset / BITS_PER_UNIT);
7010 if (len > 0)
7011 return native_interpret_expr (type, buf, len);
b2505143 7012 }
cfef45c8
RG
7013 if (TREE_CODE (ctor) == CONSTRUCTOR)
7014 {
35b4d3a6
MS
7015 unsigned HOST_WIDE_INT dummy = 0;
7016 if (!suboff)
7017 suboff = &dummy;
cfef45c8 7018
ea69031c 7019 tree ret;
eb8f1123
RG
7020 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
7021 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
ea69031c
JJ
7022 ret = fold_array_ctor_reference (type, ctor, offset, size,
7023 from_decl, suboff);
7024 else
7025 ret = fold_nonarray_ctor_reference (type, ctor, offset, size,
7026 from_decl, suboff);
7027
7028 /* Fall back to native_encode_initializer. Needs to be done
7029 only in the outermost fold_ctor_reference call (because it itself
7030 recurses into CONSTRUCTORs) and doesn't update suboff. */
7031 if (ret == NULL_TREE
7032 && suboff == &dummy
7033 && BITS_PER_UNIT == 8
7034 && offset % BITS_PER_UNIT == 0
7035 && offset / BITS_PER_UNIT <= INT_MAX
7036 && size % BITS_PER_UNIT == 0
7037 && size <= MAX_BITSIZE_MODE_ANY_MODE
7038 && can_native_interpret_type_p (type))
7039 {
7040 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
7041 int len = native_encode_initializer (ctor, buf, size / BITS_PER_UNIT,
7042 offset / BITS_PER_UNIT);
7043 if (len > 0)
7044 return native_interpret_expr (type, buf, len);
7045 }
35b4d3a6 7046
ea69031c 7047 return ret;
cfef45c8
RG
7048 }
7049
7050 return NULL_TREE;
7051}
7052
7053/* Return the tree representing the element referenced by T if T is an
7054 ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
7055 names using VALUEIZE. Return NULL_TREE otherwise. */
7056
7057tree
7058fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
7059{
7060 tree ctor, idx, base;
588db50c 7061 poly_int64 offset, size, max_size;
cfef45c8 7062 tree tem;
ee45a32d 7063 bool reverse;
cfef45c8 7064
f8a7df45
RG
7065 if (TREE_THIS_VOLATILE (t))
7066 return NULL_TREE;
7067
3a65ee74 7068 if (DECL_P (t))
cfef45c8
RG
7069 return get_symbol_constant_value (t);
7070
7071 tem = fold_read_from_constant_string (t);
7072 if (tem)
7073 return tem;
7074
7075 switch (TREE_CODE (t))
7076 {
7077 case ARRAY_REF:
7078 case ARRAY_RANGE_REF:
7079 /* Constant indexes are handled well by get_base_constructor.
7080 Only special case variable offsets.
7081 FIXME: This code can't handle nested references with variable indexes
7082 (they will be handled only by iteration of ccp). Perhaps we can bring
7083 get_ref_base_and_extent here and make it use a valueize callback. */
7084 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
7085 && valueize
7086 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
588db50c 7087 && poly_int_tree_p (idx))
cfef45c8
RG
7088 {
7089 tree low_bound, unit_size;
7090
7091 /* If the resulting bit-offset is constant, track it. */
7092 if ((low_bound = array_ref_low_bound (t),
588db50c 7093 poly_int_tree_p (low_bound))
cfef45c8 7094 && (unit_size = array_ref_element_size (t),
807e902e 7095 tree_fits_uhwi_p (unit_size)))
cfef45c8 7096 {
588db50c
RS
7097 poly_offset_int woffset
7098 = wi::sext (wi::to_poly_offset (idx)
7099 - wi::to_poly_offset (low_bound),
807e902e 7100 TYPE_PRECISION (TREE_TYPE (idx)));
a9e6359a
RB
7101 woffset *= tree_to_uhwi (unit_size);
7102 woffset *= BITS_PER_UNIT;
588db50c 7103 if (woffset.to_shwi (&offset))
807e902e 7104 {
807e902e
KZ
7105 base = TREE_OPERAND (t, 0);
7106 ctor = get_base_constructor (base, &offset, valueize);
7107 /* Empty constructor. Always fold to 0. */
7108 if (ctor == error_mark_node)
7109 return build_zero_cst (TREE_TYPE (t));
7110 /* Out-of-bounds array access. Value is undefined,
7111 but don't fold. */
588db50c 7112 if (maybe_lt (offset, 0))
807e902e 7113 return NULL_TREE;
67914693 7114 /* We cannot determine ctor. */
807e902e
KZ
7115 if (!ctor)
7116 return NULL_TREE;
7117 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
7118 tree_to_uhwi (unit_size)
7119 * BITS_PER_UNIT,
7120 base);
7121 }
cfef45c8
RG
7122 }
7123 }
7124 /* Fallthru. */
7125
7126 case COMPONENT_REF:
7127 case BIT_FIELD_REF:
7128 case TARGET_MEM_REF:
7129 case MEM_REF:
ee45a32d 7130 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
cfef45c8
RG
7131 ctor = get_base_constructor (base, &offset, valueize);
7132
7133 /* Empty constructor. Always fold to 0. */
7134 if (ctor == error_mark_node)
7135 return build_zero_cst (TREE_TYPE (t));
7136 /* We do not know precise address. */
588db50c 7137 if (!known_size_p (max_size) || maybe_ne (max_size, size))
cfef45c8 7138 return NULL_TREE;
67914693 7139 /* We cannot determine ctor. */
cfef45c8
RG
7140 if (!ctor)
7141 return NULL_TREE;
7142
7143 /* Out-of-bounds array access. Value is undefined, but don't fold. */
588db50c 7144 if (maybe_lt (offset, 0))
cfef45c8
RG
7145 return NULL_TREE;
7146
c44c2088
JH
7147 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
7148 base);
cfef45c8
RG
7149
7150 case REALPART_EXPR:
7151 case IMAGPART_EXPR:
7152 {
7153 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
7154 if (c && TREE_CODE (c) == COMPLEX_CST)
7155 return fold_build1_loc (EXPR_LOCATION (t),
ea69031c 7156 TREE_CODE (t), TREE_TYPE (t), c);
cfef45c8
RG
7157 break;
7158 }
7159
7160 default:
7161 break;
7162 }
7163
7164 return NULL_TREE;
7165}
7166
7167tree
7168fold_const_aggregate_ref (tree t)
7169{
7170 return fold_const_aggregate_ref_1 (t, NULL);
7171}
06bc3ec7 7172
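/* Editor's note: an illustrative sketch, not part of gimple-fold.c, of how a
   pass might use fold_const_aggregate_ref above on a load statement.  GSI,
   STMT and the function name are hypothetical.  */

static bool
sketch_fold_load_stmt (gimple_stmt_iterator *gsi, gimple *stmt)
{
  if (!gimple_assign_single_p (stmt))
    return false;
  tree cst = fold_const_aggregate_ref (gimple_assign_rhs1 (stmt));
  if (!cst || !is_gimple_min_invariant (cst))
    return false;
  /* Replace the aggregate read with the constant it folds to.  */
  gimple_assign_set_rhs_from_tree (gsi, cst);
  update_stmt (gsi_stmt (*gsi));
  return true;
}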
85942f45 7173/* Look up the virtual method with index TOKEN in a virtual table V
ec77d61f
JH
7174 at OFFSET.
7175 Set CAN_REFER, if non-NULL, to false if the method
7176 is not referable or if the virtual table is ill-formed (such as one rewritten
7177 by a non-C++-produced symbol). Otherwise just return NULL in that case. */
81fa35bd
MJ
7178
7179tree
85942f45
JH
7180gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
7181 tree v,
ec77d61f
JH
7182 unsigned HOST_WIDE_INT offset,
7183 bool *can_refer)
81fa35bd 7184{
85942f45
JH
7185 tree vtable = v, init, fn;
7186 unsigned HOST_WIDE_INT size;
8c311b50
JH
7187 unsigned HOST_WIDE_INT elt_size, access_index;
7188 tree domain_type;
81fa35bd 7189
ec77d61f
JH
7190 if (can_refer)
7191 *can_refer = true;
7192
9de2f554 7193 /* First of all, double-check we have a virtual table. */
8813a647 7194 if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
ec77d61f 7195 {
ec77d61f
JH
7196 /* Pass down that we lost track of the target. */
7197 if (can_refer)
7198 *can_refer = false;
7199 return NULL_TREE;
7200 }
9de2f554 7201
2aa3da06
JH
7202 init = ctor_for_folding (v);
7203
9de2f554 7204 /* The virtual tables should always be born with constructors
2aa3da06
JH
7205 and we should always assume that they are available for
7206 folding. At the moment we do not stream them in all cases,
7207 but it should never happen that the ctor seems unreachable. */
7208 gcc_assert (init);
7209 if (init == error_mark_node)
7210 {
ec77d61f
JH
7211 /* Pass down that we lost track of the target. */
7212 if (can_refer)
7213 *can_refer = false;
2aa3da06
JH
7214 return NULL_TREE;
7215 }
81fa35bd 7216 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
ae7e9ddd 7217 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
85942f45 7218 offset *= BITS_PER_UNIT;
81fa35bd 7219 offset += token * size;
9de2f554 7220
8c311b50
JH
7221 /* Lookup the value in the constructor that is assumed to be array.
7222 This is equivalent to
7223 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
7224 offset, size, NULL);
7225 but in a constant time. We expect that frontend produced a simple
7226 array without indexed initializers. */
7227
7228 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
7229 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
7230 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
7231 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
7232
7233 access_index = offset / BITS_PER_UNIT / elt_size;
7234 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
7235
bf8d8309
MP
7236 /* The C++ FE can now produce indexed fields, and we check if the indexes
7237 match. */
8c311b50
JH
7238 if (access_index < CONSTRUCTOR_NELTS (init))
7239 {
7240 fn = CONSTRUCTOR_ELT (init, access_index)->value;
bf8d8309
MP
7241 tree idx = CONSTRUCTOR_ELT (init, access_index)->index;
7242 gcc_checking_assert (!idx || tree_to_uhwi (idx) == access_index);
8c311b50
JH
7243 STRIP_NOPS (fn);
7244 }
7245 else
7246 fn = NULL;
9de2f554
JH
7247
7248 /* For a type-inconsistent program we may end up looking up a virtual method
7249 in a virtual table that does not contain TOKEN entries. We may overrun
7250 the virtual table and pick up a constant or RTTI info pointer.
7251 In any case the call is undefined. */
7252 if (!fn
7253 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
7254 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
7255 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
7256 else
7257 {
7258 fn = TREE_OPERAND (fn, 0);
7259
7260 /* When the cgraph node is missing and the function is not public, we cannot
7261 devirtualize. This can happen in WHOPR when the actual method
7262 ends up in another partition, because we found the devirtualization
7263 possibility too late. */
7264 if (!can_refer_decl_in_current_unit_p (fn, vtable))
ec77d61f
JH
7265 {
7266 if (can_refer)
7267 {
7268 *can_refer = false;
7269 return fn;
7270 }
7271 return NULL_TREE;
7272 }
9de2f554 7273 }
81fa35bd 7274
7501ca28
RG
7275 /* Make sure we create a cgraph node for functions we'll reference.
7276 They can be non-existent if the reference comes from an entry
7277 of an external vtable for example. */
d52f5295 7278 cgraph_node::get_create (fn);
7501ca28 7279
81fa35bd
MJ
7280 return fn;
7281}
7282
85942f45
JH
7283/* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
7284 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
7285 KNOWN_BINFO carries the binfo describing the true type of
ec77d61f
JH
7286 OBJ_TYPE_REF_OBJECT(REF).
7287 Set CAN_REFER, if non-NULL, to false if the method
7288 is not referable or if the virtual table is ill-formed (such as one rewritten
7289 by a non-C++-produced symbol). Otherwise just return NULL in that case. */
85942f45
JH
7290
7291tree
ec77d61f
JH
7292gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
7293 bool *can_refer)
85942f45
JH
7294{
7295 unsigned HOST_WIDE_INT offset;
7296 tree v;
7297
7298 v = BINFO_VTABLE (known_binfo);
7299 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
7300 if (!v)
7301 return NULL_TREE;
7302
7303 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
ec77d61f
JH
7304 {
7305 if (can_refer)
7306 *can_refer = false;
7307 return NULL_TREE;
7308 }
7309 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
85942f45
JH
7310}
7311
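/* Editor's note: an illustrative sketch, not part of gimple-fold.c, of how a
   devirtualization client might use the two helpers above on a polymorphic
   call.  CALL, KNOWN_BINFO and the function name are hypothetical.  */

static void
sketch_try_devirtualize (gcall *call, tree known_binfo)
{
  tree callee = gimple_call_fn (call);
  if (!callee || TREE_CODE (callee) != OBJ_TYPE_REF)
    return;
  bool can_refer;
  tree fndecl
    = gimple_get_virt_method_for_binfo (tree_to_uhwi (OBJ_TYPE_REF_TOKEN (callee)),
					known_binfo, &can_refer);
  if (fndecl && can_refer)
    {
      /* Turn the indirect call into a direct one.  */
      gimple_call_set_fndecl (call, fndecl);
      update_stmt (call);
    }
}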
737f500a
RB
7312/* Given a pointer value T, return a simplified version of an
7313 indirection through T, or NULL_TREE if no simplification is
b184c8f1
AM
7314 possible. Note that the resulting type may be different from
7315 the type pointed to in the sense that it is still compatible
7316 from the langhooks point of view. */
7317
7318tree
7319gimple_fold_indirect_ref (tree t)
7320{
7321 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
7322 tree sub = t;
7323 tree subtype;
7324
7325 STRIP_NOPS (sub);
7326 subtype = TREE_TYPE (sub);
737f500a
RB
7327 if (!POINTER_TYPE_P (subtype)
7328 || TYPE_REF_CAN_ALIAS_ALL (ptype))
b184c8f1
AM
7329 return NULL_TREE;
7330
7331 if (TREE_CODE (sub) == ADDR_EXPR)
7332 {
7333 tree op = TREE_OPERAND (sub, 0);
7334 tree optype = TREE_TYPE (op);
7335 /* *&p => p */
7336 if (useless_type_conversion_p (type, optype))
7337 return op;
7338
7339 /* *(foo *)&fooarray => fooarray[0] */
7340 if (TREE_CODE (optype) == ARRAY_TYPE
7341 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
7342 && useless_type_conversion_p (type, TREE_TYPE (optype)))
7343 {
7344 tree type_domain = TYPE_DOMAIN (optype);
7345 tree min_val = size_zero_node;
7346 if (type_domain && TYPE_MIN_VALUE (type_domain))
7347 min_val = TYPE_MIN_VALUE (type_domain);
7348 if (TREE_CODE (min_val) == INTEGER_CST)
7349 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
7350 }
7351 /* *(foo *)&complexfoo => __real__ complexfoo */
7352 else if (TREE_CODE (optype) == COMPLEX_TYPE
7353 && useless_type_conversion_p (type, TREE_TYPE (optype)))
7354 return fold_build1 (REALPART_EXPR, type, op);
7355 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
7356 else if (TREE_CODE (optype) == VECTOR_TYPE
7357 && useless_type_conversion_p (type, TREE_TYPE (optype)))
7358 {
7359 tree part_width = TYPE_SIZE (type);
7360 tree index = bitsize_int (0);
7361 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
7362 }
7363 }
7364
7365 /* *(p + CST) -> ... */
7366 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
7367 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
7368 {
7369 tree addr = TREE_OPERAND (sub, 0);
7370 tree off = TREE_OPERAND (sub, 1);
7371 tree addrtype;
7372
7373 STRIP_NOPS (addr);
7374 addrtype = TREE_TYPE (addr);
7375
7376 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
7377 if (TREE_CODE (addr) == ADDR_EXPR
7378 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
7379 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
cc269bb6 7380 && tree_fits_uhwi_p (off))
b184c8f1 7381 {
ae7e9ddd 7382 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
b184c8f1
AM
7383 tree part_width = TYPE_SIZE (type);
7384 unsigned HOST_WIDE_INT part_widthi
9439e9a1 7385 = tree_to_shwi (part_width) / BITS_PER_UNIT;
b184c8f1
AM
7386 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
7387 tree index = bitsize_int (indexi);
928686b1
RS
7388 if (known_lt (offset / part_widthi,
7389 TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
b184c8f1
AM
7390 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
7391 part_width, index);
7392 }
7393
7394 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
7395 if (TREE_CODE (addr) == ADDR_EXPR
7396 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
7397 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
7398 {
7399 tree size = TYPE_SIZE_UNIT (type);
7400 if (tree_int_cst_equal (size, off))
7401 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
7402 }
7403
7404 /* *(p + CST) -> MEM_REF <p, CST>. */
7405 if (TREE_CODE (addr) != ADDR_EXPR
7406 || DECL_P (TREE_OPERAND (addr, 0)))
7407 return fold_build2 (MEM_REF, type,
7408 addr,
8e6cdc90 7409 wide_int_to_tree (ptype, wi::to_wide (off)));
b184c8f1
AM
7410 }
7411
7412 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
7413 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
7414 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
7415 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
7416 {
7417 tree type_domain;
7418 tree min_val = size_zero_node;
7419 tree osub = sub;
7420 sub = gimple_fold_indirect_ref (sub);
7421 if (! sub)
7422 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
7423 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
7424 if (type_domain && TYPE_MIN_VALUE (type_domain))
7425 min_val = TYPE_MIN_VALUE (type_domain);
7426 if (TREE_CODE (min_val) == INTEGER_CST)
7427 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
7428 }
7429
7430 return NULL_TREE;
7431}
19e51b40
JJ
7432
7433/* Return true if CODE is an operation that, when operating on signed
7434 integer types, involves undefined behavior on overflow and the
7435 operation can be expressed with unsigned arithmetic. */
7436
7437bool
7438arith_code_with_undefined_signed_overflow (tree_code code)
7439{
7440 switch (code)
7441 {
8e2c037d 7442 case ABS_EXPR:
19e51b40
JJ
7443 case PLUS_EXPR:
7444 case MINUS_EXPR:
7445 case MULT_EXPR:
7446 case NEGATE_EXPR:
7447 case POINTER_PLUS_EXPR:
7448 return true;
7449 default:
7450 return false;
7451 }
7452}
7453
7454/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
7455 operation that can be transformed to unsigned arithmetic by converting
7456 its operand, carrying out the operation in the corresponding unsigned
7457 type and converting the result back to the original type.
7458
7459 Returns a sequence of statements that replace STMT and also contain
7460 a modified form of STMT itself. */
7461
7462gimple_seq
355fe088 7463rewrite_to_defined_overflow (gimple *stmt)
19e51b40
JJ
7464{
7465 if (dump_file && (dump_flags & TDF_DETAILS))
7466 {
7467 fprintf (dump_file, "rewriting stmt with undefined signed "
7468 "overflow ");
7469 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
7470 }
7471
7472 tree lhs = gimple_assign_lhs (stmt);
7473 tree type = unsigned_type_for (TREE_TYPE (lhs));
7474 gimple_seq stmts = NULL;
8e2c037d
RB
7475 if (gimple_assign_rhs_code (stmt) == ABS_EXPR)
7476 gimple_assign_set_rhs_code (stmt, ABSU_EXPR);
7477 else
7478 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
7479 {
7480 tree op = gimple_op (stmt, i);
7481 op = gimple_convert (&stmts, type, op);
7482 gimple_set_op (stmt, i, op);
7483 }
19e51b40
JJ
7484 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
7485 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
7486 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
f74c4b2c 7487 gimple_set_modified (stmt, true);
19e51b40 7488 gimple_seq_add_stmt (&stmts, stmt);
355fe088 7489 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
19e51b40
JJ
7490 gimple_seq_add_stmt (&stmts, cvt);
7491
7492 return stmts;
7493}
d4f5cd5e 7494
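/* Editor's note: an illustrative sketch, not part of gimple-fold.c, of a
   typical use of rewrite_to_defined_overflow, for example before moving a
   statement somewhere it may execute unconditionally.  Splicing via GSI is
   only one possible way to reinsert the returned sequence.  For
   "x_3 = a_1 + b_2" with int operands the sequence reads roughly
       _4 = (unsigned int) a_1;
       _5 = (unsigned int) b_2;
       _6 = _4 + _5;
       x_3 = (int) _6;  */

static void
sketch_make_overflow_defined (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  if (is_gimple_assign (stmt)
      && INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_lhs (stmt)))
      && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (gimple_assign_lhs (stmt)))
      && arith_code_with_undefined_signed_overflow
	   (gimple_assign_rhs_code (stmt)))
    {
      /* The returned sequence ends with the rewritten STMT itself.  */
      gimple_seq seq = rewrite_to_defined_overflow (stmt);
      gsi_remove (gsi, false);
      gsi_insert_seq_before (gsi, seq, GSI_SAME_STMT);
    }
}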
3d2cf79f 7495
c26de36d
RB
7496/* The valueization hook we use for the gimple_build API simplification.
7497 This makes us match fold_buildN behavior by only combining with
7498 statements in the sequence(s) we are currently building. */
7499
7500static tree
7501gimple_build_valueize (tree op)
7502{
7503 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
7504 return op;
7505 return NULL_TREE;
7506}
7507
3d2cf79f 7508/* Build the expression CODE OP0 of type TYPE with location LOC,
c26de36d 7509 simplifying it first if possible. Returns the built
3d2cf79f
RB
7510 expression value and appends statements possibly defining it
7511 to SEQ. */
7512
7513tree
7514gimple_build (gimple_seq *seq, location_t loc,
c26de36d 7515 enum tree_code code, tree type, tree op0)
3d2cf79f 7516{
c26de36d 7517 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
3d2cf79f
RB
7518 if (!res)
7519 {
a15ebbcd 7520 res = create_tmp_reg_or_ssa_name (type);
355fe088 7521 gimple *stmt;
3d2cf79f
RB
7522 if (code == REALPART_EXPR
7523 || code == IMAGPART_EXPR
7524 || code == VIEW_CONVERT_EXPR)
0d0e4a03 7525 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
3d2cf79f 7526 else
0d0e4a03 7527 stmt = gimple_build_assign (res, code, op0);
3d2cf79f
RB
7528 gimple_set_location (stmt, loc);
7529 gimple_seq_add_stmt_without_update (seq, stmt);
7530 }
7531 return res;
7532}
7533
7534/* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
c26de36d 7535 simplifying it first if possible. Returns the built
3d2cf79f
RB
7536 expression value and appends statements possibly defining it
7537 to SEQ. */
7538
7539tree
7540gimple_build (gimple_seq *seq, location_t loc,
c26de36d 7541 enum tree_code code, tree type, tree op0, tree op1)
3d2cf79f 7542{
c26de36d 7543 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
3d2cf79f
RB
7544 if (!res)
7545 {
a15ebbcd 7546 res = create_tmp_reg_or_ssa_name (type);
355fe088 7547 gimple *stmt = gimple_build_assign (res, code, op0, op1);
3d2cf79f
RB
7548 gimple_set_location (stmt, loc);
7549 gimple_seq_add_stmt_without_update (seq, stmt);
7550 }
7551 return res;
7552}
7553
7554/* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
c26de36d 7555 simplifying it first if possible. Returns the built
3d2cf79f
RB
7556 expression value and appends statements possibly defining it
7557 to SEQ. */
7558
7559tree
7560gimple_build (gimple_seq *seq, location_t loc,
c26de36d 7561 enum tree_code code, tree type, tree op0, tree op1, tree op2)
3d2cf79f
RB
7562{
7563 tree res = gimple_simplify (code, type, op0, op1, op2,
c26de36d 7564 seq, gimple_build_valueize);
3d2cf79f
RB
7565 if (!res)
7566 {
a15ebbcd 7567 res = create_tmp_reg_or_ssa_name (type);
355fe088 7568 gimple *stmt;
3d2cf79f 7569 if (code == BIT_FIELD_REF)
0d0e4a03
JJ
7570 stmt = gimple_build_assign (res, code,
7571 build3 (code, type, op0, op1, op2));
3d2cf79f 7572 else
0d0e4a03 7573 stmt = gimple_build_assign (res, code, op0, op1, op2);
3d2cf79f
RB
7574 gimple_set_location (stmt, loc);
7575 gimple_seq_add_stmt_without_update (seq, stmt);
7576 }
7577 return res;
7578}
7579
7580/* Build the call FN (ARG0) with a result of type TYPE
7581 (or no result if TYPE is void) with location LOC,
c26de36d 7582 simplifying it first if possible. Returns the built
3d2cf79f
RB
7583 expression value (or NULL_TREE if TYPE is void) and appends
7584 statements possibly defining it to SEQ. */
7585
7586tree
eb69361d
RS
7587gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7588 tree type, tree arg0)
3d2cf79f 7589{
c26de36d 7590 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
3d2cf79f
RB
7591 if (!res)
7592 {
eb69361d
RS
7593 gcall *stmt;
7594 if (internal_fn_p (fn))
7595 stmt = gimple_build_call_internal (as_internal_fn (fn), 1, arg0);
7596 else
7597 {
7598 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7599 stmt = gimple_build_call (decl, 1, arg0);
7600 }
3d2cf79f
RB
7601 if (!VOID_TYPE_P (type))
7602 {
a15ebbcd 7603 res = create_tmp_reg_or_ssa_name (type);
3d2cf79f
RB
7604 gimple_call_set_lhs (stmt, res);
7605 }
7606 gimple_set_location (stmt, loc);
7607 gimple_seq_add_stmt_without_update (seq, stmt);
7608 }
7609 return res;
7610}
7611
7612/* Build the call FN (ARG0, ARG1) with a result of type TYPE
7613 (or no result if TYPE is void) with location LOC,
c26de36d 7614 simplifying it first if possible. Returns the built
3d2cf79f
RB
7615 expression value (or NULL_TREE if TYPE is void) and appends
7616 statements possibly defining it to SEQ. */
7617
7618tree
eb69361d
RS
7619gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7620 tree type, tree arg0, tree arg1)
3d2cf79f 7621{
c26de36d 7622 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
3d2cf79f
RB
7623 if (!res)
7624 {
eb69361d
RS
7625 gcall *stmt;
7626 if (internal_fn_p (fn))
7627 stmt = gimple_build_call_internal (as_internal_fn (fn), 2, arg0, arg1);
7628 else
7629 {
7630 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7631 stmt = gimple_build_call (decl, 2, arg0, arg1);
7632 }
3d2cf79f
RB
7633 if (!VOID_TYPE_P (type))
7634 {
a15ebbcd 7635 res = create_tmp_reg_or_ssa_name (type);
3d2cf79f
RB
7636 gimple_call_set_lhs (stmt, res);
7637 }
7638 gimple_set_location (stmt, loc);
7639 gimple_seq_add_stmt_without_update (seq, stmt);
7640 }
7641 return res;
7642}
7643
7644/* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
7645 (or no result if TYPE is void) with location LOC,
c26de36d 7646 simplifying it first if possible. Returns the built
3d2cf79f
RB
7647 expression value (or NULL_TREE if TYPE is void) and appends
7648 statements possibly defining it to SEQ. */
7649
7650tree
eb69361d
RS
7651gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7652 tree type, tree arg0, tree arg1, tree arg2)
3d2cf79f 7653{
c26de36d
RB
7654 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
7655 seq, gimple_build_valueize);
3d2cf79f
RB
7656 if (!res)
7657 {
eb69361d
RS
7658 gcall *stmt;
7659 if (internal_fn_p (fn))
7660 stmt = gimple_build_call_internal (as_internal_fn (fn),
7661 3, arg0, arg1, arg2);
7662 else
7663 {
7664 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7665 stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
7666 }
3d2cf79f
RB
7667 if (!VOID_TYPE_P (type))
7668 {
a15ebbcd 7669 res = create_tmp_reg_or_ssa_name (type);
3d2cf79f
RB
7670 gimple_call_set_lhs (stmt, res);
7671 }
7672 gimple_set_location (stmt, loc);
7673 gimple_seq_add_stmt_without_update (seq, stmt);
7674 }
7675 return res;
7676}
7677
7678/* Build the conversion (TYPE) OP with a result of type TYPE
7679 with location LOC if such conversion is necessary in GIMPLE,
7680 simplifying it first.
7681 Returns the built expression value and appends
7682 statements possibly defining it to SEQ. */
d4f5cd5e
RB
7683
7684tree
7685gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
7686{
7687 if (useless_type_conversion_p (type, TREE_TYPE (op)))
7688 return op;
3d2cf79f 7689 return gimple_build (seq, loc, NOP_EXPR, type, op);
d4f5cd5e 7690}
68e57f04 7691
74e3c262
RB
7692/* Build the conversion (ptrofftype) OP with a result of a type
7693 compatible with ptrofftype with location LOC if such conversion
7694 is necessary in GIMPLE, simplifying it first.
7695 Returns the built expression value and appends
7696 statements possibly defining it to SEQ. */
7697
7698tree
7699gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
7700{
7701 if (ptrofftype_p (TREE_TYPE (op)))
7702 return op;
7703 return gimple_convert (seq, loc, sizetype, op);
7704}
7705
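/* Editor's note: an illustrative sketch, not part of gimple-fold.c, of the
   gimple_build / gimple_convert API above.  New statements accumulate in a
   local sequence, simplifying against each other as they are built, and are
   emitted in one go.  The function name and GSI parameter are hypothetical.  */

static tree
sketch_build_sum_in_sizetype (gimple_stmt_iterator *gsi, location_t loc,
			      tree a, tree b)
{
  gimple_seq seq = NULL;
  tree ua = gimple_convert (&seq, loc, sizetype, a);
  tree ub = gimple_convert (&seq, loc, sizetype, b);
  tree sum = gimple_build (&seq, loc, PLUS_EXPR, sizetype, ua, ub);
  /* Emit whatever statements were actually needed.  */
  gsi_insert_seq_before (gsi, seq, GSI_SAME_STMT);
  return sum;
}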
e7c45b66
RS
7706/* Build a vector of type TYPE in which each element has the value OP.
7707 Return a gimple value for the result, appending any new statements
7708 to SEQ. */
7709
7710tree
7711gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
7712 tree op)
7713{
928686b1
RS
7714 if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
7715 && !CONSTANT_CLASS_P (op))
7716 return gimple_build (seq, loc, VEC_DUPLICATE_EXPR, type, op);
7717
e7c45b66
RS
7718 tree res, vec = build_vector_from_val (type, op);
7719 if (is_gimple_val (vec))
7720 return vec;
7721 if (gimple_in_ssa_p (cfun))
7722 res = make_ssa_name (type);
7723 else
7724 res = create_tmp_reg (type);
7725 gimple *stmt = gimple_build_assign (res, vec);
7726 gimple_set_location (stmt, loc);
7727 gimple_seq_add_stmt_without_update (seq, stmt);
7728 return res;
7729}
7730
abe73c3d
RS
7731/* Build a vector from BUILDER, handling the case in which some elements
7732 are non-constant. Return a gimple value for the result, appending any
7733 new instructions to SEQ.
7734
7735 BUILDER must not have a stepped encoding on entry. This is because
7736 the function is not geared up to handle the arithmetic that would
7737 be needed in the variable case, and any code building a vector that
7738 is known to be constant should use BUILDER->build () directly. */
e7c45b66
RS
7739
7740tree
abe73c3d
RS
7741gimple_build_vector (gimple_seq *seq, location_t loc,
7742 tree_vector_builder *builder)
e7c45b66 7743{
abe73c3d
RS
7744 gcc_assert (builder->nelts_per_pattern () <= 2);
7745 unsigned int encoded_nelts = builder->encoded_nelts ();
7746 for (unsigned int i = 0; i < encoded_nelts; ++i)
7747 if (!TREE_CONSTANT ((*builder)[i]))
e7c45b66 7748 {
abe73c3d 7749 tree type = builder->type ();
928686b1 7750 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
e7c45b66
RS
7751 vec<constructor_elt, va_gc> *v;
7752 vec_alloc (v, nelts);
7753 for (i = 0; i < nelts; ++i)
abe73c3d 7754 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));
e7c45b66
RS
7755
7756 tree res;
7757 if (gimple_in_ssa_p (cfun))
7758 res = make_ssa_name (type);
7759 else
7760 res = create_tmp_reg (type);
7761 gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
7762 gimple_set_location (stmt, loc);
7763 gimple_seq_add_stmt_without_update (seq, stmt);
7764 return res;
7765 }
abe73c3d 7766 return builder->build ();
e7c45b66
RS
7767}
7768
68e57f04
RS
7769/* Return true if the result of assignment STMT is known to be non-negative.
7770 If the return value is based on the assumption that signed overflow is
7771 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7772 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7773
7774static bool
7775gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7776 int depth)
7777{
7778 enum tree_code code = gimple_assign_rhs_code (stmt);
7779 switch (get_gimple_rhs_class (code))
7780 {
7781 case GIMPLE_UNARY_RHS:
7782 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7783 gimple_expr_type (stmt),
7784 gimple_assign_rhs1 (stmt),
7785 strict_overflow_p, depth);
7786 case GIMPLE_BINARY_RHS:
7787 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7788 gimple_expr_type (stmt),
7789 gimple_assign_rhs1 (stmt),
7790 gimple_assign_rhs2 (stmt),
7791 strict_overflow_p, depth);
7792 case GIMPLE_TERNARY_RHS:
7793 return false;
7794 case GIMPLE_SINGLE_RHS:
7795 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
7796 strict_overflow_p, depth);
7797 case GIMPLE_INVALID_RHS:
7798 break;
7799 }
7800 gcc_unreachable ();
7801}
7802
7803/* Return true if return value of call STMT is known to be non-negative.
7804 If the return value is based on the assumption that signed overflow is
7805 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7806 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7807
7808static bool
7809gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7810 int depth)
7811{
7812 tree arg0 = gimple_call_num_args (stmt) > 0 ?
7813 gimple_call_arg (stmt, 0) : NULL_TREE;
7814 tree arg1 = gimple_call_num_args (stmt) > 1 ?
7815 gimple_call_arg (stmt, 1) : NULL_TREE;
7816
7817 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
1d9da71f 7818 gimple_call_combined_fn (stmt),
68e57f04
RS
7819 arg0,
7820 arg1,
7821 strict_overflow_p, depth);
7822}
7823
4534c203
RB
7824/* Return true if return value of call STMT is known to be non-negative.
7825 If the return value is based on the assumption that signed overflow is
7826 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7827 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7828
7829static bool
7830gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7831 int depth)
7832{
7833 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7834 {
7835 tree arg = gimple_phi_arg_def (stmt, i);
7836 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
7837 return false;
7838 }
7839 return true;
7840}
7841
68e57f04
RS
7842/* Return true if STMT is known to compute a non-negative value.
7843 If the return value is based on the assumption that signed overflow is
7844 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7845 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7846
7847bool
7848gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7849 int depth)
7850{
7851 switch (gimple_code (stmt))
7852 {
7853 case GIMPLE_ASSIGN:
7854 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
7855 depth);
7856 case GIMPLE_CALL:
7857 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
7858 depth);
4534c203
RB
7859 case GIMPLE_PHI:
7860 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
7861 depth);
68e57f04
RS
7862 default:
7863 return false;
7864 }
7865}
67dbe582
RS
7866
7867/* Return true if the floating-point value computed by assignment STMT
7868 is known to have an integer value. We also allow +Inf, -Inf and NaN
5a00b0aa 7869 to be considered integer values. Return false for signaling NaN.
67dbe582
RS
7870
7871 DEPTH is the current nesting depth of the query. */
7872
7873static bool
7874gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
7875{
7876 enum tree_code code = gimple_assign_rhs_code (stmt);
7877 switch (get_gimple_rhs_class (code))
7878 {
7879 case GIMPLE_UNARY_RHS:
7880 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
7881 gimple_assign_rhs1 (stmt), depth);
7882 case GIMPLE_BINARY_RHS:
7883 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
7884 gimple_assign_rhs1 (stmt),
7885 gimple_assign_rhs2 (stmt), depth);
7886 case GIMPLE_TERNARY_RHS:
7887 return false;
7888 case GIMPLE_SINGLE_RHS:
7889 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
7890 case GIMPLE_INVALID_RHS:
7891 break;
7892 }
7893 gcc_unreachable ();
7894}
7895
7896/* Return true if the floating-point value computed by call STMT is known
7897 to have an integer value. We also allow +Inf, -Inf and NaN to be
5a00b0aa 7898 considered integer values. Return false for signaling NaN.
67dbe582
RS
7899
7900 DEPTH is the current nesting depth of the query. */
7901
7902static bool
7903gimple_call_integer_valued_real_p (gimple *stmt, int depth)
7904{
7905 tree arg0 = (gimple_call_num_args (stmt) > 0
7906 ? gimple_call_arg (stmt, 0)
7907 : NULL_TREE);
7908 tree arg1 = (gimple_call_num_args (stmt) > 1
7909 ? gimple_call_arg (stmt, 1)
7910 : NULL_TREE);
1d9da71f 7911 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
67dbe582
RS
7912 arg0, arg1, depth);
7913}
7914
7915/* Return true if the floating-point result of phi STMT is known to have
7916 an integer value. We also allow +Inf, -Inf and NaN to be considered
5a00b0aa 7917 integer values. Return false for signaling NaN.
67dbe582
RS
7918
7919 DEPTH is the current nesting depth of the query. */
7920
7921static bool
7922gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
7923{
7924 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7925 {
7926 tree arg = gimple_phi_arg_def (stmt, i);
7927 if (!integer_valued_real_single_p (arg, depth + 1))
7928 return false;
7929 }
7930 return true;
7931}
7932
7933/* Return true if the floating-point value computed by STMT is known
7934 to have an integer value. We also allow +Inf, -Inf and NaN to be
5a00b0aa 7935 considered integer values. Return false for signaling NaN.
67dbe582
RS
7936
7937 DEPTH is the current nesting depth of the query. */
7938
7939bool
7940gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
7941{
7942 switch (gimple_code (stmt))
7943 {
7944 case GIMPLE_ASSIGN:
7945 return gimple_assign_integer_valued_real_p (stmt, depth);
7946 case GIMPLE_CALL:
7947 return gimple_call_integer_valued_real_p (stmt, depth);
7948 case GIMPLE_PHI:
7949 return gimple_phi_integer_valued_real_p (stmt, depth);
7950 default:
7951 return false;
7952 }
7953}