]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/gimple-fold.c
analyzer: fix initialization from constant pool [PR96609,PR96616]
[thirdparty/gcc.git] / gcc / gimple-fold.c
CommitLineData
cbdd87d4 1/* Statement simplification on GIMPLE.
8d9254fc 2 Copyright (C) 2010-2020 Free Software Foundation, Inc.
cbdd87d4
RG
3 Split out from tree-ssa-ccp.c.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it
8under the terms of the GNU General Public License as published by the
9Free Software Foundation; either version 3, or (at your option) any
10later version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT
13ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
c7131fb2 24#include "backend.h"
957060b5
AM
25#include "target.h"
26#include "rtl.h"
cbdd87d4 27#include "tree.h"
c7131fb2 28#include "gimple.h"
957060b5 29#include "predict.h"
c7131fb2 30#include "ssa.h"
957060b5
AM
31#include "cgraph.h"
32#include "gimple-pretty-print.h"
cc8bea0a 33#include "gimple-ssa-warn-restrict.h"
c7131fb2 34#include "fold-const.h"
36566b39
PK
35#include "stmt.h"
36#include "expr.h"
37#include "stor-layout.h"
7ee2468b 38#include "dumpfile.h"
2fb9a547 39#include "gimple-fold.h"
45b0be94 40#include "gimplify.h"
5be5c238 41#include "gimple-iterator.h"
442b4905
AM
42#include "tree-into-ssa.h"
43#include "tree-dfa.h"
025d57f0 44#include "tree-object-size.h"
7a300452 45#include "tree-ssa.h"
cbdd87d4 46#include "tree-ssa-propagate.h"
450ad0cd 47#include "ipa-utils.h"
4484a35a 48#include "tree-ssa-address.h"
862d0b35 49#include "langhooks.h"
19e51b40 50#include "gimplify-me.h"
2b5f0895 51#include "dbgcnt.h"
9b2b7279 52#include "builtins.h"
e0ee10ed
RB
53#include "tree-eh.h"
54#include "gimple-match.h"
48126138 55#include "gomp-constants.h"
f869c12f 56#include "optabs-query.h"
629b3d75 57#include "omp-general.h"
abd3a68c 58#include "tree-cfg.h"
a918bfbf 59#include "fold-const-call.h"
314e6352
ML
60#include "stringpool.h"
61#include "attribs.h"
45b2222a 62#include "asan.h"
025d57f0
MS
63#include "diagnostic-core.h"
64#include "intl.h"
6a33d0ff 65#include "calls.h"
5ebaa477 66#include "tree-vector-builder.h"
5d0d5d68 67#include "tree-ssa-strlen.h"
e7868dc6 68#include "varasm.h"
cbdd87d4 69
598f7235
MS
70enum strlen_range_kind {
71 /* Compute the exact constant string length. */
72 SRK_STRLEN,
73 /* Compute the maximum constant string length. */
74 SRK_STRLENMAX,
75 /* Compute a range of string lengths bounded by object sizes. When
76 the length of a string cannot be determined, consider as the upper
77 bound the size of the enclosing object the string may be a member
78 or element of. Also determine the size of the largest character
79 array the string may refer to. */
80 SRK_LENRANGE,
598f7235
MS
81 /* Determine the integer value of the argument (not string length). */
82 SRK_INT_VALUE
83};
84
03c4a945
MS
85static bool
86get_range_strlen (tree, bitmap *, strlen_range_kind, c_strlen_data *, unsigned);
fb471a13 87
b3b9f3d0 88/* Return true when DECL can be referenced from current unit.
c44c2088
JH
89 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
90 We can get declarations that are not possible to reference for various
91 reasons:
1389294c 92
1389294c
JH
93 1) When analyzing C++ virtual tables.
94 C++ virtual tables do have known constructors even
95 when they are keyed to other compilation unit.
96 Those tables can contain pointers to methods and vars
97 in other units. Those methods have both STATIC and EXTERNAL
98 set.
99 2) In WHOPR mode devirtualization might lead to reference
100 to method that was partitioned elsehwere.
101 In this case we have static VAR_DECL or FUNCTION_DECL
102 that has no corresponding callgraph/varpool node
b3b9f3d0
JH
103 declaring the body.
104 3) COMDAT functions referred by external vtables that
3e89949e 105 we devirtualize only during final compilation stage.
b3b9f3d0
JH
106 At this time we already decided that we will not output
107 the function body and thus we can't reference the symbol
108 directly. */
109
1389294c 110static bool
c44c2088 111can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
1389294c 112{
2c8326a5 113 varpool_node *vnode;
1389294c 114 struct cgraph_node *node;
5e20cdc9 115 symtab_node *snode;
c44c2088 116
00de328a 117 if (DECL_ABSTRACT_P (decl))
1632a686
JH
118 return false;
119
120 /* We are concerned only about static/external vars and functions. */
121 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
8813a647 122 || !VAR_OR_FUNCTION_DECL_P (decl))
1632a686
JH
123 return true;
124
d4babd37
JM
125 /* Static objects can be referred only if they are defined and not optimized
126 out yet. */
127 if (!TREE_PUBLIC (decl))
1632a686 128 {
d4babd37
JM
129 if (DECL_EXTERNAL (decl))
130 return false;
3aaf0529
JH
131 /* Before we start optimizing unreachable code we can be sure all
132 static objects are defined. */
3dafb85c 133 if (symtab->function_flags_ready)
3aaf0529 134 return true;
d52f5295 135 snode = symtab_node::get (decl);
3aaf0529 136 if (!snode || !snode->definition)
1632a686 137 return false;
7de90a6c 138 node = dyn_cast <cgraph_node *> (snode);
a62bfab5 139 return !node || !node->inlined_to;
1632a686
JH
140 }
141
6da8be89 142 /* We will later output the initializer, so we can refer to it.
c44c2088 143 So we are concerned only when DECL comes from initializer of
3aaf0529 144 external var or var that has been optimized out. */
c44c2088 145 if (!from_decl
8813a647 146 || !VAR_P (from_decl)
3aaf0529 147 || (!DECL_EXTERNAL (from_decl)
9041d2e6 148 && (vnode = varpool_node::get (from_decl)) != NULL
3aaf0529 149 && vnode->definition)
6da8be89 150 || (flag_ltrans
9041d2e6 151 && (vnode = varpool_node::get (from_decl)) != NULL
6adda80b 152 && vnode->in_other_partition))
c44c2088 153 return true;
c44c2088
JH
154 /* We are folding reference from external vtable. The vtable may reffer
155 to a symbol keyed to other compilation unit. The other compilation
156 unit may be in separate DSO and the symbol may be hidden. */
157 if (DECL_VISIBILITY_SPECIFIED (decl)
158 && DECL_EXTERNAL (decl)
a33a931b 159 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
d52f5295 160 && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
c44c2088 161 return false;
b3b9f3d0
JH
162 /* When function is public, we always can introduce new reference.
163 Exception are the COMDAT functions where introducing a direct
164 reference imply need to include function body in the curren tunit. */
165 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
166 return true;
3aaf0529
JH
167 /* We have COMDAT. We are going to check if we still have definition
168 or if the definition is going to be output in other partition.
169 Bypass this when gimplifying; all needed functions will be produced.
c44c2088
JH
170
171 As observed in PR20991 for already optimized out comdat virtual functions
073a8998 172 it may be tempting to not necessarily give up because the copy will be
c44c2088
JH
173 output elsewhere when corresponding vtable is output.
174 This is however not possible - ABI specify that COMDATs are output in
175 units where they are used and when the other unit was compiled with LTO
176 it is possible that vtable was kept public while the function itself
177 was privatized. */
3dafb85c 178 if (!symtab->function_flags_ready)
b3b9f3d0 179 return true;
c44c2088 180
d52f5295 181 snode = symtab_node::get (decl);
3aaf0529
JH
182 if (!snode
183 || ((!snode->definition || DECL_EXTERNAL (decl))
184 && (!snode->in_other_partition
185 || (!snode->forced_by_abi && !snode->force_output))))
186 return false;
187 node = dyn_cast <cgraph_node *> (snode);
a62bfab5 188 return !node || !node->inlined_to;
1389294c
JH
189}
190
a15ebbcd
ML
191/* Create a temporary for TYPE for a statement STMT. If the current function
192 is in SSA form, a SSA name is created. Otherwise a temporary register
193 is made. */
194
edc19e03
WS
195tree
196create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
a15ebbcd
ML
197{
198 if (gimple_in_ssa_p (cfun))
199 return make_ssa_name (type, stmt);
200 else
201 return create_tmp_reg (type);
202}
203
0038d4e0 204/* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
c44c2088
JH
205 acceptable form for is_gimple_min_invariant.
206 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
17f39a39
JH
207
208tree
c44c2088 209canonicalize_constructor_val (tree cval, tree from_decl)
17f39a39 210{
37f808c4
RB
211 if (CONSTANT_CLASS_P (cval))
212 return cval;
213
50619002
EB
214 tree orig_cval = cval;
215 STRIP_NOPS (cval);
315f5f1b
RG
216 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
217 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
17f39a39 218 {
315f5f1b
RG
219 tree ptr = TREE_OPERAND (cval, 0);
220 if (is_gimple_min_invariant (ptr))
221 cval = build1_loc (EXPR_LOCATION (cval),
222 ADDR_EXPR, TREE_TYPE (ptr),
223 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
224 ptr,
225 fold_convert (ptr_type_node,
226 TREE_OPERAND (cval, 1))));
17f39a39
JH
227 }
228 if (TREE_CODE (cval) == ADDR_EXPR)
229 {
5a27a197
RG
230 tree base = NULL_TREE;
231 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
ca5f4331
MM
232 {
233 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
234 if (base)
235 TREE_OPERAND (cval, 0) = base;
236 }
5a27a197
RG
237 else
238 base = get_base_address (TREE_OPERAND (cval, 0));
7501ca28
RG
239 if (!base)
240 return NULL_TREE;
b3b9f3d0 241
8813a647 242 if (VAR_OR_FUNCTION_DECL_P (base)
c44c2088 243 && !can_refer_decl_in_current_unit_p (base, from_decl))
1389294c 244 return NULL_TREE;
13f92e8d
JJ
245 if (TREE_TYPE (base) == error_mark_node)
246 return NULL_TREE;
8813a647 247 if (VAR_P (base))
46eb666a 248 TREE_ADDRESSABLE (base) = 1;
7501ca28
RG
249 else if (TREE_CODE (base) == FUNCTION_DECL)
250 {
251 /* Make sure we create a cgraph node for functions we'll reference.
252 They can be non-existent if the reference comes from an entry
253 of an external vtable for example. */
d52f5295 254 cgraph_node::get_create (base);
7501ca28 255 }
0038d4e0 256 /* Fixup types in global initializers. */
73aef89e
RG
257 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
258 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
50619002
EB
259
260 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
261 cval = fold_convert (TREE_TYPE (orig_cval), cval);
262 return cval;
17f39a39 263 }
37f808c4
RB
264 /* In CONSTRUCTORs we may see unfolded constants like (int (*) ()) 0. */
265 if (TREE_CODE (cval) == INTEGER_CST)
266 {
267 if (TREE_OVERFLOW_P (cval))
268 cval = drop_tree_overflow (cval);
269 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
270 cval = fold_convert (TREE_TYPE (orig_cval), cval);
271 return cval;
272 }
50619002 273 return orig_cval;
17f39a39 274}
cbdd87d4
RG
275
276/* If SYM is a constant variable with known value, return the value.
277 NULL_TREE is returned otherwise. */
278
279tree
280get_symbol_constant_value (tree sym)
281{
6a6dac52
JH
282 tree val = ctor_for_folding (sym);
283 if (val != error_mark_node)
cbdd87d4 284 {
cbdd87d4
RG
285 if (val)
286 {
9d60be38 287 val = canonicalize_constructor_val (unshare_expr (val), sym);
1389294c 288 if (val && is_gimple_min_invariant (val))
17f39a39 289 return val;
1389294c
JH
290 else
291 return NULL_TREE;
cbdd87d4
RG
292 }
293 /* Variables declared 'const' without an initializer
294 have zero as the initializer if they may not be
295 overridden at link or run time. */
296 if (!val
b8a8c472 297 && is_gimple_reg_type (TREE_TYPE (sym)))
e8160c9a 298 return build_zero_cst (TREE_TYPE (sym));
cbdd87d4
RG
299 }
300
301 return NULL_TREE;
302}
303
304
cbdd87d4
RG
305
306/* Subroutine of fold_stmt. We perform several simplifications of the
307 memory reference tree EXPR and make sure to re-gimplify them properly
308 after propagation of constant addresses. IS_LHS is true if the
309 reference is supposed to be an lvalue. */
310
311static tree
312maybe_fold_reference (tree expr, bool is_lhs)
313{
17f39a39 314 tree result;
cbdd87d4 315
f0eddb90
RG
316 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
317 || TREE_CODE (expr) == REALPART_EXPR
318 || TREE_CODE (expr) == IMAGPART_EXPR)
319 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
320 return fold_unary_loc (EXPR_LOCATION (expr),
321 TREE_CODE (expr),
322 TREE_TYPE (expr),
323 TREE_OPERAND (expr, 0));
324 else if (TREE_CODE (expr) == BIT_FIELD_REF
325 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
326 return fold_ternary_loc (EXPR_LOCATION (expr),
327 TREE_CODE (expr),
328 TREE_TYPE (expr),
329 TREE_OPERAND (expr, 0),
330 TREE_OPERAND (expr, 1),
331 TREE_OPERAND (expr, 2));
332
f0eddb90
RG
333 if (!is_lhs
334 && (result = fold_const_aggregate_ref (expr))
335 && is_gimple_min_invariant (result))
336 return result;
cbdd87d4 337
cbdd87d4
RG
338 return NULL_TREE;
339}
340
341
342/* Attempt to fold an assignment statement pointed-to by SI. Returns a
343 replacement rhs for the statement or NULL_TREE if no simplification
344 could be made. It is assumed that the operands have been previously
345 folded. */
346
347static tree
348fold_gimple_assign (gimple_stmt_iterator *si)
349{
355fe088 350 gimple *stmt = gsi_stmt (*si);
cbdd87d4
RG
351 enum tree_code subcode = gimple_assign_rhs_code (stmt);
352 location_t loc = gimple_location (stmt);
353
354 tree result = NULL_TREE;
355
356 switch (get_gimple_rhs_class (subcode))
357 {
358 case GIMPLE_SINGLE_RHS:
359 {
360 tree rhs = gimple_assign_rhs1 (stmt);
361
8c00ba08
JW
362 if (TREE_CLOBBER_P (rhs))
363 return NULL_TREE;
364
4e71066d 365 if (REFERENCE_CLASS_P (rhs))
cbdd87d4
RG
366 return maybe_fold_reference (rhs, false);
367
bdf37f7a
JH
368 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
369 {
370 tree val = OBJ_TYPE_REF_EXPR (rhs);
371 if (is_gimple_min_invariant (val))
372 return val;
f8a39967 373 else if (flag_devirtualize && virtual_method_call_p (rhs))
bdf37f7a
JH
374 {
375 bool final;
376 vec <cgraph_node *>targets
f8a39967 377 = possible_polymorphic_call_targets (rhs, stmt, &final);
2b5f0895 378 if (final && targets.length () <= 1 && dbg_cnt (devirt))
bdf37f7a 379 {
2b5f0895
XDL
380 if (dump_enabled_p ())
381 {
4f5b9c80 382 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
2b5f0895
XDL
383 "resolving virtual function address "
384 "reference to function %s\n",
385 targets.length () == 1
386 ? targets[0]->name ()
3ef276e4 387 : "NULL");
2b5f0895 388 }
3ef276e4
RB
389 if (targets.length () == 1)
390 {
391 val = fold_convert (TREE_TYPE (val),
392 build_fold_addr_expr_loc
393 (loc, targets[0]->decl));
394 STRIP_USELESS_TYPE_CONVERSION (val);
395 }
396 else
67914693
SL
397 /* We cannot use __builtin_unreachable here because it
398 cannot have address taken. */
3ef276e4 399 val = build_int_cst (TREE_TYPE (val), 0);
bdf37f7a
JH
400 return val;
401 }
402 }
bdf37f7a 403 }
7524f419 404
cbdd87d4
RG
405 else if (TREE_CODE (rhs) == ADDR_EXPR)
406 {
70f34814
RG
407 tree ref = TREE_OPERAND (rhs, 0);
408 tree tem = maybe_fold_reference (ref, true);
409 if (tem
410 && TREE_CODE (tem) == MEM_REF
411 && integer_zerop (TREE_OPERAND (tem, 1)))
412 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
413 else if (tem)
cbdd87d4
RG
414 result = fold_convert (TREE_TYPE (rhs),
415 build_fold_addr_expr_loc (loc, tem));
70f34814
RG
416 else if (TREE_CODE (ref) == MEM_REF
417 && integer_zerop (TREE_OPERAND (ref, 1)))
418 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
7524f419
RB
419
420 if (result)
421 {
422 /* Strip away useless type conversions. Both the
423 NON_LVALUE_EXPR that may have been added by fold, and
424 "useless" type conversions that might now be apparent
425 due to propagation. */
426 STRIP_USELESS_TYPE_CONVERSION (result);
427
428 if (result != rhs && valid_gimple_rhs_p (result))
429 return result;
430 }
cbdd87d4
RG
431 }
432
433 else if (TREE_CODE (rhs) == CONSTRUCTOR
7524f419 434 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
cbdd87d4
RG
435 {
436 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
437 unsigned i;
438 tree val;
439
440 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
7524f419 441 if (! CONSTANT_CLASS_P (val))
cbdd87d4
RG
442 return NULL_TREE;
443
444 return build_vector_from_ctor (TREE_TYPE (rhs),
445 CONSTRUCTOR_ELTS (rhs));
446 }
447
448 else if (DECL_P (rhs))
9d60be38 449 return get_symbol_constant_value (rhs);
cbdd87d4
RG
450 }
451 break;
452
453 case GIMPLE_UNARY_RHS:
cbdd87d4
RG
454 break;
455
456 case GIMPLE_BINARY_RHS:
cbdd87d4
RG
457 break;
458
0354c0c7 459 case GIMPLE_TERNARY_RHS:
5c099d40
RB
460 result = fold_ternary_loc (loc, subcode,
461 TREE_TYPE (gimple_assign_lhs (stmt)),
462 gimple_assign_rhs1 (stmt),
463 gimple_assign_rhs2 (stmt),
464 gimple_assign_rhs3 (stmt));
0354c0c7
BS
465
466 if (result)
467 {
468 STRIP_USELESS_TYPE_CONVERSION (result);
469 if (valid_gimple_rhs_p (result))
470 return result;
0354c0c7
BS
471 }
472 break;
473
cbdd87d4
RG
474 case GIMPLE_INVALID_RHS:
475 gcc_unreachable ();
476 }
477
478 return NULL_TREE;
479}
480
fef5a0d9
RB
481
482/* Replace a statement at *SI_P with a sequence of statements in STMTS,
483 adjusting the replacement stmts location and virtual operands.
484 If the statement has a lhs the last stmt in the sequence is expected
485 to assign to that lhs. */
486
487static void
488gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
489{
355fe088 490 gimple *stmt = gsi_stmt (*si_p);
fef5a0d9
RB
491
492 if (gimple_has_location (stmt))
493 annotate_all_with_location (stmts, gimple_location (stmt));
494
495 /* First iterate over the replacement statements backward, assigning
496 virtual operands to their defining statements. */
355fe088 497 gimple *laststore = NULL;
fef5a0d9
RB
498 for (gimple_stmt_iterator i = gsi_last (stmts);
499 !gsi_end_p (i); gsi_prev (&i))
500 {
355fe088 501 gimple *new_stmt = gsi_stmt (i);
fef5a0d9
RB
502 if ((gimple_assign_single_p (new_stmt)
503 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
504 || (is_gimple_call (new_stmt)
505 && (gimple_call_flags (new_stmt)
506 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
507 {
508 tree vdef;
509 if (!laststore)
510 vdef = gimple_vdef (stmt);
511 else
512 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
513 gimple_set_vdef (new_stmt, vdef);
514 if (vdef && TREE_CODE (vdef) == SSA_NAME)
515 SSA_NAME_DEF_STMT (vdef) = new_stmt;
516 laststore = new_stmt;
517 }
518 }
519
520 /* Second iterate over the statements forward, assigning virtual
521 operands to their uses. */
522 tree reaching_vuse = gimple_vuse (stmt);
523 for (gimple_stmt_iterator i = gsi_start (stmts);
524 !gsi_end_p (i); gsi_next (&i))
525 {
355fe088 526 gimple *new_stmt = gsi_stmt (i);
fef5a0d9
RB
527 /* If the new statement possibly has a VUSE, update it with exact SSA
528 name we know will reach this one. */
529 if (gimple_has_mem_ops (new_stmt))
530 gimple_set_vuse (new_stmt, reaching_vuse);
531 gimple_set_modified (new_stmt, true);
532 if (gimple_vdef (new_stmt))
533 reaching_vuse = gimple_vdef (new_stmt);
534 }
535
536 /* If the new sequence does not do a store release the virtual
537 definition of the original statement. */
538 if (reaching_vuse
539 && reaching_vuse == gimple_vuse (stmt))
540 {
541 tree vdef = gimple_vdef (stmt);
542 if (vdef
543 && TREE_CODE (vdef) == SSA_NAME)
544 {
545 unlink_stmt_vdef (stmt);
546 release_ssa_name (vdef);
547 }
548 }
549
550 /* Finally replace the original statement with the sequence. */
551 gsi_replace_with_seq (si_p, stmts, false);
552}
553
cbdd87d4
RG
554/* Convert EXPR into a GIMPLE value suitable for substitution on the
555 RHS of an assignment. Insert the necessary statements before
556 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
557 is replaced. If the call is expected to produces a result, then it
558 is replaced by an assignment of the new RHS to the result variable.
559 If the result is to be ignored, then the call is replaced by a
fe2ef088
MM
560 GIMPLE_NOP. A proper VDEF chain is retained by making the first
561 VUSE and the last VDEF of the whole sequence be the same as the replaced
562 statement and using new SSA names for stores in between. */
cbdd87d4
RG
563
564void
565gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
566{
567 tree lhs;
355fe088 568 gimple *stmt, *new_stmt;
cbdd87d4 569 gimple_stmt_iterator i;
355a7673 570 gimple_seq stmts = NULL;
cbdd87d4
RG
571
572 stmt = gsi_stmt (*si_p);
573
574 gcc_assert (is_gimple_call (stmt));
575
45852dcc 576 push_gimplify_context (gimple_in_ssa_p (cfun));
cbdd87d4 577
e256dfce 578 lhs = gimple_call_lhs (stmt);
cbdd87d4 579 if (lhs == NULL_TREE)
6e572326
RG
580 {
581 gimplify_and_add (expr, &stmts);
582 /* We can end up with folding a memcpy of an empty class assignment
583 which gets optimized away by C++ gimplification. */
584 if (gimple_seq_empty_p (stmts))
585 {
9fdc58de 586 pop_gimplify_context (NULL);
6e572326
RG
587 if (gimple_in_ssa_p (cfun))
588 {
589 unlink_stmt_vdef (stmt);
590 release_defs (stmt);
591 }
f6b4dc28 592 gsi_replace (si_p, gimple_build_nop (), false);
6e572326
RG
593 return;
594 }
595 }
cbdd87d4 596 else
e256dfce 597 {
381cdae4 598 tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
e256dfce
RG
599 new_stmt = gimple_build_assign (lhs, tmp);
600 i = gsi_last (stmts);
601 gsi_insert_after_without_update (&i, new_stmt,
602 GSI_CONTINUE_LINKING);
603 }
cbdd87d4
RG
604
605 pop_gimplify_context (NULL);
606
fef5a0d9
RB
607 gsi_replace_with_seq_vops (si_p, stmts);
608}
cbdd87d4 609
fef5a0d9
RB
610
611/* Replace the call at *GSI with the gimple value VAL. */
612
e3174bdf 613void
fef5a0d9
RB
614replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
615{
355fe088 616 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9 617 tree lhs = gimple_call_lhs (stmt);
355fe088 618 gimple *repl;
fef5a0d9 619 if (lhs)
e256dfce 620 {
fef5a0d9
RB
621 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
622 val = fold_convert (TREE_TYPE (lhs), val);
623 repl = gimple_build_assign (lhs, val);
624 }
625 else
626 repl = gimple_build_nop ();
627 tree vdef = gimple_vdef (stmt);
628 if (vdef && TREE_CODE (vdef) == SSA_NAME)
629 {
630 unlink_stmt_vdef (stmt);
631 release_ssa_name (vdef);
632 }
f6b4dc28 633 gsi_replace (gsi, repl, false);
fef5a0d9
RB
634}
635
636/* Replace the call at *GSI with the new call REPL and fold that
637 again. */
638
639static void
355fe088 640replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
fef5a0d9 641{
355fe088 642 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9
RB
643 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
644 gimple_set_location (repl, gimple_location (stmt));
779724a5 645 gimple_move_vops (repl, stmt);
f6b4dc28 646 gsi_replace (gsi, repl, false);
fef5a0d9
RB
647 fold_stmt (gsi);
648}
649
650/* Return true if VAR is a VAR_DECL or a component thereof. */
651
652static bool
653var_decl_component_p (tree var)
654{
655 tree inner = var;
656 while (handled_component_p (inner))
657 inner = TREE_OPERAND (inner, 0);
47cac108
RB
658 return (DECL_P (inner)
659 || (TREE_CODE (inner) == MEM_REF
660 && TREE_CODE (TREE_OPERAND (inner, 0)) == ADDR_EXPR));
fef5a0d9
RB
661}
662
c89af696
AH
663/* Return TRUE if the SIZE argument, representing the size of an
664 object, is in a range of values of which exactly zero is valid. */
6512c0f1
MS
665
666static bool
667size_must_be_zero_p (tree size)
668{
669 if (integer_zerop (size))
670 return true;
671
3f27391f 672 if (TREE_CODE (size) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (size)))
6512c0f1
MS
673 return false;
674
6512c0f1
MS
675 tree type = TREE_TYPE (size);
676 int prec = TYPE_PRECISION (type);
677
6512c0f1
MS
678 /* Compute the value of SSIZE_MAX, the largest positive value that
679 can be stored in ssize_t, the signed counterpart of size_t. */
680 wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
5d462877 681 value_range valid_range (build_int_cst (type, 0),
028d81b1
AH
682 wide_int_to_tree (type, ssize_max));
683 value_range vr;
c89af696
AH
684 get_range_info (size, vr);
685 vr.intersect (&valid_range);
686 return vr.zero_p ();
6512c0f1
MS
687}
688
cc8bea0a
MS
689/* Fold function call to builtin mem{{,p}cpy,move}. Try to detect and
690 diagnose (otherwise undefined) overlapping copies without preventing
691 folding. When folded, GCC guarantees that overlapping memcpy has
692 the same semantics as memmove. Call to the library memcpy need not
693 provide the same guarantee. Return false if no simplification can
694 be made. */
fef5a0d9
RB
695
696static bool
697gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
0d67a510 698 tree dest, tree src, enum built_in_function code)
fef5a0d9 699{
355fe088 700 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9
RB
701 tree lhs = gimple_call_lhs (stmt);
702 tree len = gimple_call_arg (stmt, 2);
fef5a0d9
RB
703 location_t loc = gimple_location (stmt);
704
6512c0f1
MS
705 /* If the LEN parameter is a constant zero or in range where
706 the only valid value is zero, return DEST. */
707 if (size_must_be_zero_p (len))
fef5a0d9 708 {
355fe088 709 gimple *repl;
fef5a0d9
RB
710 if (gimple_call_lhs (stmt))
711 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
712 else
713 repl = gimple_build_nop ();
714 tree vdef = gimple_vdef (stmt);
715 if (vdef && TREE_CODE (vdef) == SSA_NAME)
e256dfce 716 {
fef5a0d9
RB
717 unlink_stmt_vdef (stmt);
718 release_ssa_name (vdef);
719 }
f6b4dc28 720 gsi_replace (gsi, repl, false);
fef5a0d9
RB
721 return true;
722 }
723
724 /* If SRC and DEST are the same (and not volatile), return
725 DEST{,+LEN,+LEN-1}. */
726 if (operand_equal_p (src, dest, 0))
727 {
cc8bea0a
MS
728 /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
729 It's safe and may even be emitted by GCC itself (see bug
e9b9fa4c 730 32667). */
fef5a0d9
RB
731 unlink_stmt_vdef (stmt);
732 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
733 release_ssa_name (gimple_vdef (stmt));
734 if (!lhs)
735 {
f6b4dc28 736 gsi_replace (gsi, gimple_build_nop (), false);
fef5a0d9
RB
737 return true;
738 }
739 goto done;
740 }
741 else
742 {
b541b871
EB
743 /* We cannot (easily) change the type of the copy if it is a storage
744 order barrier, i.e. is equivalent to a VIEW_CONVERT_EXPR that can
745 modify the storage order of objects (see storage_order_barrier_p). */
746 tree srctype
747 = POINTER_TYPE_P (TREE_TYPE (src))
748 ? TREE_TYPE (TREE_TYPE (src)) : NULL_TREE;
749 tree desttype
750 = POINTER_TYPE_P (TREE_TYPE (dest))
751 ? TREE_TYPE (TREE_TYPE (dest)) : NULL_TREE;
752 tree destvar, srcvar, srcoff;
fef5a0d9 753 unsigned int src_align, dest_align;
d01b568a 754 unsigned HOST_WIDE_INT tmp_len;
b541b871 755 const char *tmp_str;
fef5a0d9
RB
756
757 /* Build accesses at offset zero with a ref-all character type. */
b541b871
EB
758 tree off0
759 = build_int_cst (build_pointer_type_for_mode (char_type_node,
760 ptr_mode, true), 0);
fef5a0d9
RB
761
762 /* If we can perform the copy efficiently with first doing all loads
763 and then all stores inline it that way. Currently efficiently
764 means that we can load all the memory into a single integer
765 register which is what MOVE_MAX gives us. */
766 src_align = get_pointer_alignment (src);
767 dest_align = get_pointer_alignment (dest);
768 if (tree_fits_uhwi_p (len)
769 && compare_tree_int (len, MOVE_MAX) <= 0
14b7950f
MS
770 /* FIXME: Don't transform copies from strings with known length.
771 Until GCC 9 this prevented a case in gcc.dg/strlenopt-8.c
772 from being handled, and the case was XFAILed for that reason.
773 Now that it is handled and the XFAIL removed, as soon as other
774 strlenopt tests that rely on it for passing are adjusted, this
775 hack can be removed. */
776 && !c_strlen (src, 1)
d01b568a 777 && !((tmp_str = c_getstr (src, &tmp_len)) != NULL
b541b871
EB
778 && memchr (tmp_str, 0, tmp_len) == NULL)
779 && !(srctype
780 && AGGREGATE_TYPE_P (srctype)
781 && TYPE_REVERSE_STORAGE_ORDER (srctype))
782 && !(desttype
783 && AGGREGATE_TYPE_P (desttype)
784 && TYPE_REVERSE_STORAGE_ORDER (desttype)))
fef5a0d9
RB
785 {
786 unsigned ilen = tree_to_uhwi (len);
146ec50f 787 if (pow2p_hwi (ilen))
fef5a0d9 788 {
213694e5
MS
789 /* Detect out-of-bounds accesses without issuing warnings.
790 Avoid folding out-of-bounds copies but to avoid false
791 positives for unreachable code defer warning until after
792 DCE has worked its magic.
793 -Wrestrict is still diagnosed. */
794 if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
795 dest, src, len, len,
796 false, false))
797 if (warning != OPT_Wrestrict)
798 return false;
cc8bea0a 799
64ab8765 800 scalar_int_mode mode;
fef5a0d9
RB
801 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
802 if (type
64ab8765
RS
803 && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
804 && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
fef5a0d9
RB
805 /* If the destination pointer is not aligned we must be able
806 to emit an unaligned store. */
64ab8765 807 && (dest_align >= GET_MODE_ALIGNMENT (mode)
e0bd6c9f 808 || !targetm.slow_unaligned_access (mode, dest_align)
64ab8765 809 || (optab_handler (movmisalign_optab, mode)
f869c12f 810 != CODE_FOR_nothing)))
fef5a0d9
RB
811 {
812 tree srctype = type;
813 tree desttype = type;
64ab8765 814 if (src_align < GET_MODE_ALIGNMENT (mode))
fef5a0d9
RB
815 srctype = build_aligned_type (type, src_align);
816 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
817 tree tem = fold_const_aggregate_ref (srcmem);
818 if (tem)
819 srcmem = tem;
64ab8765 820 else if (src_align < GET_MODE_ALIGNMENT (mode)
e0bd6c9f 821 && targetm.slow_unaligned_access (mode, src_align)
64ab8765 822 && (optab_handler (movmisalign_optab, mode)
f869c12f 823 == CODE_FOR_nothing))
fef5a0d9
RB
824 srcmem = NULL_TREE;
825 if (srcmem)
826 {
355fe088 827 gimple *new_stmt;
fef5a0d9
RB
828 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
829 {
830 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
a15ebbcd
ML
831 srcmem
832 = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
833 new_stmt);
fef5a0d9
RB
834 gimple_assign_set_lhs (new_stmt, srcmem);
835 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
836 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
837 }
64ab8765 838 if (dest_align < GET_MODE_ALIGNMENT (mode))
fef5a0d9
RB
839 desttype = build_aligned_type (type, dest_align);
840 new_stmt
841 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
842 dest, off0),
843 srcmem);
779724a5 844 gimple_move_vops (new_stmt, stmt);
fef5a0d9
RB
845 if (!lhs)
846 {
f6b4dc28 847 gsi_replace (gsi, new_stmt, false);
fef5a0d9
RB
848 return true;
849 }
850 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
851 goto done;
852 }
853 }
854 }
855 }
856
0d67a510 857 if (code == BUILT_IN_MEMMOVE)
fef5a0d9
RB
858 {
859 /* Both DEST and SRC must be pointer types.
860 ??? This is what old code did. Is the testing for pointer types
861 really mandatory?
862
863 If either SRC is readonly or length is 1, we can use memcpy. */
864 if (!dest_align || !src_align)
865 return false;
866 if (readonly_data_expr (src)
867 || (tree_fits_uhwi_p (len)
868 && (MIN (src_align, dest_align) / BITS_PER_UNIT
869 >= tree_to_uhwi (len))))
870 {
871 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
872 if (!fn)
873 return false;
874 gimple_call_set_fndecl (stmt, fn);
875 gimple_call_set_arg (stmt, 0, dest);
876 gimple_call_set_arg (stmt, 1, src);
877 fold_stmt (gsi);
878 return true;
879 }
880
881 /* If *src and *dest can't overlap, optimize into memcpy as well. */
882 if (TREE_CODE (src) == ADDR_EXPR
883 && TREE_CODE (dest) == ADDR_EXPR)
884 {
885 tree src_base, dest_base, fn;
a90c8804
RS
886 poly_int64 src_offset = 0, dest_offset = 0;
887 poly_uint64 maxsize;
fef5a0d9
RB
888
889 srcvar = TREE_OPERAND (src, 0);
4fda19ef
JJ
890 src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
891 if (src_base == NULL)
892 src_base = srcvar;
fef5a0d9 893 destvar = TREE_OPERAND (dest, 0);
4fda19ef
JJ
894 dest_base = get_addr_base_and_unit_offset (destvar,
895 &dest_offset);
896 if (dest_base == NULL)
897 dest_base = destvar;
a90c8804 898 if (!poly_int_tree_p (len, &maxsize))
fef5a0d9 899 maxsize = -1;
fef5a0d9
RB
900 if (SSA_VAR_P (src_base)
901 && SSA_VAR_P (dest_base))
902 {
903 if (operand_equal_p (src_base, dest_base, 0)
a90c8804
RS
904 && ranges_maybe_overlap_p (src_offset, maxsize,
905 dest_offset, maxsize))
fef5a0d9
RB
906 return false;
907 }
908 else if (TREE_CODE (src_base) == MEM_REF
909 && TREE_CODE (dest_base) == MEM_REF)
910 {
911 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
912 TREE_OPERAND (dest_base, 0), 0))
913 return false;
a90c8804
RS
914 poly_offset_int full_src_offset
915 = mem_ref_offset (src_base) + src_offset;
916 poly_offset_int full_dest_offset
917 = mem_ref_offset (dest_base) + dest_offset;
918 if (ranges_maybe_overlap_p (full_src_offset, maxsize,
919 full_dest_offset, maxsize))
fef5a0d9
RB
920 return false;
921 }
922 else
923 return false;
924
925 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
926 if (!fn)
927 return false;
928 gimple_call_set_fndecl (stmt, fn);
929 gimple_call_set_arg (stmt, 0, dest);
930 gimple_call_set_arg (stmt, 1, src);
931 fold_stmt (gsi);
932 return true;
933 }
934
935 /* If the destination and source do not alias optimize into
936 memcpy as well. */
937 if ((is_gimple_min_invariant (dest)
938 || TREE_CODE (dest) == SSA_NAME)
939 && (is_gimple_min_invariant (src)
940 || TREE_CODE (src) == SSA_NAME))
941 {
942 ao_ref destr, srcr;
943 ao_ref_init_from_ptr_and_size (&destr, dest, len);
944 ao_ref_init_from_ptr_and_size (&srcr, src, len);
945 if (!refs_may_alias_p_1 (&destr, &srcr, false))
946 {
947 tree fn;
948 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
949 if (!fn)
950 return false;
951 gimple_call_set_fndecl (stmt, fn);
952 gimple_call_set_arg (stmt, 0, dest);
953 gimple_call_set_arg (stmt, 1, src);
954 fold_stmt (gsi);
955 return true;
956 }
957 }
958
959 return false;
960 }
961
962 if (!tree_fits_shwi_p (len))
963 return false;
b541b871
EB
964 if (!srctype
965 || (AGGREGATE_TYPE_P (srctype)
966 && TYPE_REVERSE_STORAGE_ORDER (srctype)))
967 return false;
968 if (!desttype
969 || (AGGREGATE_TYPE_P (desttype)
970 && TYPE_REVERSE_STORAGE_ORDER (desttype)))
fef5a0d9
RB
971 return false;
972 /* In the following try to find a type that is most natural to be
973 used for the memcpy source and destination and that allows
974 the most optimization when memcpy is turned into a plain assignment
975 using that type. In theory we could always use a char[len] type
976 but that only gains us that the destination and source possibly
977 no longer will have their address taken. */
fef5a0d9
RB
978 if (TREE_CODE (srctype) == ARRAY_TYPE
979 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
42f74245 980 srctype = TREE_TYPE (srctype);
fef5a0d9
RB
981 if (TREE_CODE (desttype) == ARRAY_TYPE
982 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
42f74245 983 desttype = TREE_TYPE (desttype);
fef5a0d9
RB
984 if (TREE_ADDRESSABLE (srctype)
985 || TREE_ADDRESSABLE (desttype))
986 return false;
987
988 /* Make sure we are not copying using a floating-point mode or
989 a type whose size possibly does not match its precision. */
990 if (FLOAT_MODE_P (TYPE_MODE (desttype))
991 || TREE_CODE (desttype) == BOOLEAN_TYPE
992 || TREE_CODE (desttype) == ENUMERAL_TYPE)
993 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
994 if (FLOAT_MODE_P (TYPE_MODE (srctype))
995 || TREE_CODE (srctype) == BOOLEAN_TYPE
996 || TREE_CODE (srctype) == ENUMERAL_TYPE)
997 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
998 if (!srctype)
999 srctype = desttype;
1000 if (!desttype)
1001 desttype = srctype;
1002 if (!srctype)
1003 return false;
1004
1005 src_align = get_pointer_alignment (src);
1006 dest_align = get_pointer_alignment (dest);
fef5a0d9 1007
5105b576
RB
1008 /* Choose between src and destination type for the access based
1009 on alignment, whether the access constitutes a register access
1010 and whether it may actually expose a declaration for SSA rewrite
e362a897
EB
1011 or SRA decomposition. Also try to expose a string constant, we
1012 might be able to concatenate several of them later into a single
1013 string store. */
42f74245 1014 destvar = NULL_TREE;
5105b576 1015 srcvar = NULL_TREE;
42f74245
RB
1016 if (TREE_CODE (dest) == ADDR_EXPR
1017 && var_decl_component_p (TREE_OPERAND (dest, 0))
5105b576
RB
1018 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len)
1019 && dest_align >= TYPE_ALIGN (desttype)
1020 && (is_gimple_reg_type (desttype)
1021 || src_align >= TYPE_ALIGN (desttype)))
42f74245 1022 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
5105b576
RB
1023 else if (TREE_CODE (src) == ADDR_EXPR
1024 && var_decl_component_p (TREE_OPERAND (src, 0))
1025 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
1026 && src_align >= TYPE_ALIGN (srctype)
1027 && (is_gimple_reg_type (srctype)
1028 || dest_align >= TYPE_ALIGN (srctype)))
1029 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
e362a897
EB
1030 /* FIXME: Don't transform copies from strings with known original length.
1031 As soon as strlenopt tests that rely on it for passing are adjusted,
1032 this hack can be removed. */
1033 else if (gimple_call_alloca_for_var_p (stmt)
1034 && (srcvar = string_constant (src, &srcoff, NULL, NULL))
1035 && integer_zerop (srcoff)
1036 && tree_int_cst_equal (TYPE_SIZE_UNIT (TREE_TYPE (srcvar)), len)
1037 && dest_align >= TYPE_ALIGN (TREE_TYPE (srcvar)))
1038 srctype = TREE_TYPE (srcvar);
1039 else
fef5a0d9
RB
1040 return false;
1041
5105b576
RB
1042 /* Now that we chose an access type express the other side in
1043 terms of it if the target allows that with respect to alignment
1044 constraints. */
fef5a0d9
RB
1045 if (srcvar == NULL_TREE)
1046 {
fef5a0d9
RB
1047 if (src_align >= TYPE_ALIGN (desttype))
1048 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1049 else
1050 {
1051 if (STRICT_ALIGNMENT)
1052 return false;
1053 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1054 src_align);
1055 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1056 }
1057 }
1058 else if (destvar == NULL_TREE)
1059 {
fef5a0d9
RB
1060 if (dest_align >= TYPE_ALIGN (srctype))
1061 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1062 else
1063 {
1064 if (STRICT_ALIGNMENT)
1065 return false;
1066 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1067 dest_align);
1068 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1069 }
1070 }
1071
213694e5
MS
1072 /* Same as above, detect out-of-bounds accesses without issuing
1073 warnings. Avoid folding out-of-bounds copies but to avoid
1074 false positives for unreachable code defer warning until
1075 after DCE has worked its magic.
1076 -Wrestrict is still diagnosed. */
1077 if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
1078 dest, src, len, len,
1079 false, false))
1080 if (warning != OPT_Wrestrict)
1081 return false;
cc8bea0a 1082
355fe088 1083 gimple *new_stmt;
fef5a0d9
RB
1084 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1085 {
921b13d0
RB
1086 tree tem = fold_const_aggregate_ref (srcvar);
1087 if (tem)
1088 srcvar = tem;
1089 if (! is_gimple_min_invariant (srcvar))
1090 {
1091 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
a15ebbcd
ML
1092 srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
1093 new_stmt);
921b13d0
RB
1094 gimple_assign_set_lhs (new_stmt, srcvar);
1095 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1096 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1097 }
d7257171
RB
1098 new_stmt = gimple_build_assign (destvar, srcvar);
1099 goto set_vop_and_replace;
fef5a0d9 1100 }
d7257171 1101
e362a897
EB
1102 /* We get an aggregate copy. If the source is a STRING_CST, then
1103 directly use its type to perform the copy. */
1104 if (TREE_CODE (srcvar) == STRING_CST)
1105 desttype = srctype;
1106
1107 /* Or else, use an unsigned char[] type to perform the copy in order
1108 to preserve padding and to avoid any issues with TREE_ADDRESSABLE
1109 types or float modes behavior on copying. */
1110 else
1111 {
1112 desttype = build_array_type_nelts (unsigned_char_type_node,
1113 tree_to_uhwi (len));
1114 srctype = desttype;
1115 if (src_align > TYPE_ALIGN (srctype))
1116 srctype = build_aligned_type (srctype, src_align);
1117 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1118 }
1119
d7257171
RB
1120 if (dest_align > TYPE_ALIGN (desttype))
1121 desttype = build_aligned_type (desttype, dest_align);
e362a897
EB
1122 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1123 new_stmt = gimple_build_assign (destvar, srcvar);
1124
d7257171 1125set_vop_and_replace:
779724a5 1126 gimple_move_vops (new_stmt, stmt);
fef5a0d9
RB
1127 if (!lhs)
1128 {
f6b4dc28 1129 gsi_replace (gsi, new_stmt, false);
fef5a0d9
RB
1130 return true;
1131 }
1132 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1133 }
1134
1135done:
74e3c262 1136 gimple_seq stmts = NULL;
0d67a510 1137 if (code == BUILT_IN_MEMCPY || code == BUILT_IN_MEMMOVE)
fef5a0d9 1138 len = NULL_TREE;
0d67a510 1139 else if (code == BUILT_IN_MEMPCPY)
74e3c262
RB
1140 {
1141 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1142 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1143 TREE_TYPE (dest), dest, len);
1144 }
0d67a510
ML
1145 else
1146 gcc_unreachable ();
fef5a0d9 1147
74e3c262 1148 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
355fe088 1149 gimple *repl = gimple_build_assign (lhs, dest);
f6b4dc28 1150 gsi_replace (gsi, repl, false);
fef5a0d9
RB
1151 return true;
1152}
1153
b3d8d88e
MS
1154/* Transform a call to built-in bcmp(a, b, len) at *GSI into one
1155 to built-in memcmp (a, b, len). */
1156
1157static bool
1158gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
1159{
1160 tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);
1161
1162 if (!fn)
1163 return false;
1164
1165 /* Transform bcmp (a, b, len) into memcmp (a, b, len). */
1166
1167 gimple *stmt = gsi_stmt (*gsi);
1168 tree a = gimple_call_arg (stmt, 0);
1169 tree b = gimple_call_arg (stmt, 1);
1170 tree len = gimple_call_arg (stmt, 2);
1171
1172 gimple *repl = gimple_build_call (fn, 3, a, b, len);
1173 replace_call_with_call_and_fold (gsi, repl);
1174
1175 return true;
1176}
1177
1178/* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
1179 to built-in memmove (dest, src, len). */
1180
1181static bool
1182gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
1183{
1184 tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);
1185
1186 if (!fn)
1187 return false;
1188
1189 /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
1190 it's quivalent to memmove (not memcpy). Transform bcopy (src, dest,
1191 len) into memmove (dest, src, len). */
1192
1193 gimple *stmt = gsi_stmt (*gsi);
1194 tree src = gimple_call_arg (stmt, 0);
1195 tree dest = gimple_call_arg (stmt, 1);
1196 tree len = gimple_call_arg (stmt, 2);
1197
1198 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1199 gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
1200 replace_call_with_call_and_fold (gsi, repl);
1201
1202 return true;
1203}
1204
1205/* Transform a call to built-in bzero (dest, len) at *GSI into one
1206 to built-in memset (dest, 0, len). */
1207
1208static bool
1209gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
1210{
1211 tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
1212
1213 if (!fn)
1214 return false;
1215
1216 /* Transform bzero (dest, len) into memset (dest, 0, len). */
1217
1218 gimple *stmt = gsi_stmt (*gsi);
1219 tree dest = gimple_call_arg (stmt, 0);
1220 tree len = gimple_call_arg (stmt, 1);
1221
1222 gimple_seq seq = NULL;
1223 gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
1224 gimple_seq_add_stmt_without_update (&seq, repl);
1225 gsi_replace_with_seq_vops (gsi, seq);
1226 fold_stmt (gsi);
1227
1228 return true;
1229}
1230
fef5a0d9
RB
1231/* Fold function call to builtin memset or bzero at *GSI setting the
1232 memory of size LEN to VAL. Return whether a simplification was made. */
1233
1234static bool
1235gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1236{
355fe088 1237 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9
RB
1238 tree etype;
1239 unsigned HOST_WIDE_INT length, cval;
1240
1241 /* If the LEN parameter is zero, return DEST. */
1242 if (integer_zerop (len))
1243 {
1244 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1245 return true;
1246 }
1247
1248 if (! tree_fits_uhwi_p (len))
1249 return false;
1250
1251 if (TREE_CODE (c) != INTEGER_CST)
1252 return false;
1253
1254 tree dest = gimple_call_arg (stmt, 0);
1255 tree var = dest;
1256 if (TREE_CODE (var) != ADDR_EXPR)
1257 return false;
1258
1259 var = TREE_OPERAND (var, 0);
1260 if (TREE_THIS_VOLATILE (var))
1261 return false;
1262
1263 etype = TREE_TYPE (var);
1264 if (TREE_CODE (etype) == ARRAY_TYPE)
1265 etype = TREE_TYPE (etype);
1266
1267 if (!INTEGRAL_TYPE_P (etype)
1268 && !POINTER_TYPE_P (etype))
1269 return NULL_TREE;
1270
1271 if (! var_decl_component_p (var))
1272 return NULL_TREE;
1273
1274 length = tree_to_uhwi (len);
7a504f33 1275 if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
1ba9acb1
RB
1276 || (GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (etype))
1277 != GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (etype)))
fef5a0d9
RB
1278 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1279 return NULL_TREE;
1280
1281 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1282 return NULL_TREE;
1283
1ba9acb1
RB
1284 if (!type_has_mode_precision_p (etype))
1285 etype = lang_hooks.types.type_for_mode (SCALAR_INT_TYPE_MODE (etype),
1286 TYPE_UNSIGNED (etype));
1287
fef5a0d9
RB
1288 if (integer_zerop (c))
1289 cval = 0;
1290 else
1291 {
1292 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1293 return NULL_TREE;
1294
1295 cval = TREE_INT_CST_LOW (c);
1296 cval &= 0xff;
1297 cval |= cval << 8;
1298 cval |= cval << 16;
1299 cval |= (cval << 31) << 1;
1300 }
1301
1302 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
355fe088 1303 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
779724a5 1304 gimple_move_vops (store, stmt);
fef5a0d9
RB
1305 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1306 if (gimple_call_lhs (stmt))
1307 {
355fe088 1308 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
f6b4dc28 1309 gsi_replace (gsi, asgn, false);
fef5a0d9
RB
1310 }
1311 else
1312 {
1313 gimple_stmt_iterator gsi2 = *gsi;
1314 gsi_prev (gsi);
1315 gsi_remove (&gsi2, true);
1316 }
1317
1318 return true;
1319}
1320
fb471a13 1321/* Helper of get_range_strlen for ARG that is not an SSA_NAME. */
fef5a0d9
RB
1322
1323static bool
03c4a945
MS
1324get_range_strlen_tree (tree arg, bitmap *visited, strlen_range_kind rkind,
1325 c_strlen_data *pdata, unsigned eltsize)
fef5a0d9 1326{
fb471a13 1327 gcc_assert (TREE_CODE (arg) != SSA_NAME);
e7868dc6 1328
fb471a13
MS
1329 /* The length computed by this invocation of the function. */
1330 tree val = NULL_TREE;
1331
eef2da67
MS
1332 /* True if VAL is an optimistic (tight) bound determined from
1333 the size of the character array in which the string may be
1334 stored. In that case, the computed VAL is used to set
1335 PDATA->MAXBOUND. */
1336 bool tight_bound = false;
1337
fb471a13
MS
1338 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1339 if (TREE_CODE (arg) == ADDR_EXPR
1340 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
fef5a0d9 1341 {
fb471a13
MS
1342 tree op = TREE_OPERAND (arg, 0);
1343 if (integer_zerop (TREE_OPERAND (op, 1)))
fef5a0d9 1344 {
fb471a13
MS
1345 tree aop0 = TREE_OPERAND (op, 0);
1346 if (TREE_CODE (aop0) == INDIRECT_REF
1347 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
03c4a945
MS
1348 return get_range_strlen (TREE_OPERAND (aop0, 0), visited, rkind,
1349 pdata, eltsize);
fef5a0d9 1350 }
598f7235 1351 else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF
84de9426 1352 && rkind == SRK_LENRANGE)
fef5a0d9 1353 {
fb471a13
MS
1354 /* Fail if an array is the last member of a struct object
1355 since it could be treated as a (fake) flexible array
1356 member. */
1357 tree idx = TREE_OPERAND (op, 1);
1358
1359 arg = TREE_OPERAND (op, 0);
1360 tree optype = TREE_TYPE (arg);
1361 if (tree dom = TYPE_DOMAIN (optype))
1362 if (tree bound = TYPE_MAX_VALUE (dom))
1363 if (TREE_CODE (bound) == INTEGER_CST
1364 && TREE_CODE (idx) == INTEGER_CST
1365 && tree_int_cst_lt (bound, idx))
1366 return false;
fef5a0d9 1367 }
fb471a13 1368 }
7d583f42 1369
598f7235 1370 if (rkind == SRK_INT_VALUE)
fb471a13
MS
1371 {
1372 /* We are computing the maximum value (not string length). */
1373 val = arg;
1374 if (TREE_CODE (val) != INTEGER_CST
1375 || tree_int_cst_sgn (val) < 0)
1376 return false;
1377 }
1378 else
1379 {
1380 c_strlen_data lendata = { };
1381 val = c_strlen (arg, 1, &lendata, eltsize);
1382
fb471a13
MS
1383 if (!val && lendata.decl)
1384 {
03c4a945
MS
1385 /* ARG refers to an unterminated const character array.
1386 DATA.DECL with size DATA.LEN. */
1387 val = lendata.minlen;
730832cd 1388 pdata->decl = lendata.decl;
7d583f42 1389 }
fb471a13
MS
1390 }
1391
a7160771
MS
1392 /* Set if VAL represents the maximum length based on array size (set
1393 when exact length cannot be determined). */
1394 bool maxbound = false;
1395
84de9426 1396 if (!val && rkind == SRK_LENRANGE)
fb471a13
MS
1397 {
1398 if (TREE_CODE (arg) == ADDR_EXPR)
730832cd 1399 return get_range_strlen (TREE_OPERAND (arg, 0), visited, rkind,
03c4a945 1400 pdata, eltsize);
88d0c3f0 1401
fb471a13 1402 if (TREE_CODE (arg) == ARRAY_REF)
88d0c3f0 1403 {
fb471a13 1404 tree optype = TREE_TYPE (TREE_OPERAND (arg, 0));
88d0c3f0 1405
fb471a13
MS
1406 /* Determine the "innermost" array type. */
1407 while (TREE_CODE (optype) == ARRAY_TYPE
1408 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
1409 optype = TREE_TYPE (optype);
c42d0aa0 1410
fb471a13
MS
1411 /* Avoid arrays of pointers. */
1412 tree eltype = TREE_TYPE (optype);
1413 if (TREE_CODE (optype) != ARRAY_TYPE
1414 || !INTEGRAL_TYPE_P (eltype))
1415 return false;
c42d0aa0 1416
fb471a13
MS
1417 /* Fail when the array bound is unknown or zero. */
1418 val = TYPE_SIZE_UNIT (optype);
05c13c43
MS
1419 if (!val
1420 || TREE_CODE (val) != INTEGER_CST
1421 || integer_zerop (val))
fb471a13 1422 return false;
1bfd6a00 1423
fb471a13
MS
1424 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1425 integer_one_node);
c42d0aa0 1426
fb471a13
MS
1427 /* Set the minimum size to zero since the string in
1428 the array could have zero length. */
730832cd 1429 pdata->minlen = ssize_int (0);
204a7ecb 1430
eef2da67 1431 tight_bound = true;
fb471a13
MS
1432 }
1433 else if (TREE_CODE (arg) == COMPONENT_REF
1434 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
1435 == ARRAY_TYPE))
1436 {
1437 /* Use the type of the member array to determine the upper
1438 bound on the length of the array. This may be overly
1439 optimistic if the array itself isn't NUL-terminated and
1440 the caller relies on the subsequent member to contain
1441 the NUL but that would only be considered valid if
03c4a945 1442 the array were the last member of a struct. */
fb471a13
MS
1443
1444 tree fld = TREE_OPERAND (arg, 1);
1445
1446 tree optype = TREE_TYPE (fld);
1447
1448 /* Determine the "innermost" array type. */
1449 while (TREE_CODE (optype) == ARRAY_TYPE
1450 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
1451 optype = TREE_TYPE (optype);
1452
1453 /* Fail when the array bound is unknown or zero. */
1454 val = TYPE_SIZE_UNIT (optype);
05c13c43
MS
1455 if (!val
1456 || TREE_CODE (val) != INTEGER_CST
1457 || integer_zerop (val))
fb471a13
MS
1458 return false;
1459 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1460 integer_one_node);
1461
1462 /* Set the minimum size to zero since the string in
1463 the array could have zero length. */
730832cd 1464 pdata->minlen = ssize_int (0);
fb471a13 1465
eef2da67
MS
1466 /* The array size determined above is an optimistic bound
1467 on the length. If the array isn't nul-terminated the
1468 length computed by the library function would be greater.
1469 Even though using strlen to cross the subobject boundary
1470 is undefined, avoid drawing conclusions from the member
1471 type about the length here. */
1472 tight_bound = true;
1473 }
e7868dc6
MS
1474 else if (TREE_CODE (arg) == MEM_REF
1475 && TREE_CODE (TREE_TYPE (arg)) == ARRAY_TYPE
1476 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == INTEGER_TYPE
1477 && TREE_CODE (TREE_OPERAND (arg, 0)) == ADDR_EXPR)
1478 {
1479 /* Handle a MEM_REF into a DECL accessing an array of integers,
1480 being conservative about references to extern structures with
1481 flexible array members that can be initialized to arbitrary
1482 numbers of elements as an extension (static structs are okay).
1483 FIXME: Make this less conservative -- see
1484 component_ref_size in tree.c. */
1485 tree ref = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
1486 if ((TREE_CODE (ref) == PARM_DECL || VAR_P (ref))
1487 && (decl_binds_to_current_def_p (ref)
1488 || !array_at_struct_end_p (arg)))
1489 {
1490 /* Fail if the offset is out of bounds. Such accesses
1491 should be diagnosed at some point. */
1492 val = DECL_SIZE_UNIT (ref);
05c13c43
MS
1493 if (!val
1494 || TREE_CODE (val) != INTEGER_CST
1495 || integer_zerop (val))
e7868dc6
MS
1496 return false;
1497
1498 poly_offset_int psiz = wi::to_offset (val);
1499 poly_offset_int poff = mem_ref_offset (arg);
1500 if (known_le (psiz, poff))
1501 return false;
1502
1503 pdata->minlen = ssize_int (0);
1504
1505 /* Subtract the offset and one for the terminating nul. */
1506 psiz -= poff;
1507 psiz -= 1;
1508 val = wide_int_to_tree (TREE_TYPE (val), psiz);
1509 /* Since VAL reflects the size of a declared object
1510 rather the type of the access it is not a tight bound. */
1511 }
1512 }
1513 else if (TREE_CODE (arg) == PARM_DECL || VAR_P (arg))
fb471a13 1514 {
eef2da67
MS
1515 /* Avoid handling pointers to arrays. GCC might misuse
1516 a pointer to an array of one bound to point to an array
1517 object of a greater bound. */
1518 tree argtype = TREE_TYPE (arg);
1519 if (TREE_CODE (argtype) == ARRAY_TYPE)
88d0c3f0 1520 {
eef2da67 1521 val = TYPE_SIZE_UNIT (argtype);
fb471a13
MS
1522 if (!val
1523 || TREE_CODE (val) != INTEGER_CST
1524 || integer_zerop (val))
88d0c3f0 1525 return false;
fb471a13
MS
1526 val = wide_int_to_tree (TREE_TYPE (val),
1527 wi::sub (wi::to_wide (val), 1));
1528
e495e31a
MS
1529 /* Set the minimum size to zero since the string in
1530 the array could have zero length. */
730832cd 1531 pdata->minlen = ssize_int (0);
88d0c3f0
MS
1532 }
1533 }
a7160771 1534 maxbound = true;
fb471a13 1535 }
88d0c3f0 1536
fb471a13
MS
1537 if (!val)
1538 return false;
fef5a0d9 1539
fb471a13 1540 /* Adjust the lower bound on the string length as necessary. */
730832cd 1541 if (!pdata->minlen
598f7235 1542 || (rkind != SRK_STRLEN
730832cd 1543 && TREE_CODE (pdata->minlen) == INTEGER_CST
fb471a13 1544 && TREE_CODE (val) == INTEGER_CST
730832cd
MS
1545 && tree_int_cst_lt (val, pdata->minlen)))
1546 pdata->minlen = val;
88d0c3f0 1547
a7160771 1548 if (pdata->maxbound && TREE_CODE (pdata->maxbound) == INTEGER_CST)
730832cd
MS
1549 {
1550 /* Adjust the tighter (more optimistic) string length bound
1551 if necessary and proceed to adjust the more conservative
1552 bound. */
1553 if (TREE_CODE (val) == INTEGER_CST)
1554 {
a7160771
MS
1555 if (tree_int_cst_lt (pdata->maxbound, val))
1556 pdata->maxbound = val;
730832cd
MS
1557 }
1558 else
1559 pdata->maxbound = val;
1560 }
a7160771
MS
1561 else if (pdata->maxbound || maxbound)
1562 /* Set PDATA->MAXBOUND only if it either isn't INTEGER_CST or
1563 if VAL corresponds to the maximum length determined based
1564 on the type of the object. */
730832cd
MS
1565 pdata->maxbound = val;
1566
eef2da67
MS
1567 if (tight_bound)
1568 {
1569 /* VAL computed above represents an optimistically tight bound
1570 on the length of the string based on the referenced object's
1571 or subobject's type. Determine the conservative upper bound
1572 based on the enclosing object's size if possible. */
84de9426 1573 if (rkind == SRK_LENRANGE)
eef2da67
MS
1574 {
1575 poly_int64 offset;
1576 tree base = get_addr_base_and_unit_offset (arg, &offset);
1577 if (!base)
1578 {
1579 /* When the call above fails due to a non-constant offset
1580 assume the offset is zero and use the size of the whole
1581 enclosing object instead. */
1582 base = get_base_address (arg);
1583 offset = 0;
1584 }
1585 /* If the base object is a pointer no upper bound on the length
1586 can be determined. Otherwise the maximum length is equal to
1587 the size of the enclosing object minus the offset of
1588 the referenced subobject minus 1 (for the terminating nul). */
1589 tree type = TREE_TYPE (base);
1590 if (TREE_CODE (type) == POINTER_TYPE
e7868dc6
MS
1591 || (TREE_CODE (base) != PARM_DECL && !VAR_P (base))
1592 || !(val = DECL_SIZE_UNIT (base)))
eef2da67
MS
1593 val = build_all_ones_cst (size_type_node);
1594 else
1595 {
1596 val = DECL_SIZE_UNIT (base);
1597 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1598 size_int (offset + 1));
1599 }
1600 }
1601 else
1602 return false;
1603 }
1604
730832cd 1605 if (pdata->maxlen)
fb471a13
MS
1606 {
1607 /* Adjust the more conservative bound if possible/necessary
1608 and fail otherwise. */
598f7235 1609 if (rkind != SRK_STRLEN)
fef5a0d9 1610 {
730832cd 1611 if (TREE_CODE (pdata->maxlen) != INTEGER_CST
fb471a13 1612 || TREE_CODE (val) != INTEGER_CST)
fef5a0d9 1613 return false;
fef5a0d9 1614
730832cd
MS
1615 if (tree_int_cst_lt (pdata->maxlen, val))
1616 pdata->maxlen = val;
fb471a13
MS
1617 return true;
1618 }
730832cd 1619 else if (simple_cst_equal (val, pdata->maxlen) != 1)
fb471a13
MS
1620 {
1621 /* Fail if the length of this ARG is different from that
1622 previously determined from another ARG. */
1623 return false;
1624 }
fef5a0d9
RB
1625 }
1626
730832cd 1627 pdata->maxlen = val;
84de9426 1628 return rkind == SRK_LENRANGE || !integer_all_onesp (val);
fb471a13
MS
1629}
1630
5d6655eb
MS
1631/* For an ARG referencing one or more strings, try to obtain the range
1632 of their lengths, or the size of the largest array ARG referes to if
1633 the range of lengths cannot be determined, and store all in *PDATA.
1634 For an integer ARG (when RKIND == SRK_INT_VALUE), try to determine
1635 the maximum constant value.
1636 If ARG is an SSA_NAME, follow its use-def chains. When RKIND ==
1637 SRK_STRLEN, then if PDATA->MAXLEN is not equal to the determined
1638 length or if we are unable to determine the length, return false.
fb471a13 1639 VISITED is a bitmap of visited variables.
598f7235
MS
1640 RKIND determines the kind of value or range to obtain (see
1641 strlen_range_kind).
1642 Set PDATA->DECL if ARG refers to an unterminated constant array.
1643 On input, set ELTSIZE to 1 for normal single byte character strings,
1644 and either 2 or 4 for wide characer strings (the size of wchar_t).
1645 Return true if *PDATA was successfully populated and false otherwise. */
fb471a13
MS
1646
1647static bool
03c4a945
MS
1648get_range_strlen (tree arg, bitmap *visited,
1649 strlen_range_kind rkind,
1650 c_strlen_data *pdata, unsigned eltsize)
fb471a13
MS
1651{
1652
1653 if (TREE_CODE (arg) != SSA_NAME)
03c4a945 1654 return get_range_strlen_tree (arg, visited, rkind, pdata, eltsize);
fb471a13 1655
fef5a0d9
RB
1656 /* If ARG is registered for SSA update we cannot look at its defining
1657 statement. */
1658 if (name_registered_for_update_p (arg))
1659 return false;
1660
1661 /* If we were already here, break the infinite cycle. */
dcb7fae2
RB
1662 if (!*visited)
1663 *visited = BITMAP_ALLOC (NULL);
1664 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
fef5a0d9
RB
1665 return true;
1666
fb471a13
MS
1667 tree var = arg;
1668 gimple *def_stmt = SSA_NAME_DEF_STMT (var);
1669
fef5a0d9
RB
1670 switch (gimple_code (def_stmt))
1671 {
1672 case GIMPLE_ASSIGN:
598f7235
MS
1673 /* The RHS of the statement defining VAR must either have a
1674 constant length or come from another SSA_NAME with a constant
1675 length. */
fef5a0d9
RB
1676 if (gimple_assign_single_p (def_stmt)
1677 || gimple_assign_unary_nop_p (def_stmt))
1678 {
598f7235 1679 tree rhs = gimple_assign_rhs1 (def_stmt);
03c4a945 1680 return get_range_strlen (rhs, visited, rkind, pdata, eltsize);
fef5a0d9
RB
1681 }
1682 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1683 {
c8602fe6
JJ
1684 tree ops[2] = { gimple_assign_rhs2 (def_stmt),
1685 gimple_assign_rhs3 (def_stmt) };
1686
1687 for (unsigned int i = 0; i < 2; i++)
03c4a945 1688 if (!get_range_strlen (ops[i], visited, rkind, pdata, eltsize))
c8602fe6 1689 {
84de9426 1690 if (rkind != SRK_LENRANGE)
c8602fe6 1691 return false;
80c2bad6
MS
1692 /* Set the upper bound to the maximum to prevent
1693 it from being adjusted in the next iteration but
1694 leave MINLEN and the more conservative MAXBOUND
1695 determined so far alone (or leave them null if
1696 they haven't been set yet). That the MINLEN is
1697 in fact zero can be determined from MAXLEN being
1698 unbounded but the discovered minimum is used for
1699 diagnostics. */
730832cd 1700 pdata->maxlen = build_all_ones_cst (size_type_node);
c8602fe6
JJ
1701 }
1702 return true;
cc8bea0a 1703 }
fef5a0d9
RB
1704 return false;
1705
1706 case GIMPLE_PHI:
598f7235
MS
1707 /* Unless RKIND == SRK_LENRANGE, all arguments of the PHI node
1708 must have a constant length. */
c8602fe6 1709 for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
fef5a0d9
RB
1710 {
1711 tree arg = gimple_phi_arg (def_stmt, i)->def;
1712
1713 /* If this PHI has itself as an argument, we cannot
1714 determine the string length of this argument. However,
1715 if we can find a constant string length for the other
1716 PHI args then we can still be sure that this is a
1717 constant string length. So be optimistic and just
1718 continue with the next argument. */
1719 if (arg == gimple_phi_result (def_stmt))
1720 continue;
1721
03c4a945 1722 if (!get_range_strlen (arg, visited, rkind, pdata, eltsize))
88d0c3f0 1723 {
84de9426 1724 if (rkind != SRK_LENRANGE)
88d0c3f0 1725 return false;
80c2bad6
MS
1726 /* Set the upper bound to the maximum to prevent
1727 it from being adjusted in the next iteration but
1728 leave MINLEN and the more conservative MAXBOUND
1729 determined so far alone (or leave them null if
1730 they haven't been set yet). That the MINLEN is
1731 in fact zero can be determined from MAXLEN being
1732 unbounded but the discovered minimum is used for
1733 diagnostics. */
730832cd 1734 pdata->maxlen = build_all_ones_cst (size_type_node);
88d0c3f0 1735 }
fef5a0d9 1736 }
fef5a0d9
RB
1737 return true;
1738
1739 default:
1740 return false;
1741 }
1742}
5d6655eb 1743
97623b52
MS
1744/* Try to obtain the range of the lengths of the string(s) referenced
1745 by ARG, or the size of the largest array ARG refers to if the range
a7160771
MS
1746 of lengths cannot be determined, and store all in *PDATA which must
1747 be zero-initialized on input except PDATA->MAXBOUND may be set to
1748 a non-null tree node other than INTEGER_CST to request to have it
1749 set to the length of the longest string in a PHI. ELTSIZE is
1750 the expected size of the string element in bytes: 1 for char and
97623b52
MS
1751 some power of 2 for wide characters.
1752 Return true if the range [PDATA->MINLEN, PDATA->MAXLEN] is suitable
1753 for optimization. Returning false means that a nonzero PDATA->MINLEN
1754 doesn't reflect the true lower bound of the range when PDATA->MAXLEN
1755 is -1 (in that case, the actual range is indeterminate, i.e.,
1756 [0, PTRDIFF_MAX - 2]. */
88d0c3f0 1757
3f343040 1758bool
84de9426 1759get_range_strlen (tree arg, c_strlen_data *pdata, unsigned eltsize)
88d0c3f0
MS
1760{
1761 bitmap visited = NULL;
a7160771 1762 tree maxbound = pdata->maxbound;
88d0c3f0 1763
84de9426 1764 if (!get_range_strlen (arg, &visited, SRK_LENRANGE, pdata, eltsize))
730832cd 1765 {
5d6655eb
MS
1766 /* On failure extend the length range to an impossible maximum
1767 (a valid MAXLEN must be less than PTRDIFF_MAX - 1). Other
1768 members can stay unchanged regardless. */
1769 pdata->minlen = ssize_int (0);
1770 pdata->maxlen = build_all_ones_cst (size_type_node);
730832cd 1771 }
5d6655eb
MS
1772 else if (!pdata->minlen)
1773 pdata->minlen = ssize_int (0);
1774
a7160771
MS
1775 /* If it's unchanged from it initial non-null value, set the conservative
1776 MAXBOUND to SIZE_MAX. Otherwise leave it null (if it is null). */
1777 if (maxbound && pdata->maxbound == maxbound)
1778 pdata->maxbound = build_all_ones_cst (size_type_node);
88d0c3f0
MS
1779
1780 if (visited)
1781 BITMAP_FREE (visited);
3f343040 1782
03c4a945 1783 return !integer_all_onesp (pdata->maxlen);
88d0c3f0
MS
1784}
1785
5d6655eb
MS
1786/* Return the maximum value for ARG given RKIND (see strlen_range_kind).
1787 For ARG of pointer types, NONSTR indicates if the caller is prepared
1788 to handle unterminated strings. For integer ARG and when RKIND ==
1789 SRK_INT_VALUE, NONSTR must be null.
e08341bb 1790
5d6655eb
MS
1791 If an unterminated array is discovered and our caller handles
1792 unterminated arrays, then bubble up the offending DECL and
e08341bb
MS
1793 return the maximum size. Otherwise return NULL. */
1794
598f7235
MS
1795static tree
1796get_maxval_strlen (tree arg, strlen_range_kind rkind, tree *nonstr = NULL)
dcb7fae2 1797{
598f7235
MS
1798 /* A non-null NONSTR is meaningless when determining the maximum
1799 value of an integer ARG. */
1800 gcc_assert (rkind != SRK_INT_VALUE || nonstr == NULL);
1801 /* ARG must have an integral type when RKIND says so. */
1802 gcc_assert (rkind != SRK_INT_VALUE || INTEGRAL_TYPE_P (TREE_TYPE (arg)));
1803
dcb7fae2 1804 bitmap visited = NULL;
3f343040 1805
5d6655eb
MS
1806 /* Reset DATA.MAXLEN if the call fails or when DATA.MAXLEN
1807 is unbounded. */
730832cd 1808 c_strlen_data lendata = { };
03c4a945 1809 if (!get_range_strlen (arg, &visited, rkind, &lendata, /* eltsize = */1))
730832cd 1810 lendata.maxlen = NULL_TREE;
5d6655eb
MS
1811 else if (lendata.maxlen && integer_all_onesp (lendata.maxlen))
1812 lendata.maxlen = NULL_TREE;
1813
dcb7fae2
RB
1814 if (visited)
1815 BITMAP_FREE (visited);
1816
e08341bb
MS
1817 if (nonstr)
1818 {
1819 /* For callers prepared to handle unterminated arrays set
1820 *NONSTR to point to the declaration of the array and return
1821 the maximum length/size. */
730832cd
MS
1822 *nonstr = lendata.decl;
1823 return lendata.maxlen;
e08341bb
MS
1824 }
1825
1826 /* Fail if the constant array isn't nul-terminated. */
730832cd 1827 return lendata.decl ? NULL_TREE : lendata.maxlen;
dcb7fae2
RB
1828}
1829
fef5a0d9
RB
1830
1831/* Fold function call to builtin strcpy with arguments DEST and SRC.
1832 If LEN is not NULL, it represents the length of the string to be
1833 copied. Return NULL_TREE if no simplification can be made. */
1834
1835static bool
1836gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
dcb7fae2 1837 tree dest, tree src)
fef5a0d9 1838{
cc8bea0a
MS
1839 gimple *stmt = gsi_stmt (*gsi);
1840 location_t loc = gimple_location (stmt);
fef5a0d9
RB
1841 tree fn;
1842
1843 /* If SRC and DEST are the same (and not volatile), return DEST. */
1844 if (operand_equal_p (src, dest, 0))
1845 {
8cd95cec
MS
1846 /* Issue -Wrestrict unless the pointers are null (those do
1847 not point to objects and so do not indicate an overlap;
1848 such calls could be the result of sanitization and jump
1849 threading). */
1850 if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
e9b9fa4c
MS
1851 {
1852 tree func = gimple_call_fndecl (stmt);
cc8bea0a 1853
e9b9fa4c
MS
1854 warning_at (loc, OPT_Wrestrict,
1855 "%qD source argument is the same as destination",
1856 func);
1857 }
cc8bea0a 1858
fef5a0d9
RB
1859 replace_call_with_value (gsi, dest);
1860 return true;
1861 }
1862
1863 if (optimize_function_for_size_p (cfun))
1864 return false;
1865
1866 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1867 if (!fn)
1868 return false;
1869
e08341bb
MS
1870 /* Set to non-null if ARG refers to an unterminated array. */
1871 tree nonstr = NULL;
598f7235 1872 tree len = get_maxval_strlen (src, SRK_STRLEN, &nonstr);
e08341bb
MS
1873
1874 if (nonstr)
1875 {
1876 /* Avoid folding calls with unterminated arrays. */
1877 if (!gimple_no_warning_p (stmt))
1878 warn_string_no_nul (loc, "strcpy", src, nonstr);
1879 gimple_set_no_warning (stmt, true);
1880 return false;
1881 }
1882
fef5a0d9 1883 if (!len)
dcb7fae2 1884 return false;
fef5a0d9
RB
1885
1886 len = fold_convert_loc (loc, size_type_node, len);
1887 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1888 len = force_gimple_operand_gsi (gsi, len, true,
1889 NULL_TREE, true, GSI_SAME_STMT);
355fe088 1890 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
fef5a0d9
RB
1891 replace_call_with_call_and_fold (gsi, repl);
1892 return true;
1893}
1894
1895/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1896 If SLEN is not NULL, it represents the length of the source string.
1897 Return NULL_TREE if no simplification can be made. */
1898
1899static bool
dcb7fae2
RB
1900gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1901 tree dest, tree src, tree len)
fef5a0d9 1902{
025d57f0
MS
1903 gimple *stmt = gsi_stmt (*gsi);
1904 location_t loc = gimple_location (stmt);
6a33d0ff 1905 bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;
fef5a0d9
RB
1906
1907 /* If the LEN parameter is zero, return DEST. */
1908 if (integer_zerop (len))
1909 {
53b28abf 1910 /* Avoid warning if the destination refers to an array/pointer
6a33d0ff
MS
1911 decorate with attribute nonstring. */
1912 if (!nonstring)
1913 {
1914 tree fndecl = gimple_call_fndecl (stmt);
6a33d0ff
MS
1915
1916 /* Warn about the lack of nul termination: the result is not
1917 a (nul-terminated) string. */
598f7235 1918 tree slen = get_maxval_strlen (src, SRK_STRLEN);
6a33d0ff
MS
1919 if (slen && !integer_zerop (slen))
1920 warning_at (loc, OPT_Wstringop_truncation,
1921 "%G%qD destination unchanged after copying no bytes "
1922 "from a string of length %E",
8a45b051 1923 stmt, fndecl, slen);
6a33d0ff
MS
1924 else
1925 warning_at (loc, OPT_Wstringop_truncation,
1926 "%G%qD destination unchanged after copying no bytes",
8a45b051 1927 stmt, fndecl);
6a33d0ff 1928 }
025d57f0 1929
fef5a0d9
RB
1930 replace_call_with_value (gsi, dest);
1931 return true;
1932 }
1933
1934 /* We can't compare slen with len as constants below if len is not a
1935 constant. */
dcb7fae2 1936 if (TREE_CODE (len) != INTEGER_CST)
fef5a0d9
RB
1937 return false;
1938
fef5a0d9 1939 /* Now, we must be passed a constant src ptr parameter. */
598f7235 1940 tree slen = get_maxval_strlen (src, SRK_STRLEN);
dcb7fae2 1941 if (!slen || TREE_CODE (slen) != INTEGER_CST)
fef5a0d9
RB
1942 return false;
1943
025d57f0
MS
1944 /* The size of the source string including the terminating nul. */
1945 tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
fef5a0d9
RB
1946
1947 /* We do not support simplification of this case, though we do
1948 support it when expanding trees into RTL. */
1949 /* FIXME: generate a call to __builtin_memset. */
025d57f0 1950 if (tree_int_cst_lt (ssize, len))
fef5a0d9
RB
1951 return false;
1952
5d0d5d68
MS
1953 /* Diagnose truncation that leaves the copy unterminated. */
1954 maybe_diag_stxncpy_trunc (*gsi, src, len);
025d57f0 1955
fef5a0d9 1956 /* OK transform into builtin memcpy. */
025d57f0 1957 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
fef5a0d9
RB
1958 if (!fn)
1959 return false;
1960
1961 len = fold_convert_loc (loc, size_type_node, len);
1962 len = force_gimple_operand_gsi (gsi, len, true,
1963 NULL_TREE, true, GSI_SAME_STMT);
355fe088 1964 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
fef5a0d9 1965 replace_call_with_call_and_fold (gsi, repl);
025d57f0 1966
fef5a0d9
RB
1967 return true;
1968}
1969
71dea1dd
WD
1970/* Fold function call to builtin strchr or strrchr.
1971 If both arguments are constant, evaluate and fold the result,
1972 otherwise simplify str(r)chr (str, 0) into str + strlen (str).
912d9ec3
WD
1973 In general strlen is significantly faster than strchr
1974 due to being a simpler operation. */
1975static bool
71dea1dd 1976gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
912d9ec3
WD
1977{
1978 gimple *stmt = gsi_stmt (*gsi);
1979 tree str = gimple_call_arg (stmt, 0);
1980 tree c = gimple_call_arg (stmt, 1);
1981 location_t loc = gimple_location (stmt);
71dea1dd
WD
1982 const char *p;
1983 char ch;
912d9ec3 1984
71dea1dd 1985 if (!gimple_call_lhs (stmt))
912d9ec3
WD
1986 return false;
1987
b5338fb3
MS
1988 /* Avoid folding if the first argument is not a nul-terminated array.
1989 Defer warning until later. */
1990 if (!check_nul_terminated_array (NULL_TREE, str))
1991 return false;
1992
71dea1dd
WD
1993 if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
1994 {
1995 const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);
1996
1997 if (p1 == NULL)
1998 {
1999 replace_call_with_value (gsi, integer_zero_node);
2000 return true;
2001 }
2002
2003 tree len = build_int_cst (size_type_node, p1 - p);
2004 gimple_seq stmts = NULL;
2005 gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2006 POINTER_PLUS_EXPR, str, len);
2007 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2008 gsi_replace_with_seq_vops (gsi, stmts);
2009 return true;
2010 }
2011
2012 if (!integer_zerop (c))
912d9ec3
WD
2013 return false;
2014
71dea1dd 2015 /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size. */
c8952930 2016 if (is_strrchr && optimize_function_for_size_p (cfun))
71dea1dd
WD
2017 {
2018 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
2019
c8952930 2020 if (strchr_fn)
71dea1dd
WD
2021 {
2022 gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
2023 replace_call_with_call_and_fold (gsi, repl);
2024 return true;
2025 }
2026
2027 return false;
2028 }
2029
912d9ec3
WD
2030 tree len;
2031 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
2032
2033 if (!strlen_fn)
2034 return false;
2035
2036 /* Create newstr = strlen (str). */
2037 gimple_seq stmts = NULL;
2038 gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
2039 gimple_set_location (new_stmt, loc);
a15ebbcd 2040 len = create_tmp_reg_or_ssa_name (size_type_node);
912d9ec3
WD
2041 gimple_call_set_lhs (new_stmt, len);
2042 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2043
2044 /* Create (str p+ strlen (str)). */
2045 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2046 POINTER_PLUS_EXPR, str, len);
2047 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2048 gsi_replace_with_seq_vops (gsi, stmts);
2049 /* gsi now points at the assignment to the lhs, get a
2050 stmt iterator to the strlen.
2051 ??? We can't use gsi_for_stmt as that doesn't work when the
2052 CFG isn't built yet. */
2053 gimple_stmt_iterator gsi2 = *gsi;
2054 gsi_prev (&gsi2);
2055 fold_stmt (&gsi2);
2056 return true;
2057}
2058
c8952930
JJ
2059/* Fold function call to builtin strstr.
2060 If both arguments are constant, evaluate and fold the result,
2061 additionally fold strstr (x, "") into x and strstr (x, "c")
2062 into strchr (x, 'c'). */
2063static bool
2064gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
2065{
2066 gimple *stmt = gsi_stmt (*gsi);
b5338fb3
MS
2067 if (!gimple_call_lhs (stmt))
2068 return false;
2069
c8952930
JJ
2070 tree haystack = gimple_call_arg (stmt, 0);
2071 tree needle = gimple_call_arg (stmt, 1);
c8952930 2072
b5338fb3
MS
2073 /* Avoid folding if either argument is not a nul-terminated array.
2074 Defer warning until later. */
2075 if (!check_nul_terminated_array (NULL_TREE, haystack)
2076 || !check_nul_terminated_array (NULL_TREE, needle))
c8952930
JJ
2077 return false;
2078
b5338fb3 2079 const char *q = c_getstr (needle);
c8952930
JJ
2080 if (q == NULL)
2081 return false;
2082
b5338fb3 2083 if (const char *p = c_getstr (haystack))
c8952930
JJ
2084 {
2085 const char *r = strstr (p, q);
2086
2087 if (r == NULL)
2088 {
2089 replace_call_with_value (gsi, integer_zero_node);
2090 return true;
2091 }
2092
2093 tree len = build_int_cst (size_type_node, r - p);
2094 gimple_seq stmts = NULL;
2095 gimple *new_stmt
2096 = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
2097 haystack, len);
2098 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2099 gsi_replace_with_seq_vops (gsi, stmts);
2100 return true;
2101 }
2102
2103 /* For strstr (x, "") return x. */
2104 if (q[0] == '\0')
2105 {
2106 replace_call_with_value (gsi, haystack);
2107 return true;
2108 }
2109
2110 /* Transform strstr (x, "c") into strchr (x, 'c'). */
2111 if (q[1] == '\0')
2112 {
2113 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
2114 if (strchr_fn)
2115 {
2116 tree c = build_int_cst (integer_type_node, q[0]);
2117 gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
2118 replace_call_with_call_and_fold (gsi, repl);
2119 return true;
2120 }
2121 }
2122
2123 return false;
2124}
2125
fef5a0d9
RB
2126/* Simplify a call to the strcat builtin. DST and SRC are the arguments
2127 to the call.
2128
2129 Return NULL_TREE if no simplification was possible, otherwise return the
2130 simplified form of the call as a tree.
2131
2132 The simplified form may be a constant or other expression which
2133 computes the same value, but in a more efficient manner (including
2134 calls to other builtin functions).
2135
2136 The call may contain arguments which need to be evaluated, but
2137 which are not useful to determine the result of the call. In
2138 this case we return a chain of COMPOUND_EXPRs. The LHS of each
2139 COMPOUND_EXPR will be an argument which must be evaluated.
2140 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
2141 COMPOUND_EXPR in the chain will contain the tree for the simplified
2142 form of the builtin function call. */
2143
2144static bool
dcb7fae2 2145gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
fef5a0d9 2146{
355fe088 2147 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2 2148 location_t loc = gimple_location (stmt);
fef5a0d9
RB
2149
2150 const char *p = c_getstr (src);
2151
2152 /* If the string length is zero, return the dst parameter. */
2153 if (p && *p == '\0')
2154 {
2155 replace_call_with_value (gsi, dst);
2156 return true;
2157 }
2158
2159 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
2160 return false;
2161
2162 /* See if we can store by pieces into (dst + strlen(dst)). */
2163 tree newdst;
2164 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
2165 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2166
2167 if (!strlen_fn || !memcpy_fn)
2168 return false;
2169
2170 /* If the length of the source string isn't computable don't
2171 split strcat into strlen and memcpy. */
598f7235 2172 tree len = get_maxval_strlen (src, SRK_STRLEN);
fef5a0d9 2173 if (! len)
fef5a0d9
RB
2174 return false;
2175
2176 /* Create strlen (dst). */
2177 gimple_seq stmts = NULL, stmts2;
355fe088 2178 gimple *repl = gimple_build_call (strlen_fn, 1, dst);
fef5a0d9 2179 gimple_set_location (repl, loc);
a15ebbcd 2180 newdst = create_tmp_reg_or_ssa_name (size_type_node);
fef5a0d9
RB
2181 gimple_call_set_lhs (repl, newdst);
2182 gimple_seq_add_stmt_without_update (&stmts, repl);
2183
2184 /* Create (dst p+ strlen (dst)). */
2185 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
2186 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
2187 gimple_seq_add_seq_without_update (&stmts, stmts2);
2188
2189 len = fold_convert_loc (loc, size_type_node, len);
2190 len = size_binop_loc (loc, PLUS_EXPR, len,
2191 build_int_cst (size_type_node, 1));
2192 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
2193 gimple_seq_add_seq_without_update (&stmts, stmts2);
2194
2195 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
2196 gimple_seq_add_stmt_without_update (&stmts, repl);
2197 if (gimple_call_lhs (stmt))
2198 {
2199 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
2200 gimple_seq_add_stmt_without_update (&stmts, repl);
2201 gsi_replace_with_seq_vops (gsi, stmts);
2202 /* gsi now points at the assignment to the lhs, get a
2203 stmt iterator to the memcpy call.
2204 ??? We can't use gsi_for_stmt as that doesn't work when the
2205 CFG isn't built yet. */
2206 gimple_stmt_iterator gsi2 = *gsi;
2207 gsi_prev (&gsi2);
2208 fold_stmt (&gsi2);
2209 }
2210 else
2211 {
2212 gsi_replace_with_seq_vops (gsi, stmts);
2213 fold_stmt (gsi);
2214 }
2215 return true;
2216}
2217
07f1cf56
RB
2218/* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
2219 are the arguments to the call. */
2220
2221static bool
2222gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
2223{
355fe088 2224 gimple *stmt = gsi_stmt (*gsi);
07f1cf56
RB
2225 tree dest = gimple_call_arg (stmt, 0);
2226 tree src = gimple_call_arg (stmt, 1);
2227 tree size = gimple_call_arg (stmt, 2);
2228 tree fn;
2229 const char *p;
2230
2231
2232 p = c_getstr (src);
2233 /* If the SRC parameter is "", return DEST. */
2234 if (p && *p == '\0')
2235 {
2236 replace_call_with_value (gsi, dest);
2237 return true;
2238 }
2239
2240 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
2241 return false;
2242
2243 /* If __builtin_strcat_chk is used, assume strcat is available. */
2244 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
2245 if (!fn)
2246 return false;
2247
355fe088 2248 gimple *repl = gimple_build_call (fn, 2, dest, src);
07f1cf56
RB
2249 replace_call_with_call_and_fold (gsi, repl);
2250 return true;
2251}
2252
ad03a744
RB
2253/* Simplify a call to the strncat builtin. */
2254
2255static bool
2256gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
2257{
8a45b051 2258 gimple *stmt = gsi_stmt (*gsi);
ad03a744
RB
2259 tree dst = gimple_call_arg (stmt, 0);
2260 tree src = gimple_call_arg (stmt, 1);
2261 tree len = gimple_call_arg (stmt, 2);
2262
2263 const char *p = c_getstr (src);
2264
2265 /* If the requested length is zero, or the src parameter string
2266 length is zero, return the dst parameter. */
2267 if (integer_zerop (len) || (p && *p == '\0'))
2268 {
2269 replace_call_with_value (gsi, dst);
2270 return true;
2271 }
2272
025d57f0
MS
2273 if (TREE_CODE (len) != INTEGER_CST || !p)
2274 return false;
2275
2276 unsigned srclen = strlen (p);
2277
2278 int cmpsrc = compare_tree_int (len, srclen);
2279
2280 /* Return early if the requested len is less than the string length.
2281 Warnings will be issued elsewhere later. */
2282 if (cmpsrc < 0)
2283 return false;
2284
2285 unsigned HOST_WIDE_INT dstsize;
2286
2287 bool nowarn = gimple_no_warning_p (stmt);
2288
2289 if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
ad03a744 2290 {
025d57f0 2291 int cmpdst = compare_tree_int (len, dstsize);
ad03a744 2292
025d57f0
MS
2293 if (cmpdst >= 0)
2294 {
2295 tree fndecl = gimple_call_fndecl (stmt);
2296
2297 /* Strncat copies (at most) LEN bytes and always appends
2298 the terminating NUL so the specified bound should never
2299 be equal to (or greater than) the size of the destination.
2300 If it is, the copy could overflow. */
2301 location_t loc = gimple_location (stmt);
2302 nowarn = warning_at (loc, OPT_Wstringop_overflow_,
2303 cmpdst == 0
2304 ? G_("%G%qD specified bound %E equals "
2305 "destination size")
2306 : G_("%G%qD specified bound %E exceeds "
2307 "destination size %wu"),
2308 stmt, fndecl, len, dstsize);
2309 if (nowarn)
2310 gimple_set_no_warning (stmt, true);
2311 }
2312 }
ad03a744 2313
025d57f0
MS
2314 if (!nowarn && cmpsrc == 0)
2315 {
2316 tree fndecl = gimple_call_fndecl (stmt);
025d57f0 2317 location_t loc = gimple_location (stmt);
eec5f615
MS
2318
2319 /* To avoid possible overflow the specified bound should also
2320 not be equal to the length of the source, even when the size
2321 of the destination is unknown (it's not an uncommon mistake
2322 to specify as the bound to strncpy the length of the source). */
025d57f0
MS
2323 if (warning_at (loc, OPT_Wstringop_overflow_,
2324 "%G%qD specified bound %E equals source length",
2325 stmt, fndecl, len))
2326 gimple_set_no_warning (stmt, true);
ad03a744
RB
2327 }
2328
025d57f0
MS
2329 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
2330
2331 /* If the replacement _DECL isn't initialized, don't do the
2332 transformation. */
2333 if (!fn)
2334 return false;
2335
2336 /* Otherwise, emit a call to strcat. */
2337 gcall *repl = gimple_build_call (fn, 2, dst, src);
2338 replace_call_with_call_and_fold (gsi, repl);
2339 return true;
ad03a744
RB
2340}
2341
745583f9
RB
2342/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2343 LEN, and SIZE. */
2344
2345static bool
2346gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
2347{
355fe088 2348 gimple *stmt = gsi_stmt (*gsi);
745583f9
RB
2349 tree dest = gimple_call_arg (stmt, 0);
2350 tree src = gimple_call_arg (stmt, 1);
2351 tree len = gimple_call_arg (stmt, 2);
2352 tree size = gimple_call_arg (stmt, 3);
2353 tree fn;
2354 const char *p;
2355
2356 p = c_getstr (src);
2357 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2358 if ((p && *p == '\0')
2359 || integer_zerop (len))
2360 {
2361 replace_call_with_value (gsi, dest);
2362 return true;
2363 }
2364
2365 if (! tree_fits_uhwi_p (size))
2366 return false;
2367
2368 if (! integer_all_onesp (size))
2369 {
2370 tree src_len = c_strlen (src, 1);
2371 if (src_len
2372 && tree_fits_uhwi_p (src_len)
2373 && tree_fits_uhwi_p (len)
2374 && ! tree_int_cst_lt (len, src_len))
2375 {
2376 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2377 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
2378 if (!fn)
2379 return false;
2380
355fe088 2381 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
745583f9
RB
2382 replace_call_with_call_and_fold (gsi, repl);
2383 return true;
2384 }
2385 return false;
2386 }
2387
2388 /* If __builtin_strncat_chk is used, assume strncat is available. */
2389 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2390 if (!fn)
2391 return false;
2392
355fe088 2393 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
745583f9
RB
2394 replace_call_with_call_and_fold (gsi, repl);
2395 return true;
2396}
2397
a918bfbf
ML
2398/* Build and append gimple statements to STMTS that would load a first
2399 character of a memory location identified by STR. LOC is location
2400 of the statement. */
2401
2402static tree
2403gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2404{
2405 tree var;
2406
2407 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2408 tree cst_uchar_ptr_node
2409 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2410 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2411
2412 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2413 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2414 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2415
2416 gimple_assign_set_lhs (stmt, var);
2417 gimple_seq_add_stmt_without_update (stmts, stmt);
2418
2419 return var;
2420}
2421
d2f8402a 2422/* Fold a call to the str{n}{case}cmp builtin pointed by GSI iterator. */
a918bfbf
ML
2423
2424static bool
2425gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
2426{
2427 gimple *stmt = gsi_stmt (*gsi);
2428 tree callee = gimple_call_fndecl (stmt);
2429 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2430
2431 tree type = integer_type_node;
2432 tree str1 = gimple_call_arg (stmt, 0);
2433 tree str2 = gimple_call_arg (stmt, 1);
2434 tree lhs = gimple_call_lhs (stmt);
d86d8b35
MS
2435
2436 tree bound_node = NULL_TREE;
d2f8402a 2437 unsigned HOST_WIDE_INT bound = HOST_WIDE_INT_M1U;
a918bfbf
ML
2438
2439 /* Handle strncmp and strncasecmp functions. */
2440 if (gimple_call_num_args (stmt) == 3)
2441 {
d86d8b35
MS
2442 bound_node = gimple_call_arg (stmt, 2);
2443 if (tree_fits_uhwi_p (bound_node))
2444 bound = tree_to_uhwi (bound_node);
a918bfbf
ML
2445 }
2446
d86d8b35 2447 /* If the BOUND parameter is zero, return zero. */
d2f8402a 2448 if (bound == 0)
a918bfbf
ML
2449 {
2450 replace_call_with_value (gsi, integer_zero_node);
2451 return true;
2452 }
2453
2454 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
2455 if (operand_equal_p (str1, str2, 0))
2456 {
2457 replace_call_with_value (gsi, integer_zero_node);
2458 return true;
2459 }
2460
d2f8402a
MS
2461 /* Initially set to the number of characters, including the terminating
2462 nul if each array has one. LENx == strnlen (Sx, LENx) implies that
2463 the array Sx is not terminated by a nul.
2464 For nul-terminated strings then adjusted to their length so that
2465 LENx == NULPOSx holds. */
2466 unsigned HOST_WIDE_INT len1 = HOST_WIDE_INT_MAX, len2 = len1;
2467 const char *p1 = c_getstr (str1, &len1);
2468 const char *p2 = c_getstr (str2, &len2);
2469
2470 /* The position of the terminating nul character if one exists, otherwise
2471 a value greater than LENx. */
2472 unsigned HOST_WIDE_INT nulpos1 = HOST_WIDE_INT_MAX, nulpos2 = nulpos1;
2473
2474 if (p1)
2475 {
2476 size_t n = strnlen (p1, len1);
2477 if (n < len1)
2478 len1 = nulpos1 = n;
2479 }
2480
2481 if (p2)
2482 {
2483 size_t n = strnlen (p2, len2);
2484 if (n < len2)
2485 len2 = nulpos2 = n;
2486 }
a918bfbf
ML
2487
2488 /* For known strings, return an immediate value. */
2489 if (p1 && p2)
2490 {
2491 int r = 0;
2492 bool known_result = false;
2493
2494 switch (fcode)
2495 {
2496 case BUILT_IN_STRCMP:
8b0b334a 2497 case BUILT_IN_STRCMP_EQ:
d2f8402a 2498 if (len1 != nulpos1 || len2 != nulpos2)
a918bfbf 2499 break;
d2f8402a
MS
2500
2501 r = strcmp (p1, p2);
2502 known_result = true;
2503 break;
2504
a918bfbf 2505 case BUILT_IN_STRNCMP:
8b0b334a 2506 case BUILT_IN_STRNCMP_EQ:
a918bfbf 2507 {
d86d8b35
MS
2508 if (bound == HOST_WIDE_INT_M1U)
2509 break;
2510
d2f8402a
MS
2511 /* Reduce the bound to be no more than the length
2512 of the shorter of the two strings, or the sizes
2513 of the unterminated arrays. */
2514 unsigned HOST_WIDE_INT n = bound;
2515
2516 if (len1 == nulpos1 && len1 < n)
2517 n = len1 + 1;
2518 if (len2 == nulpos2 && len2 < n)
2519 n = len2 + 1;
2520
2521 if (MIN (nulpos1, nulpos2) + 1 < n)
a918bfbf 2522 break;
d2f8402a
MS
2523
2524 r = strncmp (p1, p2, n);
a918bfbf
ML
2525 known_result = true;
2526 break;
2527 }
2528 /* Only handleable situation is where the string are equal (result 0),
2529 which is already handled by operand_equal_p case. */
2530 case BUILT_IN_STRCASECMP:
2531 break;
2532 case BUILT_IN_STRNCASECMP:
2533 {
d2f8402a 2534 if (bound == HOST_WIDE_INT_M1U)
a918bfbf 2535 break;
d2f8402a 2536 r = strncmp (p1, p2, bound);
a918bfbf
ML
2537 if (r == 0)
2538 known_result = true;
5de73c05 2539 break;
a918bfbf
ML
2540 }
2541 default:
2542 gcc_unreachable ();
2543 }
2544
2545 if (known_result)
2546 {
2547 replace_call_with_value (gsi, build_cmp_result (type, r));
2548 return true;
2549 }
2550 }
2551
d2f8402a 2552 bool nonzero_bound = (bound >= 1 && bound < HOST_WIDE_INT_M1U)
a918bfbf 2553 || fcode == BUILT_IN_STRCMP
8b0b334a 2554 || fcode == BUILT_IN_STRCMP_EQ
a918bfbf
ML
2555 || fcode == BUILT_IN_STRCASECMP;
2556
2557 location_t loc = gimple_location (stmt);
2558
2559 /* If the second arg is "", return *(const unsigned char*)arg1. */
d2f8402a 2560 if (p2 && *p2 == '\0' && nonzero_bound)
a918bfbf
ML
2561 {
2562 gimple_seq stmts = NULL;
2563 tree var = gimple_load_first_char (loc, str1, &stmts);
2564 if (lhs)
2565 {
2566 stmt = gimple_build_assign (lhs, NOP_EXPR, var);
2567 gimple_seq_add_stmt_without_update (&stmts, stmt);
2568 }
2569
2570 gsi_replace_with_seq_vops (gsi, stmts);
2571 return true;
2572 }
2573
2574 /* If the first arg is "", return -*(const unsigned char*)arg2. */
d2f8402a 2575 if (p1 && *p1 == '\0' && nonzero_bound)
a918bfbf
ML
2576 {
2577 gimple_seq stmts = NULL;
2578 tree var = gimple_load_first_char (loc, str2, &stmts);
2579
2580 if (lhs)
2581 {
2582 tree c = create_tmp_reg_or_ssa_name (integer_type_node);
2583 stmt = gimple_build_assign (c, NOP_EXPR, var);
2584 gimple_seq_add_stmt_without_update (&stmts, stmt);
2585
2586 stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
2587 gimple_seq_add_stmt_without_update (&stmts, stmt);
2588 }
2589
2590 gsi_replace_with_seq_vops (gsi, stmts);
2591 return true;
2592 }
2593
d2f8402a 2594 /* If BOUND is one, return an expression corresponding to
a918bfbf 2595 (*(const unsigned char*)arg2 - *(const unsigned char*)arg1). */
d2f8402a 2596 if (fcode == BUILT_IN_STRNCMP && bound == 1)
a918bfbf
ML
2597 {
2598 gimple_seq stmts = NULL;
2599 tree temp1 = gimple_load_first_char (loc, str1, &stmts);
2600 tree temp2 = gimple_load_first_char (loc, str2, &stmts);
2601
2602 if (lhs)
2603 {
2604 tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
2605 gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
2606 gimple_seq_add_stmt_without_update (&stmts, convert1);
2607
2608 tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
2609 gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
2610 gimple_seq_add_stmt_without_update (&stmts, convert2);
2611
2612 stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
2613 gimple_seq_add_stmt_without_update (&stmts, stmt);
2614 }
2615
2616 gsi_replace_with_seq_vops (gsi, stmts);
2617 return true;
2618 }
2619
d2f8402a
MS
2620 /* If BOUND is greater than the length of one constant string,
2621 and the other argument is also a nul-terminated string, replace
2622 strncmp with strcmp. */
2623 if (fcode == BUILT_IN_STRNCMP
2624 && bound > 0 && bound < HOST_WIDE_INT_M1U
2625 && ((p2 && len2 < bound && len2 == nulpos2)
2626 || (p1 && len1 < bound && len1 == nulpos1)))
caed5c92
QZ
2627 {
2628 tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
2629 if (!fn)
2630 return false;
2631 gimple *repl = gimple_build_call (fn, 2, str1, str2);
2632 replace_call_with_call_and_fold (gsi, repl);
2633 return true;
2634 }
2635
a918bfbf
ML
2636 return false;
2637}
2638
488c6247
ML
2639/* Fold a call to the memchr pointed by GSI iterator. */
2640
2641static bool
2642gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
2643{
2644 gimple *stmt = gsi_stmt (*gsi);
2645 tree lhs = gimple_call_lhs (stmt);
2646 tree arg1 = gimple_call_arg (stmt, 0);
2647 tree arg2 = gimple_call_arg (stmt, 1);
2648 tree len = gimple_call_arg (stmt, 2);
2649
2650 /* If the LEN parameter is zero, return zero. */
2651 if (integer_zerop (len))
2652 {
2653 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2654 return true;
2655 }
2656
2657 char c;
2658 if (TREE_CODE (arg2) != INTEGER_CST
2659 || !tree_fits_uhwi_p (len)
2660 || !target_char_cst_p (arg2, &c))
2661 return false;
2662
2663 unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
2664 unsigned HOST_WIDE_INT string_length;
2665 const char *p1 = c_getstr (arg1, &string_length);
2666
2667 if (p1)
2668 {
2669 const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
2670 if (r == NULL)
2671 {
5fd336bb
JM
2672 tree mem_size, offset_node;
2673 string_constant (arg1, &offset_node, &mem_size, NULL);
2674 unsigned HOST_WIDE_INT offset = (offset_node == NULL_TREE)
2675 ? 0 : tree_to_uhwi (offset_node);
2676 /* MEM_SIZE is the size of the array the string literal
2677 is stored in. */
2678 unsigned HOST_WIDE_INT string_size = tree_to_uhwi (mem_size) - offset;
2679 gcc_checking_assert (string_length <= string_size);
2680 if (length <= string_size)
488c6247
ML
2681 {
2682 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2683 return true;
2684 }
2685 }
2686 else
2687 {
2688 unsigned HOST_WIDE_INT offset = r - p1;
2689 gimple_seq stmts = NULL;
2690 if (lhs != NULL_TREE)
2691 {
2692 tree offset_cst = build_int_cst (TREE_TYPE (len), offset);
2693 gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
2694 arg1, offset_cst);
2695 gimple_seq_add_stmt_without_update (&stmts, stmt);
2696 }
2697 else
2698 gimple_seq_add_stmt_without_update (&stmts,
2699 gimple_build_nop ());
2700
2701 gsi_replace_with_seq_vops (gsi, stmts);
2702 return true;
2703 }
2704 }
2705
2706 return false;
2707}
a918bfbf 2708
fef5a0d9
RB
2709/* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
2710 to the call. IGNORE is true if the value returned
2711 by the builtin will be ignored. UNLOCKED is true is true if this
2712 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
2713 the known length of the string. Return NULL_TREE if no simplification
2714 was possible. */
2715
2716static bool
2717gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
fef5a0d9 2718 tree arg0, tree arg1,
dcb7fae2 2719 bool unlocked)
fef5a0d9 2720{
355fe088 2721 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2 2722
fef5a0d9
RB
2723 /* If we're using an unlocked function, assume the other unlocked
2724 functions exist explicitly. */
2725 tree const fn_fputc = (unlocked
2726 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
2727 : builtin_decl_implicit (BUILT_IN_FPUTC));
2728 tree const fn_fwrite = (unlocked
2729 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
2730 : builtin_decl_implicit (BUILT_IN_FWRITE));
2731
2732 /* If the return value is used, don't do the transformation. */
dcb7fae2 2733 if (gimple_call_lhs (stmt))
fef5a0d9
RB
2734 return false;
2735
fef5a0d9
RB
2736 /* Get the length of the string passed to fputs. If the length
2737 can't be determined, punt. */
598f7235 2738 tree len = get_maxval_strlen (arg0, SRK_STRLEN);
fef5a0d9
RB
2739 if (!len
2740 || TREE_CODE (len) != INTEGER_CST)
2741 return false;
2742
2743 switch (compare_tree_int (len, 1))
2744 {
2745 case -1: /* length is 0, delete the call entirely . */
2746 replace_call_with_value (gsi, integer_zero_node);
2747 return true;
2748
2749 case 0: /* length is 1, call fputc. */
2750 {
2751 const char *p = c_getstr (arg0);
2752 if (p != NULL)
2753 {
2754 if (!fn_fputc)
2755 return false;
2756
355fe088 2757 gimple *repl = gimple_build_call (fn_fputc, 2,
fef5a0d9
RB
2758 build_int_cst
2759 (integer_type_node, p[0]), arg1);
2760 replace_call_with_call_and_fold (gsi, repl);
2761 return true;
2762 }
2763 }
2764 /* FALLTHROUGH */
2765 case 1: /* length is greater than 1, call fwrite. */
2766 {
2767 /* If optimizing for size keep fputs. */
2768 if (optimize_function_for_size_p (cfun))
2769 return false;
2770 /* New argument list transforming fputs(string, stream) to
2771 fwrite(string, 1, len, stream). */
2772 if (!fn_fwrite)
2773 return false;
2774
355fe088 2775 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
fef5a0d9
RB
2776 size_one_node, len, arg1);
2777 replace_call_with_call_and_fold (gsi, repl);
2778 return true;
2779 }
2780 default:
2781 gcc_unreachable ();
2782 }
2783 return false;
2784}
2785
2786/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
2787 DEST, SRC, LEN, and SIZE are the arguments to the call.
2788 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
2789 code of the builtin. If MAXLEN is not NULL, it is maximum length
2790 passed as third argument. */
2791
2792static bool
2793gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
fef5a0d9 2794 tree dest, tree src, tree len, tree size,
fef5a0d9
RB
2795 enum built_in_function fcode)
2796{
355fe088 2797 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2
RB
2798 location_t loc = gimple_location (stmt);
2799 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
fef5a0d9
RB
2800 tree fn;
2801
2802 /* If SRC and DEST are the same (and not volatile), return DEST
2803 (resp. DEST+LEN for __mempcpy_chk). */
2804 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
2805 {
2806 if (fcode != BUILT_IN_MEMPCPY_CHK)
2807 {
2808 replace_call_with_value (gsi, dest);
2809 return true;
2810 }
2811 else
2812 {
74e3c262
RB
2813 gimple_seq stmts = NULL;
2814 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
22518428
JJ
2815 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
2816 TREE_TYPE (dest), dest, len);
74e3c262 2817 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
fef5a0d9
RB
2818 replace_call_with_value (gsi, temp);
2819 return true;
2820 }
2821 }
2822
2823 if (! tree_fits_uhwi_p (size))
2824 return false;
2825
598f7235 2826 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
fef5a0d9
RB
2827 if (! integer_all_onesp (size))
2828 {
2829 if (! tree_fits_uhwi_p (len))
2830 {
2831 /* If LEN is not constant, try MAXLEN too.
2832 For MAXLEN only allow optimizing into non-_ocs function
2833 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2834 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2835 {
2836 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
2837 {
2838 /* (void) __mempcpy_chk () can be optimized into
2839 (void) __memcpy_chk (). */
2840 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2841 if (!fn)
2842 return false;
2843
355fe088 2844 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
fef5a0d9
RB
2845 replace_call_with_call_and_fold (gsi, repl);
2846 return true;
2847 }
2848 return false;
2849 }
2850 }
2851 else
2852 maxlen = len;
2853
2854 if (tree_int_cst_lt (size, maxlen))
2855 return false;
2856 }
2857
2858 fn = NULL_TREE;
2859 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
2860 mem{cpy,pcpy,move,set} is available. */
2861 switch (fcode)
2862 {
2863 case BUILT_IN_MEMCPY_CHK:
2864 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
2865 break;
2866 case BUILT_IN_MEMPCPY_CHK:
2867 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
2868 break;
2869 case BUILT_IN_MEMMOVE_CHK:
2870 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
2871 break;
2872 case BUILT_IN_MEMSET_CHK:
2873 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
2874 break;
2875 default:
2876 break;
2877 }
2878
2879 if (!fn)
2880 return false;
2881
355fe088 2882 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
fef5a0d9
RB
2883 replace_call_with_call_and_fold (gsi, repl);
2884 return true;
2885}
2886
2887/* Fold a call to the __st[rp]cpy_chk builtin.
2888 DEST, SRC, and SIZE are the arguments to the call.
2889 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
2890 code of the builtin. If MAXLEN is not NULL, it is maximum length of
2891 strings passed as second argument. */
2892
2893static bool
2894gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
dcb7fae2 2895 tree dest,
fef5a0d9 2896 tree src, tree size,
fef5a0d9
RB
2897 enum built_in_function fcode)
2898{
355fe088 2899 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2
RB
2900 location_t loc = gimple_location (stmt);
2901 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
fef5a0d9
RB
2902 tree len, fn;
2903
2904 /* If SRC and DEST are the same (and not volatile), return DEST. */
2905 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
2906 {
8cd95cec
MS
2907 /* Issue -Wrestrict unless the pointers are null (those do
2908 not point to objects and so do not indicate an overlap;
2909 such calls could be the result of sanitization and jump
2910 threading). */
2911 if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
e9b9fa4c
MS
2912 {
2913 tree func = gimple_call_fndecl (stmt);
cc8bea0a 2914
e9b9fa4c
MS
2915 warning_at (loc, OPT_Wrestrict,
2916 "%qD source argument is the same as destination",
2917 func);
2918 }
cc8bea0a 2919
fef5a0d9
RB
2920 replace_call_with_value (gsi, dest);
2921 return true;
2922 }
2923
2924 if (! tree_fits_uhwi_p (size))
2925 return false;
2926
598f7235 2927 tree maxlen = get_maxval_strlen (src, SRK_STRLENMAX);
fef5a0d9
RB
2928 if (! integer_all_onesp (size))
2929 {
2930 len = c_strlen (src, 1);
2931 if (! len || ! tree_fits_uhwi_p (len))
2932 {
2933 /* If LEN is not constant, try MAXLEN too.
2934 For MAXLEN only allow optimizing into non-_ocs function
2935 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2936 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2937 {
2938 if (fcode == BUILT_IN_STPCPY_CHK)
2939 {
2940 if (! ignore)
2941 return false;
2942
2943 /* If return value of __stpcpy_chk is ignored,
2944 optimize into __strcpy_chk. */
2945 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
2946 if (!fn)
2947 return false;
2948
355fe088 2949 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
fef5a0d9
RB
2950 replace_call_with_call_and_fold (gsi, repl);
2951 return true;
2952 }
2953
2954 if (! len || TREE_SIDE_EFFECTS (len))
2955 return false;
2956
2957 /* If c_strlen returned something, but not a constant,
2958 transform __strcpy_chk into __memcpy_chk. */
2959 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2960 if (!fn)
2961 return false;
2962
74e3c262 2963 gimple_seq stmts = NULL;
770fe3a3 2964 len = force_gimple_operand (len, &stmts, true, NULL_TREE);
74e3c262
RB
2965 len = gimple_convert (&stmts, loc, size_type_node, len);
2966 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
2967 build_int_cst (size_type_node, 1));
2968 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
355fe088 2969 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
fef5a0d9
RB
2970 replace_call_with_call_and_fold (gsi, repl);
2971 return true;
2972 }
e256dfce 2973 }
fef5a0d9
RB
2974 else
2975 maxlen = len;
2976
2977 if (! tree_int_cst_lt (maxlen, size))
2978 return false;
e256dfce
RG
2979 }
2980
fef5a0d9
RB
2981 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
2982 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
2983 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
2984 if (!fn)
2985 return false;
2986
355fe088 2987 gimple *repl = gimple_build_call (fn, 2, dest, src);
fef5a0d9
RB
2988 replace_call_with_call_and_fold (gsi, repl);
2989 return true;
2990}
2991
2992/* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
2993 are the arguments to the call. If MAXLEN is not NULL, it is maximum
2994 length passed as third argument. IGNORE is true if return value can be
2995 ignored. FCODE is the BUILT_IN_* code of the builtin. */
2996
2997static bool
2998gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
2999 tree dest, tree src,
dcb7fae2 3000 tree len, tree size,
fef5a0d9
RB
3001 enum built_in_function fcode)
3002{
355fe088 3003 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2 3004 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
fef5a0d9
RB
3005 tree fn;
3006
3007 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
cbdd87d4 3008 {
fef5a0d9
RB
3009 /* If return value of __stpncpy_chk is ignored,
3010 optimize into __strncpy_chk. */
3011 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
3012 if (fn)
3013 {
355fe088 3014 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
fef5a0d9
RB
3015 replace_call_with_call_and_fold (gsi, repl);
3016 return true;
3017 }
cbdd87d4
RG
3018 }
3019
fef5a0d9
RB
3020 if (! tree_fits_uhwi_p (size))
3021 return false;
3022
598f7235 3023 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
fef5a0d9 3024 if (! integer_all_onesp (size))
cbdd87d4 3025 {
fef5a0d9 3026 if (! tree_fits_uhwi_p (len))
fe2ef088 3027 {
fef5a0d9
RB
3028 /* If LEN is not constant, try MAXLEN too.
3029 For MAXLEN only allow optimizing into non-_ocs function
3030 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
3031 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
3032 return false;
8a1561bc 3033 }
fef5a0d9
RB
3034 else
3035 maxlen = len;
3036
3037 if (tree_int_cst_lt (size, maxlen))
3038 return false;
cbdd87d4
RG
3039 }
3040
fef5a0d9
RB
3041 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
3042 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
3043 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
3044 if (!fn)
3045 return false;
3046
355fe088 3047 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
fef5a0d9
RB
3048 replace_call_with_call_and_fold (gsi, repl);
3049 return true;
cbdd87d4
RG
3050}
3051
2625bb5d
RB
3052/* Fold function call to builtin stpcpy with arguments DEST and SRC.
3053 Return NULL_TREE if no simplification can be made. */
3054
3055static bool
3056gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
3057{
3058 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3059 location_t loc = gimple_location (stmt);
3060 tree dest = gimple_call_arg (stmt, 0);
3061 tree src = gimple_call_arg (stmt, 1);
01b0acb7 3062 tree fn, lenp1;
2625bb5d
RB
3063
3064 /* If the result is unused, replace stpcpy with strcpy. */
3065 if (gimple_call_lhs (stmt) == NULL_TREE)
3066 {
3067 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3068 if (!fn)
3069 return false;
3070 gimple_call_set_fndecl (stmt, fn);
3071 fold_stmt (gsi);
3072 return true;
3073 }
3074
01b0acb7 3075 /* Set to non-null if ARG refers to an unterminated array. */
3f46ef1f 3076 c_strlen_data data = { };
7d583f42 3077 tree len = c_strlen (src, 1, &data, 1);
2625bb5d
RB
3078 if (!len
3079 || TREE_CODE (len) != INTEGER_CST)
01b0acb7 3080 {
7d583f42
JL
3081 data.decl = unterminated_array (src);
3082 if (!data.decl)
01b0acb7
MS
3083 return false;
3084 }
3085
7d583f42 3086 if (data.decl)
01b0acb7
MS
3087 {
3088 /* Avoid folding calls with unterminated arrays. */
3089 if (!gimple_no_warning_p (stmt))
7d583f42 3090 warn_string_no_nul (loc, "stpcpy", src, data.decl);
01b0acb7
MS
3091 gimple_set_no_warning (stmt, true);
3092 return false;
3093 }
2625bb5d
RB
3094
3095 if (optimize_function_for_size_p (cfun)
3096 /* If length is zero it's small enough. */
3097 && !integer_zerop (len))
3098 return false;
3099
3100 /* If the source has a known length replace stpcpy with memcpy. */
3101 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3102 if (!fn)
3103 return false;
3104
3105 gimple_seq stmts = NULL;
3106 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
3107 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
3108 tem, build_int_cst (size_type_node, 1));
3109 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3110 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
779724a5 3111 gimple_move_vops (repl, stmt);
2625bb5d
RB
3112 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
3113 /* Replace the result with dest + len. */
3114 stmts = NULL;
3115 tem = gimple_convert (&stmts, loc, sizetype, len);
3116 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3117 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
3118 POINTER_PLUS_EXPR, dest, tem);
f6b4dc28 3119 gsi_replace (gsi, ret, false);
2625bb5d
RB
3120 /* Finally fold the memcpy call. */
3121 gimple_stmt_iterator gsi2 = *gsi;
3122 gsi_prev (&gsi2);
3123 fold_stmt (&gsi2);
3124 return true;
3125}
3126
fef5a0d9
RB
3127/* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
3128 NULL_TREE if a normal call should be emitted rather than expanding
3129 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
3130 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
3131 passed as second argument. */
cbdd87d4
RG
3132
3133static bool
fef5a0d9 3134gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
dcb7fae2 3135 enum built_in_function fcode)
cbdd87d4 3136{
538dd0b7 3137 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
fef5a0d9
RB
3138 tree dest, size, len, fn, fmt, flag;
3139 const char *fmt_str;
cbdd87d4 3140
fef5a0d9
RB
3141 /* Verify the required arguments in the original call. */
3142 if (gimple_call_num_args (stmt) < 5)
3143 return false;
cbdd87d4 3144
fef5a0d9
RB
3145 dest = gimple_call_arg (stmt, 0);
3146 len = gimple_call_arg (stmt, 1);
3147 flag = gimple_call_arg (stmt, 2);
3148 size = gimple_call_arg (stmt, 3);
3149 fmt = gimple_call_arg (stmt, 4);
3150
3151 if (! tree_fits_uhwi_p (size))
3152 return false;
3153
3154 if (! integer_all_onesp (size))
3155 {
598f7235 3156 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
fef5a0d9 3157 if (! tree_fits_uhwi_p (len))
cbdd87d4 3158 {
fef5a0d9
RB
3159 /* If LEN is not constant, try MAXLEN too.
3160 For MAXLEN only allow optimizing into non-_ocs function
3161 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
3162 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
cbdd87d4
RG
3163 return false;
3164 }
3165 else
fef5a0d9 3166 maxlen = len;
cbdd87d4 3167
fef5a0d9
RB
3168 if (tree_int_cst_lt (size, maxlen))
3169 return false;
3170 }
cbdd87d4 3171
fef5a0d9
RB
3172 if (!init_target_chars ())
3173 return false;
cbdd87d4 3174
fef5a0d9
RB
3175 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
3176 or if format doesn't contain % chars or is "%s". */
3177 if (! integer_zerop (flag))
3178 {
3179 fmt_str = c_getstr (fmt);
3180 if (fmt_str == NULL)
3181 return false;
3182 if (strchr (fmt_str, target_percent) != NULL
3183 && strcmp (fmt_str, target_percent_s))
3184 return false;
cbdd87d4
RG
3185 }
3186
fef5a0d9
RB
3187 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
3188 available. */
3189 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
3190 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
3191 if (!fn)
491e0b9b
RG
3192 return false;
3193
fef5a0d9
RB
3194 /* Replace the called function and the first 5 argument by 3 retaining
3195 trailing varargs. */
3196 gimple_call_set_fndecl (stmt, fn);
3197 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
3198 gimple_call_set_arg (stmt, 0, dest);
3199 gimple_call_set_arg (stmt, 1, len);
3200 gimple_call_set_arg (stmt, 2, fmt);
3201 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
3202 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
3203 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
3204 fold_stmt (gsi);
3205 return true;
3206}
cbdd87d4 3207
fef5a0d9
RB
3208/* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
3209 Return NULL_TREE if a normal call should be emitted rather than
3210 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
3211 or BUILT_IN_VSPRINTF_CHK. */
cbdd87d4 3212
fef5a0d9
RB
3213static bool
3214gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
3215 enum built_in_function fcode)
3216{
538dd0b7 3217 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
fef5a0d9
RB
3218 tree dest, size, len, fn, fmt, flag;
3219 const char *fmt_str;
3220 unsigned nargs = gimple_call_num_args (stmt);
cbdd87d4 3221
fef5a0d9
RB
3222 /* Verify the required arguments in the original call. */
3223 if (nargs < 4)
3224 return false;
3225 dest = gimple_call_arg (stmt, 0);
3226 flag = gimple_call_arg (stmt, 1);
3227 size = gimple_call_arg (stmt, 2);
3228 fmt = gimple_call_arg (stmt, 3);
3229
3230 if (! tree_fits_uhwi_p (size))
3231 return false;
3232
3233 len = NULL_TREE;
3234
3235 if (!init_target_chars ())
3236 return false;
3237
3238 /* Check whether the format is a literal string constant. */
3239 fmt_str = c_getstr (fmt);
3240 if (fmt_str != NULL)
3241 {
3242 /* If the format doesn't contain % args or %%, we know the size. */
3243 if (strchr (fmt_str, target_percent) == 0)
cbdd87d4 3244 {
fef5a0d9
RB
3245 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
3246 len = build_int_cstu (size_type_node, strlen (fmt_str));
3247 }
3248 /* If the format is "%s" and first ... argument is a string literal,
3249 we know the size too. */
3250 else if (fcode == BUILT_IN_SPRINTF_CHK
3251 && strcmp (fmt_str, target_percent_s) == 0)
3252 {
3253 tree arg;
cbdd87d4 3254
fef5a0d9
RB
3255 if (nargs == 5)
3256 {
3257 arg = gimple_call_arg (stmt, 4);
3258 if (POINTER_TYPE_P (TREE_TYPE (arg)))
3259 {
3260 len = c_strlen (arg, 1);
3261 if (! len || ! tree_fits_uhwi_p (len))
3262 len = NULL_TREE;
3263 }
3264 }
3265 }
3266 }
cbdd87d4 3267
fef5a0d9
RB
3268 if (! integer_all_onesp (size))
3269 {
3270 if (! len || ! tree_int_cst_lt (len, size))
3271 return false;
3272 }
cbdd87d4 3273
fef5a0d9
RB
3274 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
3275 or if format doesn't contain % chars or is "%s". */
3276 if (! integer_zerop (flag))
3277 {
3278 if (fmt_str == NULL)
3279 return false;
3280 if (strchr (fmt_str, target_percent) != NULL
3281 && strcmp (fmt_str, target_percent_s))
3282 return false;
3283 }
cbdd87d4 3284
fef5a0d9
RB
3285 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
3286 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
3287 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
3288 if (!fn)
3289 return false;
3290
3291 /* Replace the called function and the first 4 argument by 2 retaining
3292 trailing varargs. */
3293 gimple_call_set_fndecl (stmt, fn);
3294 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
3295 gimple_call_set_arg (stmt, 0, dest);
3296 gimple_call_set_arg (stmt, 1, fmt);
3297 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
3298 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
3299 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
3300 fold_stmt (gsi);
3301 return true;
3302}
3303
35770bb2
RB
3304/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
3305 ORIG may be null if this is a 2-argument call. We don't attempt to
3306 simplify calls with more than 3 arguments.
3307
a104bd88 3308 Return true if simplification was possible, otherwise false. */
35770bb2 3309
a104bd88 3310bool
dcb7fae2 3311gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
35770bb2 3312{
355fe088 3313 gimple *stmt = gsi_stmt (*gsi);
35770bb2
RB
3314 tree dest = gimple_call_arg (stmt, 0);
3315 tree fmt = gimple_call_arg (stmt, 1);
3316 tree orig = NULL_TREE;
3317 const char *fmt_str = NULL;
3318
3319 /* Verify the required arguments in the original call. We deal with two
3320 types of sprintf() calls: 'sprintf (str, fmt)' and
3321 'sprintf (dest, "%s", orig)'. */
3322 if (gimple_call_num_args (stmt) > 3)
3323 return false;
3324
3325 if (gimple_call_num_args (stmt) == 3)
3326 orig = gimple_call_arg (stmt, 2);
3327
3328 /* Check whether the format is a literal string constant. */
3329 fmt_str = c_getstr (fmt);
3330 if (fmt_str == NULL)
3331 return false;
3332
3333 if (!init_target_chars ())
3334 return false;
3335
3336 /* If the format doesn't contain % args or %%, use strcpy. */
3337 if (strchr (fmt_str, target_percent) == NULL)
3338 {
3339 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3340
3341 if (!fn)
3342 return false;
3343
3344 /* Don't optimize sprintf (buf, "abc", ptr++). */
3345 if (orig)
3346 return false;
3347
3348 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
3349 'format' is known to contain no % formats. */
3350 gimple_seq stmts = NULL;
355fe088 3351 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
01b0acb7
MS
3352
3353 /* Propagate the NO_WARNING bit to avoid issuing the same
3354 warning more than once. */
3355 if (gimple_no_warning_p (stmt))
3356 gimple_set_no_warning (repl, true);
3357
35770bb2 3358 gimple_seq_add_stmt_without_update (&stmts, repl);
a73468e8 3359 if (tree lhs = gimple_call_lhs (stmt))
35770bb2 3360 {
a73468e8
JJ
3361 repl = gimple_build_assign (lhs, build_int_cst (TREE_TYPE (lhs),
3362 strlen (fmt_str)));
35770bb2
RB
3363 gimple_seq_add_stmt_without_update (&stmts, repl);
3364 gsi_replace_with_seq_vops (gsi, stmts);
3365 /* gsi now points at the assignment to the lhs, get a
3366 stmt iterator to the memcpy call.
3367 ??? We can't use gsi_for_stmt as that doesn't work when the
3368 CFG isn't built yet. */
3369 gimple_stmt_iterator gsi2 = *gsi;
3370 gsi_prev (&gsi2);
3371 fold_stmt (&gsi2);
3372 }
3373 else
3374 {
3375 gsi_replace_with_seq_vops (gsi, stmts);
3376 fold_stmt (gsi);
3377 }
3378 return true;
3379 }
3380
3381 /* If the format is "%s", use strcpy if the result isn't used. */
3382 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3383 {
3384 tree fn;
3385 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3386
3387 if (!fn)
3388 return false;
3389
3390 /* Don't crash on sprintf (str1, "%s"). */
3391 if (!orig)
3392 return false;
3393
dcb7fae2
RB
3394 tree orig_len = NULL_TREE;
3395 if (gimple_call_lhs (stmt))
35770bb2 3396 {
598f7235 3397 orig_len = get_maxval_strlen (orig, SRK_STRLEN);
d7e78447 3398 if (!orig_len)
35770bb2
RB
3399 return false;
3400 }
3401
3402 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
3403 gimple_seq stmts = NULL;
355fe088 3404 gimple *repl = gimple_build_call (fn, 2, dest, orig);
01b0acb7
MS
3405
3406 /* Propagate the NO_WARNING bit to avoid issuing the same
3407 warning more than once. */
3408 if (gimple_no_warning_p (stmt))
3409 gimple_set_no_warning (repl, true);
3410
35770bb2 3411 gimple_seq_add_stmt_without_update (&stmts, repl);
a73468e8 3412 if (tree lhs = gimple_call_lhs (stmt))
35770bb2 3413 {
a73468e8 3414 if (!useless_type_conversion_p (TREE_TYPE (lhs),
d7e78447 3415 TREE_TYPE (orig_len)))
a73468e8
JJ
3416 orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
3417 repl = gimple_build_assign (lhs, orig_len);
35770bb2
RB
3418 gimple_seq_add_stmt_without_update (&stmts, repl);
3419 gsi_replace_with_seq_vops (gsi, stmts);
3420 /* gsi now points at the assignment to the lhs, get a
3421 stmt iterator to the memcpy call.
3422 ??? We can't use gsi_for_stmt as that doesn't work when the
3423 CFG isn't built yet. */
3424 gimple_stmt_iterator gsi2 = *gsi;
3425 gsi_prev (&gsi2);
3426 fold_stmt (&gsi2);
3427 }
3428 else
3429 {
3430 gsi_replace_with_seq_vops (gsi, stmts);
3431 fold_stmt (gsi);
3432 }
3433 return true;
3434 }
3435 return false;
3436}
3437
d7e78447
RB
3438/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
3439 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
3440 attempt to simplify calls with more than 4 arguments.
35770bb2 3441
a104bd88 3442 Return true if simplification was possible, otherwise false. */
d7e78447 3443
a104bd88 3444bool
dcb7fae2 3445gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
d7e78447 3446{
538dd0b7 3447 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
d7e78447
RB
3448 tree dest = gimple_call_arg (stmt, 0);
3449 tree destsize = gimple_call_arg (stmt, 1);
3450 tree fmt = gimple_call_arg (stmt, 2);
3451 tree orig = NULL_TREE;
3452 const char *fmt_str = NULL;
3453
3454 if (gimple_call_num_args (stmt) > 4)
3455 return false;
3456
3457 if (gimple_call_num_args (stmt) == 4)
3458 orig = gimple_call_arg (stmt, 3);
3459
3460 if (!tree_fits_uhwi_p (destsize))
3461 return false;
3462 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
3463
3464 /* Check whether the format is a literal string constant. */
3465 fmt_str = c_getstr (fmt);
3466 if (fmt_str == NULL)
3467 return false;
3468
3469 if (!init_target_chars ())
3470 return false;
3471
3472 /* If the format doesn't contain % args or %%, use strcpy. */
3473 if (strchr (fmt_str, target_percent) == NULL)
3474 {
3475 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3476 if (!fn)
3477 return false;
3478
3479 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
3480 if (orig)
3481 return false;
3482
3483 /* We could expand this as
3484 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
3485 or to
3486 memcpy (str, fmt_with_nul_at_cstm1, cst);
3487 but in the former case that might increase code size
3488 and in the latter case grow .rodata section too much.
3489 So punt for now. */
3490 size_t len = strlen (fmt_str);
3491 if (len >= destlen)
3492 return false;
3493
3494 gimple_seq stmts = NULL;
355fe088 3495 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
d7e78447 3496 gimple_seq_add_stmt_without_update (&stmts, repl);
a73468e8 3497 if (tree lhs = gimple_call_lhs (stmt))
d7e78447 3498 {
a73468e8
JJ
3499 repl = gimple_build_assign (lhs,
3500 build_int_cst (TREE_TYPE (lhs), len));
d7e78447
RB
3501 gimple_seq_add_stmt_without_update (&stmts, repl);
3502 gsi_replace_with_seq_vops (gsi, stmts);
3503 /* gsi now points at the assignment to the lhs, get a
3504 stmt iterator to the memcpy call.
3505 ??? We can't use gsi_for_stmt as that doesn't work when the
3506 CFG isn't built yet. */
3507 gimple_stmt_iterator gsi2 = *gsi;
3508 gsi_prev (&gsi2);
3509 fold_stmt (&gsi2);
3510 }
3511 else
3512 {
3513 gsi_replace_with_seq_vops (gsi, stmts);
3514 fold_stmt (gsi);
3515 }
3516 return true;
3517 }
3518
3519 /* If the format is "%s", use strcpy if the result isn't used. */
3520 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3521 {
3522 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3523 if (!fn)
3524 return false;
3525
3526 /* Don't crash on snprintf (str1, cst, "%s"). */
3527 if (!orig)
3528 return false;
3529
598f7235 3530 tree orig_len = get_maxval_strlen (orig, SRK_STRLEN);
af9db3a7 3531 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
dcb7fae2 3532 return false;
d7e78447
RB
3533
3534 /* We could expand this as
3535 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
3536 or to
3537 memcpy (str1, str2_with_nul_at_cstm1, cst);
3538 but in the former case that might increase code size
3539 and in the latter case grow .rodata section too much.
3540 So punt for now. */
3541 if (compare_tree_int (orig_len, destlen) >= 0)
3542 return false;
3543
3544 /* Convert snprintf (str1, cst, "%s", str2) into
3545 strcpy (str1, str2) if strlen (str2) < cst. */
3546 gimple_seq stmts = NULL;
355fe088 3547 gimple *repl = gimple_build_call (fn, 2, dest, orig);
d7e78447 3548 gimple_seq_add_stmt_without_update (&stmts, repl);
a73468e8 3549 if (tree lhs = gimple_call_lhs (stmt))
d7e78447 3550 {
a73468e8 3551 if (!useless_type_conversion_p (TREE_TYPE (lhs),
d7e78447 3552 TREE_TYPE (orig_len)))
a73468e8
JJ
3553 orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
3554 repl = gimple_build_assign (lhs, orig_len);
d7e78447
RB
3555 gimple_seq_add_stmt_without_update (&stmts, repl);
3556 gsi_replace_with_seq_vops (gsi, stmts);
3557 /* gsi now points at the assignment to the lhs, get a
3558 stmt iterator to the memcpy call.
3559 ??? We can't use gsi_for_stmt as that doesn't work when the
3560 CFG isn't built yet. */
3561 gimple_stmt_iterator gsi2 = *gsi;
3562 gsi_prev (&gsi2);
3563 fold_stmt (&gsi2);
3564 }
3565 else
3566 {
3567 gsi_replace_with_seq_vops (gsi, stmts);
3568 fold_stmt (gsi);
3569 }
3570 return true;
3571 }
3572 return false;
3573}
35770bb2 3574
edd7ae68
RB
3575/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
3576 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
3577 more than 3 arguments, and ARG may be null in the 2-argument case.
3578
3579 Return NULL_TREE if no simplification was possible, otherwise return the
3580 simplified form of the call as a tree. FCODE is the BUILT_IN_*
3581 code of the function to be simplified. */
3582
3583static bool
3584gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
3585 tree fp, tree fmt, tree arg,
3586 enum built_in_function fcode)
3587{
3588 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3589 tree fn_fputc, fn_fputs;
3590 const char *fmt_str = NULL;
3591
3592 /* If the return value is used, don't do the transformation. */
3593 if (gimple_call_lhs (stmt) != NULL_TREE)
3594 return false;
3595
3596 /* Check whether the format is a literal string constant. */
3597 fmt_str = c_getstr (fmt);
3598 if (fmt_str == NULL)
3599 return false;
3600
3601 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
3602 {
3603 /* If we're using an unlocked function, assume the other
3604 unlocked functions exist explicitly. */
3605 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
3606 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
3607 }
3608 else
3609 {
3610 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
3611 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
3612 }
3613
3614 if (!init_target_chars ())
3615 return false;
3616
3617 /* If the format doesn't contain % args or %%, use strcpy. */
3618 if (strchr (fmt_str, target_percent) == NULL)
3619 {
3620 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
3621 && arg)
3622 return false;
3623
3624 /* If the format specifier was "", fprintf does nothing. */
3625 if (fmt_str[0] == '\0')
3626 {
3627 replace_call_with_value (gsi, NULL_TREE);
3628 return true;
3629 }
3630
3631 /* When "string" doesn't contain %, replace all cases of
3632 fprintf (fp, string) with fputs (string, fp). The fputs
3633 builtin will take care of special cases like length == 1. */
3634 if (fn_fputs)
3635 {
3636 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
3637 replace_call_with_call_and_fold (gsi, repl);
3638 return true;
3639 }
3640 }
3641
3642 /* The other optimizations can be done only on the non-va_list variants. */
3643 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
3644 return false;
3645
3646 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3647 else if (strcmp (fmt_str, target_percent_s) == 0)
3648 {
3649 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3650 return false;
3651 if (fn_fputs)
3652 {
3653 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
3654 replace_call_with_call_and_fold (gsi, repl);
3655 return true;
3656 }
3657 }
3658
3659 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3660 else if (strcmp (fmt_str, target_percent_c) == 0)
3661 {
3662 if (!arg
3663 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
3664 return false;
3665 if (fn_fputc)
3666 {
3667 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
3668 replace_call_with_call_and_fold (gsi, repl);
3669 return true;
3670 }
3671 }
3672
3673 return false;
3674}
3675
ad03a744
RB
3676/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3677 FMT and ARG are the arguments to the call; we don't fold cases with
3678 more than 2 arguments, and ARG may be null if this is a 1-argument case.
3679
3680 Return NULL_TREE if no simplification was possible, otherwise return the
3681 simplified form of the call as a tree. FCODE is the BUILT_IN_*
3682 code of the function to be simplified. */
3683
3684static bool
3685gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
3686 tree arg, enum built_in_function fcode)
3687{
3688 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3689 tree fn_putchar, fn_puts, newarg;
3690 const char *fmt_str = NULL;
3691
3692 /* If the return value is used, don't do the transformation. */
3693 if (gimple_call_lhs (stmt) != NULL_TREE)
3694 return false;
3695
3696 /* Check whether the format is a literal string constant. */
3697 fmt_str = c_getstr (fmt);
3698 if (fmt_str == NULL)
3699 return false;
3700
3701 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
3702 {
3703 /* If we're using an unlocked function, assume the other
3704 unlocked functions exist explicitly. */
3705 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
3706 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
3707 }
3708 else
3709 {
3710 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
3711 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
3712 }
3713
3714 if (!init_target_chars ())
3715 return false;
3716
3717 if (strcmp (fmt_str, target_percent_s) == 0
3718 || strchr (fmt_str, target_percent) == NULL)
3719 {
3720 const char *str;
3721
3722 if (strcmp (fmt_str, target_percent_s) == 0)
3723 {
3724 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3725 return false;
3726
3727 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3728 return false;
3729
3730 str = c_getstr (arg);
3731 if (str == NULL)
3732 return false;
3733 }
3734 else
3735 {
3736 /* The format specifier doesn't contain any '%' characters. */
3737 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
3738 && arg)
3739 return false;
3740 str = fmt_str;
3741 }
3742
3743 /* If the string was "", printf does nothing. */
3744 if (str[0] == '\0')
3745 {
3746 replace_call_with_value (gsi, NULL_TREE);
3747 return true;
3748 }
3749
3750 /* If the string has length of 1, call putchar. */
3751 if (str[1] == '\0')
3752 {
3753 /* Given printf("c"), (where c is any one character,)
3754 convert "c"[0] to an int and pass that to the replacement
3755 function. */
3756 newarg = build_int_cst (integer_type_node, str[0]);
3757 if (fn_putchar)
3758 {
3759 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
3760 replace_call_with_call_and_fold (gsi, repl);
3761 return true;
3762 }
3763 }
3764 else
3765 {
3766 /* If the string was "string\n", call puts("string"). */
3767 size_t len = strlen (str);
3768 if ((unsigned char)str[len - 1] == target_newline
3769 && (size_t) (int) len == len
3770 && (int) len > 0)
3771 {
3772 char *newstr;
ad03a744
RB
3773
3774 /* Create a NUL-terminated string that's one char shorter
3775 than the original, stripping off the trailing '\n'. */
a353fec4 3776 newstr = xstrdup (str);
ad03a744 3777 newstr[len - 1] = '\0';
a353fec4
BE
3778 newarg = build_string_literal (len, newstr);
3779 free (newstr);
ad03a744
RB
3780 if (fn_puts)
3781 {
3782 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
3783 replace_call_with_call_and_fold (gsi, repl);
3784 return true;
3785 }
3786 }
3787 else
3788 /* We'd like to arrange to call fputs(string,stdout) here,
3789 but we need stdout and don't have a way to get it yet. */
3790 return false;
3791 }
3792 }
3793
3794 /* The other optimizations can be done only on the non-va_list variants. */
3795 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3796 return false;
3797
3798 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
3799 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
3800 {
3801 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3802 return false;
3803 if (fn_puts)
3804 {
3805 gcall *repl = gimple_build_call (fn_puts, 1, arg);
3806 replace_call_with_call_and_fold (gsi, repl);
3807 return true;
3808 }
3809 }
3810
3811 /* If the format specifier was "%c", call __builtin_putchar(arg). */
3812 else if (strcmp (fmt_str, target_percent_c) == 0)
3813 {
3814 if (!arg || ! useless_type_conversion_p (integer_type_node,
3815 TREE_TYPE (arg)))
3816 return false;
3817 if (fn_putchar)
3818 {
3819 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
3820 replace_call_with_call_and_fold (gsi, repl);
3821 return true;
3822 }
3823 }
3824
3825 return false;
3826}
3827
edd7ae68 3828
fef5a0d9
RB
3829
3830/* Fold a call to __builtin_strlen with known length LEN. */
3831
3832static bool
dcb7fae2 3833gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
fef5a0d9 3834{
355fe088 3835 gimple *stmt = gsi_stmt (*gsi);
e08341bb 3836 tree arg = gimple_call_arg (stmt, 0);
c42d0aa0
MS
3837
3838 wide_int minlen;
3839 wide_int maxlen;
3840
5d6655eb 3841 c_strlen_data lendata = { };
03c4a945 3842 if (get_range_strlen (arg, &lendata, /* eltsize = */ 1)
5d6655eb
MS
3843 && !lendata.decl
3844 && lendata.minlen && TREE_CODE (lendata.minlen) == INTEGER_CST
3845 && lendata.maxlen && TREE_CODE (lendata.maxlen) == INTEGER_CST)
c42d0aa0
MS
3846 {
3847 /* The range of lengths refers to either a single constant
3848 string or to the longest and shortest constant string
3849 referenced by the argument of the strlen() call, or to
3850 the strings that can possibly be stored in the arrays
3851 the argument refers to. */
5d6655eb
MS
3852 minlen = wi::to_wide (lendata.minlen);
3853 maxlen = wi::to_wide (lendata.maxlen);
c42d0aa0
MS
3854 }
3855 else
3856 {
3857 unsigned prec = TYPE_PRECISION (sizetype);
3858
3859 minlen = wi::shwi (0, prec);
3860 maxlen = wi::to_wide (max_object_size (), prec) - 2;
3861 }
3862
3863 if (minlen == maxlen)
3864 {
5d6655eb
MS
3865 /* Fold the strlen call to a constant. */
3866 tree type = TREE_TYPE (lendata.minlen);
3867 tree len = force_gimple_operand_gsi (gsi,
3868 wide_int_to_tree (type, minlen),
3869 true, NULL, true, GSI_SAME_STMT);
3870 replace_call_with_value (gsi, len);
c42d0aa0
MS
3871 return true;
3872 }
3873
d4bf6975 3874 /* Set the strlen() range to [0, MAXLEN]. */
a7bf6c08 3875 if (tree lhs = gimple_call_lhs (stmt))
34fcf41e 3876 set_strlen_range (lhs, minlen, maxlen);
c42d0aa0
MS
3877
3878 return false;
cbdd87d4
RG
3879}
3880
48126138
NS
3881/* Fold a call to __builtin_acc_on_device. */
3882
3883static bool
3884gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
3885{
3886 /* Defer folding until we know which compiler we're in. */
3887 if (symtab->state != EXPANSION)
3888 return false;
3889
3890 unsigned val_host = GOMP_DEVICE_HOST;
3891 unsigned val_dev = GOMP_DEVICE_NONE;
3892
3893#ifdef ACCEL_COMPILER
3894 val_host = GOMP_DEVICE_NOT_HOST;
3895 val_dev = ACCEL_COMPILER_acc_device;
3896#endif
3897
3898 location_t loc = gimple_location (gsi_stmt (*gsi));
3899
3900 tree host_eq = make_ssa_name (boolean_type_node);
3901 gimple *host_ass = gimple_build_assign
3902 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
3903 gimple_set_location (host_ass, loc);
3904 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
3905
3906 tree dev_eq = make_ssa_name (boolean_type_node);
3907 gimple *dev_ass = gimple_build_assign
3908 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
3909 gimple_set_location (dev_ass, loc);
3910 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
3911
3912 tree result = make_ssa_name (boolean_type_node);
3913 gimple *result_ass = gimple_build_assign
3914 (result, BIT_IOR_EXPR, host_eq, dev_eq);
3915 gimple_set_location (result_ass, loc);
3916 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
3917
3918 replace_call_with_value (gsi, result);
3919
3920 return true;
3921}
cbdd87d4 3922
fe75f732
PK
3923/* Fold realloc (0, n) -> malloc (n). */
3924
3925static bool
3926gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
3927{
3928 gimple *stmt = gsi_stmt (*gsi);
3929 tree arg = gimple_call_arg (stmt, 0);
3930 tree size = gimple_call_arg (stmt, 1);
3931
3932 if (operand_equal_p (arg, null_pointer_node, 0))
3933 {
3934 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
3935 if (fn_malloc)
3936 {
3937 gcall *repl = gimple_build_call (fn_malloc, 1, size);
3938 replace_call_with_call_and_fold (gsi, repl);
3939 return true;
3940 }
3941 }
3942 return false;
3943}
3944
dcb7fae2
RB
3945/* Fold the non-target builtin at *GSI and return whether any simplification
3946 was made. */
cbdd87d4 3947
fef5a0d9 3948static bool
dcb7fae2 3949gimple_fold_builtin (gimple_stmt_iterator *gsi)
cbdd87d4 3950{
538dd0b7 3951 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
fef5a0d9 3952 tree callee = gimple_call_fndecl (stmt);
cbdd87d4 3953
dcb7fae2
RB
3954 /* Give up for always_inline inline builtins until they are
3955 inlined. */
3956 if (avoid_folding_inline_builtin (callee))
3957 return false;
cbdd87d4 3958
edd7ae68
RB
3959 unsigned n = gimple_call_num_args (stmt);
3960 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
3961 switch (fcode)
cbdd87d4 3962 {
b3d8d88e
MS
3963 case BUILT_IN_BCMP:
3964 return gimple_fold_builtin_bcmp (gsi);
3965 case BUILT_IN_BCOPY:
3966 return gimple_fold_builtin_bcopy (gsi);
dcb7fae2 3967 case BUILT_IN_BZERO:
b3d8d88e
MS
3968 return gimple_fold_builtin_bzero (gsi);
3969
dcb7fae2
RB
3970 case BUILT_IN_MEMSET:
3971 return gimple_fold_builtin_memset (gsi,
3972 gimple_call_arg (stmt, 1),
3973 gimple_call_arg (stmt, 2));
dcb7fae2 3974 case BUILT_IN_MEMCPY:
dcb7fae2 3975 case BUILT_IN_MEMPCPY:
dcb7fae2
RB
3976 case BUILT_IN_MEMMOVE:
3977 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
0d67a510 3978 gimple_call_arg (stmt, 1), fcode);
dcb7fae2
RB
3979 case BUILT_IN_SPRINTF_CHK:
3980 case BUILT_IN_VSPRINTF_CHK:
edd7ae68 3981 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
dcb7fae2
RB
3982 case BUILT_IN_STRCAT_CHK:
3983 return gimple_fold_builtin_strcat_chk (gsi);
745583f9
RB
3984 case BUILT_IN_STRNCAT_CHK:
3985 return gimple_fold_builtin_strncat_chk (gsi);
cbdd87d4 3986 case BUILT_IN_STRLEN:
dcb7fae2 3987 return gimple_fold_builtin_strlen (gsi);
cbdd87d4 3988 case BUILT_IN_STRCPY:
dcb7fae2 3989 return gimple_fold_builtin_strcpy (gsi,
fef5a0d9 3990 gimple_call_arg (stmt, 0),
dcb7fae2 3991 gimple_call_arg (stmt, 1));
cbdd87d4 3992 case BUILT_IN_STRNCPY:
dcb7fae2 3993 return gimple_fold_builtin_strncpy (gsi,
fef5a0d9
RB
3994 gimple_call_arg (stmt, 0),
3995 gimple_call_arg (stmt, 1),
dcb7fae2 3996 gimple_call_arg (stmt, 2));
9a7eefec 3997 case BUILT_IN_STRCAT:
dcb7fae2
RB
3998 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
3999 gimple_call_arg (stmt, 1));
ad03a744
RB
4000 case BUILT_IN_STRNCAT:
4001 return gimple_fold_builtin_strncat (gsi);
71dea1dd 4002 case BUILT_IN_INDEX:
912d9ec3 4003 case BUILT_IN_STRCHR:
71dea1dd
WD
4004 return gimple_fold_builtin_strchr (gsi, false);
4005 case BUILT_IN_RINDEX:
4006 case BUILT_IN_STRRCHR:
4007 return gimple_fold_builtin_strchr (gsi, true);
c8952930
JJ
4008 case BUILT_IN_STRSTR:
4009 return gimple_fold_builtin_strstr (gsi);
a918bfbf 4010 case BUILT_IN_STRCMP:
8b0b334a 4011 case BUILT_IN_STRCMP_EQ:
a918bfbf
ML
4012 case BUILT_IN_STRCASECMP:
4013 case BUILT_IN_STRNCMP:
8b0b334a 4014 case BUILT_IN_STRNCMP_EQ:
a918bfbf
ML
4015 case BUILT_IN_STRNCASECMP:
4016 return gimple_fold_builtin_string_compare (gsi);
488c6247
ML
4017 case BUILT_IN_MEMCHR:
4018 return gimple_fold_builtin_memchr (gsi);
cbdd87d4 4019 case BUILT_IN_FPUTS:
dcb7fae2
RB
4020 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
4021 gimple_call_arg (stmt, 1), false);
cbdd87d4 4022 case BUILT_IN_FPUTS_UNLOCKED:
dcb7fae2
RB
4023 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
4024 gimple_call_arg (stmt, 1), true);
cbdd87d4
RG
4025 case BUILT_IN_MEMCPY_CHK:
4026 case BUILT_IN_MEMPCPY_CHK:
4027 case BUILT_IN_MEMMOVE_CHK:
4028 case BUILT_IN_MEMSET_CHK:
dcb7fae2 4029 return gimple_fold_builtin_memory_chk (gsi,
fef5a0d9
RB
4030 gimple_call_arg (stmt, 0),
4031 gimple_call_arg (stmt, 1),
4032 gimple_call_arg (stmt, 2),
4033 gimple_call_arg (stmt, 3),
edd7ae68 4034 fcode);
2625bb5d
RB
4035 case BUILT_IN_STPCPY:
4036 return gimple_fold_builtin_stpcpy (gsi);
cbdd87d4
RG
4037 case BUILT_IN_STRCPY_CHK:
4038 case BUILT_IN_STPCPY_CHK:
dcb7fae2 4039 return gimple_fold_builtin_stxcpy_chk (gsi,
fef5a0d9
RB
4040 gimple_call_arg (stmt, 0),
4041 gimple_call_arg (stmt, 1),
4042 gimple_call_arg (stmt, 2),
edd7ae68 4043 fcode);
cbdd87d4 4044 case BUILT_IN_STRNCPY_CHK:
f3fc9b80 4045 case BUILT_IN_STPNCPY_CHK:
fef5a0d9
RB
4046 return gimple_fold_builtin_stxncpy_chk (gsi,
4047 gimple_call_arg (stmt, 0),
4048 gimple_call_arg (stmt, 1),
4049 gimple_call_arg (stmt, 2),
4050 gimple_call_arg (stmt, 3),
edd7ae68 4051 fcode);
cbdd87d4
RG
4052 case BUILT_IN_SNPRINTF_CHK:
4053 case BUILT_IN_VSNPRINTF_CHK:
edd7ae68 4054 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
a104bd88 4055
edd7ae68
RB
4056 case BUILT_IN_FPRINTF:
4057 case BUILT_IN_FPRINTF_UNLOCKED:
4058 case BUILT_IN_VFPRINTF:
4059 if (n == 2 || n == 3)
4060 return gimple_fold_builtin_fprintf (gsi,
4061 gimple_call_arg (stmt, 0),
4062 gimple_call_arg (stmt, 1),
4063 n == 3
4064 ? gimple_call_arg (stmt, 2)
4065 : NULL_TREE,
4066 fcode);
4067 break;
4068 case BUILT_IN_FPRINTF_CHK:
4069 case BUILT_IN_VFPRINTF_CHK:
4070 if (n == 3 || n == 4)
4071 return gimple_fold_builtin_fprintf (gsi,
4072 gimple_call_arg (stmt, 0),
4073 gimple_call_arg (stmt, 2),
4074 n == 4
4075 ? gimple_call_arg (stmt, 3)
4076 : NULL_TREE,
4077 fcode);
4078 break;
ad03a744
RB
4079 case BUILT_IN_PRINTF:
4080 case BUILT_IN_PRINTF_UNLOCKED:
4081 case BUILT_IN_VPRINTF:
4082 if (n == 1 || n == 2)
4083 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
4084 n == 2
4085 ? gimple_call_arg (stmt, 1)
4086 : NULL_TREE, fcode);
4087 break;
4088 case BUILT_IN_PRINTF_CHK:
4089 case BUILT_IN_VPRINTF_CHK:
4090 if (n == 2 || n == 3)
4091 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
4092 n == 3
4093 ? gimple_call_arg (stmt, 2)
4094 : NULL_TREE, fcode);
242a37f1 4095 break;
48126138
NS
4096 case BUILT_IN_ACC_ON_DEVICE:
4097 return gimple_fold_builtin_acc_on_device (gsi,
4098 gimple_call_arg (stmt, 0));
fe75f732
PK
4099 case BUILT_IN_REALLOC:
4100 return gimple_fold_builtin_realloc (gsi);
4101
fef5a0d9
RB
4102 default:;
4103 }
4104
4105 /* Try the generic builtin folder. */
4106 bool ignore = (gimple_call_lhs (stmt) == NULL);
4107 tree result = fold_call_stmt (stmt, ignore);
4108 if (result)
4109 {
4110 if (ignore)
4111 STRIP_NOPS (result);
4112 else
4113 result = fold_convert (gimple_call_return_type (stmt), result);
4114 if (!update_call_from_tree (gsi, result))
4115 gimplify_and_update_call_from_tree (gsi, result);
4116 return true;
4117 }
4118
4119 return false;
4120}
4121
451e8dae
NS
4122/* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
4123 function calls to constants, where possible. */
4124
4125static tree
4126fold_internal_goacc_dim (const gimple *call)
4127{
629b3d75
MJ
4128 int axis = oacc_get_ifn_dim_arg (call);
4129 int size = oacc_get_fn_dim_size (current_function_decl, axis);
451e8dae 4130 tree result = NULL_TREE;
67d2229e 4131 tree type = TREE_TYPE (gimple_call_lhs (call));
451e8dae 4132
67d2229e 4133 switch (gimple_call_internal_fn (call))
451e8dae 4134 {
67d2229e
TV
4135 case IFN_GOACC_DIM_POS:
4136 /* If the size is 1, we know the answer. */
4137 if (size == 1)
4138 result = build_int_cst (type, 0);
4139 break;
4140 case IFN_GOACC_DIM_SIZE:
4141 /* If the size is not dynamic, we know the answer. */
4142 if (size)
4143 result = build_int_cst (type, size);
4144 break;
4145 default:
4146 break;
451e8dae
NS
4147 }
4148
4149 return result;
4150}
4151
849a76a5
JJ
4152/* Return true if stmt is __atomic_compare_exchange_N call which is suitable
4153 for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
4154 &var where var is only addressable because of such calls. */
4155
4156bool
4157optimize_atomic_compare_exchange_p (gimple *stmt)
4158{
4159 if (gimple_call_num_args (stmt) != 6
4160 || !flag_inline_atomics
4161 || !optimize
45b2222a 4162 || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
849a76a5
JJ
4163 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
4164 || !gimple_vdef (stmt)
4165 || !gimple_vuse (stmt))
4166 return false;
4167
4168 tree fndecl = gimple_call_fndecl (stmt);
4169 switch (DECL_FUNCTION_CODE (fndecl))
4170 {
4171 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
4172 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
4173 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
4174 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
4175 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
4176 break;
4177 default:
4178 return false;
4179 }
4180
4181 tree expected = gimple_call_arg (stmt, 1);
4182 if (TREE_CODE (expected) != ADDR_EXPR
1de3c940
JJ
4183 || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
4184 return false;
4185
4186 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
4187 if (!is_gimple_reg_type (etype)
849a76a5 4188 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
1de3c940
JJ
4189 || TREE_THIS_VOLATILE (etype)
4190 || VECTOR_TYPE_P (etype)
4191 || TREE_CODE (etype) == COMPLEX_TYPE
4192 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
4193 might not preserve all the bits. See PR71716. */
4194 || SCALAR_FLOAT_TYPE_P (etype)
73a699ae
RS
4195 || maybe_ne (TYPE_PRECISION (etype),
4196 GET_MODE_BITSIZE (TYPE_MODE (etype))))
849a76a5
JJ
4197 return false;
4198
4199 tree weak = gimple_call_arg (stmt, 3);
4200 if (!integer_zerop (weak) && !integer_onep (weak))
4201 return false;
4202
4203 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
4204 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
4205 machine_mode mode = TYPE_MODE (itype);
4206
4207 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
4208 == CODE_FOR_nothing
4209 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
4210 return false;
4211
cf098191 4212 if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
849a76a5
JJ
4213 return false;
4214
4215 return true;
4216}
4217
4218/* Fold
4219 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
4220 into
4221 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
4222 i = IMAGPART_EXPR <t>;
4223 r = (_Bool) i;
4224 e = REALPART_EXPR <t>; */
4225
4226void
4227fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
4228{
4229 gimple *stmt = gsi_stmt (*gsi);
4230 tree fndecl = gimple_call_fndecl (stmt);
4231 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
4232 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
4233 tree ctype = build_complex_type (itype);
4234 tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
cc195d46
JJ
4235 bool throws = false;
4236 edge e = NULL;
849a76a5
JJ
4237 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
4238 expected);
4239 gsi_insert_before (gsi, g, GSI_SAME_STMT);
4240 gimple_stmt_iterator gsiret = gsi_for_stmt (g);
4241 if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
4242 {
4243 g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
4244 build1 (VIEW_CONVERT_EXPR, itype,
4245 gimple_assign_lhs (g)));
4246 gsi_insert_before (gsi, g, GSI_SAME_STMT);
4247 }
4248 int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
4249 + int_size_in_bytes (itype);
4250 g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
4251 gimple_call_arg (stmt, 0),
4252 gimple_assign_lhs (g),
4253 gimple_call_arg (stmt, 2),
4254 build_int_cst (integer_type_node, flag),
4255 gimple_call_arg (stmt, 4),
4256 gimple_call_arg (stmt, 5));
4257 tree lhs = make_ssa_name (ctype);
4258 gimple_call_set_lhs (g, lhs);
779724a5 4259 gimple_move_vops (g, stmt);
cc195d46 4260 tree oldlhs = gimple_call_lhs (stmt);
36bbc05d 4261 if (stmt_can_throw_internal (cfun, stmt))
cc195d46
JJ
4262 {
4263 throws = true;
4264 e = find_fallthru_edge (gsi_bb (*gsi)->succs);
4265 }
4266 gimple_call_set_nothrow (as_a <gcall *> (g),
4267 gimple_call_nothrow_p (as_a <gcall *> (stmt)));
4268 gimple_call_set_lhs (stmt, NULL_TREE);
4269 gsi_replace (gsi, g, true);
4270 if (oldlhs)
849a76a5 4271 {
849a76a5
JJ
4272 g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
4273 build1 (IMAGPART_EXPR, itype, lhs));
cc195d46
JJ
4274 if (throws)
4275 {
4276 gsi_insert_on_edge_immediate (e, g);
4277 *gsi = gsi_for_stmt (g);
4278 }
4279 else
4280 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4281 g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
4282 gsi_insert_after (gsi, g, GSI_NEW_STMT);
849a76a5 4283 }
849a76a5
JJ
4284 g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
4285 build1 (REALPART_EXPR, itype, lhs));
cc195d46
JJ
4286 if (throws && oldlhs == NULL_TREE)
4287 {
4288 gsi_insert_on_edge_immediate (e, g);
4289 *gsi = gsi_for_stmt (g);
4290 }
4291 else
4292 gsi_insert_after (gsi, g, GSI_NEW_STMT);
849a76a5
JJ
4293 if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
4294 {
4295 g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
4296 VIEW_CONVERT_EXPR,
4297 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
4298 gimple_assign_lhs (g)));
4299 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4300 }
4301 g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
4302 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4303 *gsi = gsiret;
4304}
4305
1304953e
JJ
4306/* Return true if ARG0 CODE ARG1 in infinite signed precision operation
4307 doesn't fit into TYPE. The test for overflow should be regardless of
4308 -fwrapv, and even for unsigned types. */
4309
4310bool
4311arith_overflowed_p (enum tree_code code, const_tree type,
4312 const_tree arg0, const_tree arg1)
4313{
1304953e
JJ
4314 widest2_int warg0 = widest2_int_cst (arg0);
4315 widest2_int warg1 = widest2_int_cst (arg1);
4316 widest2_int wres;
4317 switch (code)
4318 {
4319 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
4320 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
4321 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
4322 default: gcc_unreachable ();
4323 }
4324 signop sign = TYPE_SIGN (type);
4325 if (sign == UNSIGNED && wi::neg_p (wres))
4326 return true;
4327 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
4328}
4329
868363d4
RS
4330/* If IFN_MASK_LOAD/STORE call CALL is unconditional, return a MEM_REF
4331 for the memory it references, otherwise return null. VECTYPE is the
4332 type of the memory vector. */
4333
4334static tree
4335gimple_fold_mask_load_store_mem_ref (gcall *call, tree vectype)
4336{
4337 tree ptr = gimple_call_arg (call, 0);
4338 tree alias_align = gimple_call_arg (call, 1);
4339 tree mask = gimple_call_arg (call, 2);
4340 if (!tree_fits_uhwi_p (alias_align) || !integer_all_onesp (mask))
4341 return NULL_TREE;
4342
4343 unsigned HOST_WIDE_INT align = tree_to_uhwi (alias_align) * BITS_PER_UNIT;
4344 if (TYPE_ALIGN (vectype) != align)
4345 vectype = build_aligned_type (vectype, align);
4346 tree offset = build_zero_cst (TREE_TYPE (alias_align));
4347 return fold_build2 (MEM_REF, vectype, ptr, offset);
4348}
4349
4350/* Try to fold IFN_MASK_LOAD call CALL. Return true on success. */
4351
4352static bool
4353gimple_fold_mask_load (gimple_stmt_iterator *gsi, gcall *call)
4354{
4355 tree lhs = gimple_call_lhs (call);
4356 if (!lhs)
4357 return false;
4358
4359 if (tree rhs = gimple_fold_mask_load_store_mem_ref (call, TREE_TYPE (lhs)))
4360 {
4361 gassign *new_stmt = gimple_build_assign (lhs, rhs);
4362 gimple_set_location (new_stmt, gimple_location (call));
4363 gimple_move_vops (new_stmt, call);
4364 gsi_replace (gsi, new_stmt, false);
4365 return true;
4366 }
4367 return false;
4368}
4369
4370/* Try to fold IFN_MASK_STORE call CALL. Return true on success. */
4371
4372static bool
4373gimple_fold_mask_store (gimple_stmt_iterator *gsi, gcall *call)
4374{
4375 tree rhs = gimple_call_arg (call, 3);
4376 if (tree lhs = gimple_fold_mask_load_store_mem_ref (call, TREE_TYPE (rhs)))
4377 {
4378 gassign *new_stmt = gimple_build_assign (lhs, rhs);
4379 gimple_set_location (new_stmt, gimple_location (call));
4380 gimple_move_vops (new_stmt, call);
4381 gsi_replace (gsi, new_stmt, false);
4382 return true;
4383 }
4384 return false;
4385}
4386
cbdd87d4
RG
4387/* Attempt to fold a call statement referenced by the statement iterator GSI.
4388 The statement may be replaced by another statement, e.g., if the call
4389 simplifies to a constant value. Return true if any changes were made.
4390 It is assumed that the operands have been previously folded. */
4391
e021c122 4392static bool
ceeffab0 4393gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
cbdd87d4 4394{
538dd0b7 4395 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3b45a007 4396 tree callee;
e021c122
RG
4397 bool changed = false;
4398 unsigned i;
cbdd87d4 4399
e021c122
RG
4400 /* Fold *& in call arguments. */
4401 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4402 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
4403 {
4404 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
4405 if (tmp)
4406 {
4407 gimple_call_set_arg (stmt, i, tmp);
4408 changed = true;
4409 }
4410 }
3b45a007
RG
4411
4412 /* Check for virtual calls that became direct calls. */
4413 callee = gimple_call_fn (stmt);
25583c4f 4414 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
3b45a007 4415 {
49c471e3
MJ
4416 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
4417 {
450ad0cd
JH
4418 if (dump_file && virtual_method_call_p (callee)
4419 && !possible_polymorphic_call_target_p
6f8091fc
JH
4420 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
4421 (OBJ_TYPE_REF_EXPR (callee)))))
450ad0cd
JH
4422 {
4423 fprintf (dump_file,
a70e9985 4424 "Type inheritance inconsistent devirtualization of ");
450ad0cd
JH
4425 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
4426 fprintf (dump_file, " to ");
4427 print_generic_expr (dump_file, callee, TDF_SLIM);
4428 fprintf (dump_file, "\n");
4429 }
4430
49c471e3 4431 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
e021c122
RG
4432 changed = true;
4433 }
a70e9985 4434 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
e021c122 4435 {
61dd6a2e
JH
4436 bool final;
4437 vec <cgraph_node *>targets
058d0a90 4438 = possible_polymorphic_call_targets (callee, stmt, &final);
2b5f0895 4439 if (final && targets.length () <= 1 && dbg_cnt (devirt))
e021c122 4440 {
a70e9985 4441 tree lhs = gimple_call_lhs (stmt);
2b5f0895
XDL
4442 if (dump_enabled_p ())
4443 {
4f5b9c80 4444 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
2b5f0895
XDL
4445 "folding virtual function call to %s\n",
4446 targets.length () == 1
4447 ? targets[0]->name ()
4448 : "__builtin_unreachable");
4449 }
61dd6a2e 4450 if (targets.length () == 1)
cf3e5a89 4451 {
18954840
JJ
4452 tree fndecl = targets[0]->decl;
4453 gimple_call_set_fndecl (stmt, fndecl);
cf3e5a89 4454 changed = true;
18954840
JJ
4455 /* If changing the call to __cxa_pure_virtual
4456 or similar noreturn function, adjust gimple_call_fntype
4457 too. */
865f7046 4458 if (gimple_call_noreturn_p (stmt)
18954840
JJ
4459 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
4460 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
4461 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
4462 == void_type_node))
4463 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
a70e9985 4464 /* If the call becomes noreturn, remove the lhs. */
3cee7e4e
MP
4465 if (lhs
4466 && gimple_call_noreturn_p (stmt)
18954840 4467 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
abd3a68c 4468 || should_remove_lhs_p (lhs)))
a70e9985
JJ
4469 {
4470 if (TREE_CODE (lhs) == SSA_NAME)
4471 {
b731b390 4472 tree var = create_tmp_var (TREE_TYPE (lhs));
a70e9985 4473 tree def = get_or_create_ssa_default_def (cfun, var);
355fe088 4474 gimple *new_stmt = gimple_build_assign (lhs, def);
a70e9985
JJ
4475 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
4476 }
4477 gimple_call_set_lhs (stmt, NULL_TREE);
4478 }
0b986c6a 4479 maybe_remove_unused_call_args (cfun, stmt);
cf3e5a89 4480 }
a70e9985 4481 else
cf3e5a89
JJ
4482 {
4483 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
355fe088 4484 gimple *new_stmt = gimple_build_call (fndecl, 0);
cf3e5a89 4485 gimple_set_location (new_stmt, gimple_location (stmt));
2da6996c
RB
4486 /* If the call had a SSA name as lhs morph that into
4487 an uninitialized value. */
a70e9985
JJ
4488 if (lhs && TREE_CODE (lhs) == SSA_NAME)
4489 {
b731b390 4490 tree var = create_tmp_var (TREE_TYPE (lhs));
2da6996c
RB
4491 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
4492 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
4493 set_ssa_default_def (cfun, var, lhs);
42e52a51 4494 }
779724a5 4495 gimple_move_vops (new_stmt, stmt);
2da6996c 4496 gsi_replace (gsi, new_stmt, false);
cf3e5a89
JJ
4497 return true;
4498 }
e021c122 4499 }
49c471e3 4500 }
e021c122 4501 }
49c471e3 4502
f2d3d07e
RH
4503 /* Check for indirect calls that became direct calls, and then
4504 no longer require a static chain. */
4505 if (gimple_call_chain (stmt))
4506 {
4507 tree fn = gimple_call_fndecl (stmt);
4508 if (fn && !DECL_STATIC_CHAIN (fn))
4509 {
4510 gimple_call_set_chain (stmt, NULL);
4511 changed = true;
4512 }
4513 else
4514 {
4515 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
4516 if (tmp)
4517 {
4518 gimple_call_set_chain (stmt, tmp);
4519 changed = true;
4520 }
4521 }
4522 }
4523
e021c122
RG
4524 if (inplace)
4525 return changed;
4526
4527 /* Check for builtins that CCP can handle using information not
4528 available in the generic fold routines. */
fef5a0d9
RB
4529 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4530 {
4531 if (gimple_fold_builtin (gsi))
4532 changed = true;
4533 }
4534 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
e021c122 4535 {
ea679d55 4536 changed |= targetm.gimple_fold_builtin (gsi);
3b45a007 4537 }
368b454d 4538 else if (gimple_call_internal_p (stmt))
ed9c79e1 4539 {
368b454d
JJ
4540 enum tree_code subcode = ERROR_MARK;
4541 tree result = NULL_TREE;
1304953e
JJ
4542 bool cplx_result = false;
4543 tree overflow = NULL_TREE;
368b454d
JJ
4544 switch (gimple_call_internal_fn (stmt))
4545 {
4546 case IFN_BUILTIN_EXPECT:
4547 result = fold_builtin_expect (gimple_location (stmt),
4548 gimple_call_arg (stmt, 0),
4549 gimple_call_arg (stmt, 1),
1e9168b2
ML
4550 gimple_call_arg (stmt, 2),
4551 NULL_TREE);
368b454d 4552 break;
0e82f089 4553 case IFN_UBSAN_OBJECT_SIZE:
ca1150f0
JJ
4554 {
4555 tree offset = gimple_call_arg (stmt, 1);
4556 tree objsize = gimple_call_arg (stmt, 2);
4557 if (integer_all_onesp (objsize)
4558 || (TREE_CODE (offset) == INTEGER_CST
4559 && TREE_CODE (objsize) == INTEGER_CST
4560 && tree_int_cst_le (offset, objsize)))
4561 {
4562 replace_call_with_value (gsi, NULL_TREE);
4563 return true;
4564 }
4565 }
4566 break;
4567 case IFN_UBSAN_PTR:
4568 if (integer_zerop (gimple_call_arg (stmt, 1)))
0e82f089 4569 {
ca1150f0 4570 replace_call_with_value (gsi, NULL_TREE);
0e82f089
MP
4571 return true;
4572 }
4573 break;
ca1150f0
JJ
4574 case IFN_UBSAN_BOUNDS:
4575 {
4576 tree index = gimple_call_arg (stmt, 1);
4577 tree bound = gimple_call_arg (stmt, 2);
4578 if (TREE_CODE (index) == INTEGER_CST
4579 && TREE_CODE (bound) == INTEGER_CST)
4580 {
4581 index = fold_convert (TREE_TYPE (bound), index);
4582 if (TREE_CODE (index) == INTEGER_CST
4583 && tree_int_cst_le (index, bound))
4584 {
4585 replace_call_with_value (gsi, NULL_TREE);
4586 return true;
4587 }
4588 }
4589 }
4590 break;
451e8dae
NS
4591 case IFN_GOACC_DIM_SIZE:
4592 case IFN_GOACC_DIM_POS:
4593 result = fold_internal_goacc_dim (stmt);
4594 break;
368b454d
JJ
4595 case IFN_UBSAN_CHECK_ADD:
4596 subcode = PLUS_EXPR;
4597 break;
4598 case IFN_UBSAN_CHECK_SUB:
4599 subcode = MINUS_EXPR;
4600 break;
4601 case IFN_UBSAN_CHECK_MUL:
4602 subcode = MULT_EXPR;
4603 break;
1304953e
JJ
4604 case IFN_ADD_OVERFLOW:
4605 subcode = PLUS_EXPR;
4606 cplx_result = true;
4607 break;
4608 case IFN_SUB_OVERFLOW:
4609 subcode = MINUS_EXPR;
4610 cplx_result = true;
4611 break;
4612 case IFN_MUL_OVERFLOW:
4613 subcode = MULT_EXPR;
4614 cplx_result = true;
4615 break;
868363d4
RS
4616 case IFN_MASK_LOAD:
4617 changed |= gimple_fold_mask_load (gsi, stmt);
4618 break;
4619 case IFN_MASK_STORE:
4620 changed |= gimple_fold_mask_store (gsi, stmt);
4621 break;
368b454d
JJ
4622 default:
4623 break;
4624 }
4625 if (subcode != ERROR_MARK)
4626 {
4627 tree arg0 = gimple_call_arg (stmt, 0);
4628 tree arg1 = gimple_call_arg (stmt, 1);
1304953e
JJ
4629 tree type = TREE_TYPE (arg0);
4630 if (cplx_result)
4631 {
4632 tree lhs = gimple_call_lhs (stmt);
4633 if (lhs == NULL_TREE)
4634 type = NULL_TREE;
4635 else
4636 type = TREE_TYPE (TREE_TYPE (lhs));
4637 }
4638 if (type == NULL_TREE)
4639 ;
368b454d 4640 /* x = y + 0; x = y - 0; x = y * 0; */
1304953e
JJ
4641 else if (integer_zerop (arg1))
4642 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
368b454d
JJ
4643 /* x = 0 + y; x = 0 * y; */
4644 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
1304953e 4645 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
368b454d
JJ
4646 /* x = y - y; */
4647 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
1304953e 4648 result = integer_zero_node;
368b454d 4649 /* x = y * 1; x = 1 * y; */
1304953e
JJ
4650 else if (subcode == MULT_EXPR && integer_onep (arg1))
4651 result = arg0;
4652 else if (subcode == MULT_EXPR && integer_onep (arg0))
4653 result = arg1;
4654 else if (TREE_CODE (arg0) == INTEGER_CST
4655 && TREE_CODE (arg1) == INTEGER_CST)
368b454d 4656 {
1304953e
JJ
4657 if (cplx_result)
4658 result = int_const_binop (subcode, fold_convert (type, arg0),
4659 fold_convert (type, arg1));
4660 else
4661 result = int_const_binop (subcode, arg0, arg1);
4662 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
4663 {
4664 if (cplx_result)
4665 overflow = build_one_cst (type);
4666 else
4667 result = NULL_TREE;
4668 }
4669 }
4670 if (result)
4671 {
4672 if (result == integer_zero_node)
4673 result = build_zero_cst (type);
4674 else if (cplx_result && TREE_TYPE (result) != type)
4675 {
4676 if (TREE_CODE (result) == INTEGER_CST)
4677 {
4678 if (arith_overflowed_p (PLUS_EXPR, type, result,
4679 integer_zero_node))
4680 overflow = build_one_cst (type);
4681 }
4682 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
4683 && TYPE_UNSIGNED (type))
4684 || (TYPE_PRECISION (type)
4685 < (TYPE_PRECISION (TREE_TYPE (result))
4686 + (TYPE_UNSIGNED (TREE_TYPE (result))
4687 && !TYPE_UNSIGNED (type)))))
4688 result = NULL_TREE;
4689 if (result)
4690 result = fold_convert (type, result);
4691 }
368b454d
JJ
4692 }
4693 }
1304953e 4694
ed9c79e1
JJ
4695 if (result)
4696 {
1304953e
JJ
4697 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
4698 result = drop_tree_overflow (result);
4699 if (cplx_result)
4700 {
4701 if (overflow == NULL_TREE)
4702 overflow = build_zero_cst (TREE_TYPE (result));
4703 tree ctype = build_complex_type (TREE_TYPE (result));
4704 if (TREE_CODE (result) == INTEGER_CST
4705 && TREE_CODE (overflow) == INTEGER_CST)
4706 result = build_complex (ctype, result, overflow);
4707 else
4708 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
4709 ctype, result, overflow);
4710 }
ed9c79e1
JJ
4711 if (!update_call_from_tree (gsi, result))
4712 gimplify_and_update_call_from_tree (gsi, result);
4713 changed = true;
4714 }
4715 }
3b45a007 4716
e021c122 4717 return changed;
cbdd87d4
RG
4718}
4719
e0ee10ed 4720
89a79e96
RB
4721/* Return true whether NAME has a use on STMT. */
4722
4723static bool
355fe088 4724has_use_on_stmt (tree name, gimple *stmt)
89a79e96
RB
4725{
4726 imm_use_iterator iter;
4727 use_operand_p use_p;
4728 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
4729 if (USE_STMT (use_p) == stmt)
4730 return true;
4731 return false;
4732}
4733
e0ee10ed
RB
4734/* Worker for fold_stmt_1 dispatch to pattern based folding with
4735 gimple_simplify.
4736
4737 Replaces *GSI with the simplification result in RCODE and OPS
4738 and the associated statements in *SEQ. Does the replacement
4739 according to INPLACE and returns true if the operation succeeded. */
4740
4741static bool
4742replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
5d75ad95 4743 gimple_match_op *res_op,
e0ee10ed
RB
4744 gimple_seq *seq, bool inplace)
4745{
355fe088 4746 gimple *stmt = gsi_stmt (*gsi);
5d75ad95
RS
4747 tree *ops = res_op->ops;
4748 unsigned int num_ops = res_op->num_ops;
e0ee10ed
RB
4749
4750 /* Play safe and do not allow abnormals to be mentioned in
89a79e96
RB
4751 newly created statements. See also maybe_push_res_to_seq.
4752 As an exception allow such uses if there was a use of the
4753 same SSA name on the old stmt. */
5d75ad95
RS
4754 for (unsigned int i = 0; i < num_ops; ++i)
4755 if (TREE_CODE (ops[i]) == SSA_NAME
4756 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])
4757 && !has_use_on_stmt (ops[i], stmt))
4758 return false;
4759
4760 if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
4761 for (unsigned int i = 0; i < 2; ++i)
4762 if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
4763 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))
4764 && !has_use_on_stmt (TREE_OPERAND (ops[0], i), stmt))
4765 return false;
e0ee10ed 4766
fec40d06
RS
4767 /* Don't insert new statements when INPLACE is true, even if we could
4768 reuse STMT for the final statement. */
4769 if (inplace && !gimple_seq_empty_p (*seq))
4770 return false;
4771
538dd0b7 4772 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
e0ee10ed 4773 {
5d75ad95
RS
4774 gcc_assert (res_op->code.is_tree_code ());
4775 if (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison
e0ee10ed
RB
4776 /* GIMPLE_CONDs condition may not throw. */
4777 && (!flag_exceptions
4778 || !cfun->can_throw_non_call_exceptions
5d75ad95 4779 || !operation_could_trap_p (res_op->code,
e0ee10ed
RB
4780 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
4781 false, NULL_TREE)))
5d75ad95
RS
4782 gimple_cond_set_condition (cond_stmt, res_op->code, ops[0], ops[1]);
4783 else if (res_op->code == SSA_NAME)
538dd0b7 4784 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
e0ee10ed 4785 build_zero_cst (TREE_TYPE (ops[0])));
5d75ad95 4786 else if (res_op->code == INTEGER_CST)
e0ee10ed
RB
4787 {
4788 if (integer_zerop (ops[0]))
538dd0b7 4789 gimple_cond_make_false (cond_stmt);
e0ee10ed 4790 else
538dd0b7 4791 gimple_cond_make_true (cond_stmt);
e0ee10ed
RB
4792 }
4793 else if (!inplace)
4794 {
5d75ad95 4795 tree res = maybe_push_res_to_seq (res_op, seq);
e0ee10ed
RB
4796 if (!res)
4797 return false;
538dd0b7 4798 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
e0ee10ed
RB
4799 build_zero_cst (TREE_TYPE (res)));
4800 }
4801 else
4802 return false;
4803 if (dump_file && (dump_flags & TDF_DETAILS))
4804 {
4805 fprintf (dump_file, "gimple_simplified to ");
4806 if (!gimple_seq_empty_p (*seq))
4807 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4808 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4809 0, TDF_SLIM);
4810 }
4811 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4812 return true;
4813 }
4814 else if (is_gimple_assign (stmt)
5d75ad95 4815 && res_op->code.is_tree_code ())
e0ee10ed
RB
4816 {
4817 if (!inplace
5d75ad95 4818 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (res_op->code))
e0ee10ed 4819 {
5d75ad95
RS
4820 maybe_build_generic_op (res_op);
4821 gimple_assign_set_rhs_with_ops (gsi, res_op->code,
4822 res_op->op_or_null (0),
4823 res_op->op_or_null (1),
4824 res_op->op_or_null (2));
e0ee10ed
RB
4825 if (dump_file && (dump_flags & TDF_DETAILS))
4826 {
4827 fprintf (dump_file, "gimple_simplified to ");
4828 if (!gimple_seq_empty_p (*seq))
4829 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4830 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4831 0, TDF_SLIM);
4832 }
4833 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4834 return true;
4835 }
4836 }
5d75ad95
RS
4837 else if (res_op->code.is_fn_code ()
4838 && gimple_call_combined_fn (stmt) == res_op->code)
37d486ab 4839 {
5d75ad95
RS
4840 gcc_assert (num_ops == gimple_call_num_args (stmt));
4841 for (unsigned int i = 0; i < num_ops; ++i)
4842 gimple_call_set_arg (stmt, i, ops[i]);
fec40d06
RS
4843 if (dump_file && (dump_flags & TDF_DETAILS))
4844 {
4845 fprintf (dump_file, "gimple_simplified to ");
4846 if (!gimple_seq_empty_p (*seq))
4847 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4848 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
4849 }
4850 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
37d486ab
RB
4851 return true;
4852 }
e0ee10ed
RB
4853 else if (!inplace)
4854 {
4855 if (gimple_has_lhs (stmt))
4856 {
4857 tree lhs = gimple_get_lhs (stmt);
5d75ad95 4858 if (!maybe_push_res_to_seq (res_op, seq, lhs))
de665bbd 4859 return false;
e0ee10ed
RB
4860 if (dump_file && (dump_flags & TDF_DETAILS))
4861 {
4862 fprintf (dump_file, "gimple_simplified to ");
4863 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4864 }
4865 gsi_replace_with_seq_vops (gsi, *seq);
4866 return true;
4867 }
4868 else
4869 gcc_unreachable ();
4870 }
4871
4872 return false;
4873}
4874
040292e7
RB
4875/* Canonicalize MEM_REFs invariant address operand after propagation. */
4876
4877static bool
fabe0ede 4878maybe_canonicalize_mem_ref_addr (tree *t, bool is_debug = false)
040292e7
RB
4879{
4880 bool res = false;
fe8c8f1e 4881 tree *orig_t = t;
040292e7
RB
4882
4883 if (TREE_CODE (*t) == ADDR_EXPR)
4884 t = &TREE_OPERAND (*t, 0);
4885
f17a223d
RB
4886 /* The C and C++ frontends use an ARRAY_REF for indexing with their
4887 generic vector extension. The actual vector referenced is
4888 view-converted to an array type for this purpose. If the index
4889 is constant the canonical representation in the middle-end is a
4890 BIT_FIELD_REF so re-write the former to the latter here. */
4891 if (TREE_CODE (*t) == ARRAY_REF
4892 && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
4893 && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
4894 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
4895 {
4896 tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
4897 if (VECTOR_TYPE_P (vtype))
4898 {
4899 tree low = array_ref_low_bound (*t);
4900 if (TREE_CODE (low) == INTEGER_CST)
4901 {
4902 if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
4903 {
4904 widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
4905 wi::to_widest (low));
4906 idx = wi::mul (idx, wi::to_widest
4907 (TYPE_SIZE (TREE_TYPE (*t))));
4908 widest_int ext
4909 = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
4910 if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
4911 {
4912 *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
4913 TREE_TYPE (*t),
4914 TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
4915 TYPE_SIZE (TREE_TYPE (*t)),
92e29a5e 4916 wide_int_to_tree (bitsizetype, idx));
f17a223d
RB
4917 res = true;
4918 }
4919 }
4920 }
4921 }
4922 }
4923
040292e7
RB
4924 while (handled_component_p (*t))
4925 t = &TREE_OPERAND (*t, 0);
4926
4927 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
4928 of invariant addresses into a SSA name MEM_REF address. */
4929 if (TREE_CODE (*t) == MEM_REF
4930 || TREE_CODE (*t) == TARGET_MEM_REF)
4931 {
4932 tree addr = TREE_OPERAND (*t, 0);
4933 if (TREE_CODE (addr) == ADDR_EXPR
4934 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
4935 || handled_component_p (TREE_OPERAND (addr, 0))))
4936 {
4937 tree base;
a90c8804 4938 poly_int64 coffset;
040292e7
RB
4939 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
4940 &coffset);
4941 if (!base)
fabe0ede
JJ
4942 {
4943 if (is_debug)
4944 return false;
4945 gcc_unreachable ();
4946 }
040292e7
RB
4947
4948 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
4949 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
4950 TREE_OPERAND (*t, 1),
4951 size_int (coffset));
4952 res = true;
4953 }
4954 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
4955 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
4956 }
4957
4958 /* Canonicalize back MEM_REFs to plain reference trees if the object
4959 accessed is a decl that has the same access semantics as the MEM_REF. */
4960 if (TREE_CODE (*t) == MEM_REF
4961 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
f3dccf50
RB
4962 && integer_zerop (TREE_OPERAND (*t, 1))
4963 && MR_DEPENDENCE_CLIQUE (*t) == 0)
040292e7
RB
4964 {
4965 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4966 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
4967 if (/* Same volatile qualification. */
4968 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
4969 /* Same TBAA behavior with -fstrict-aliasing. */
4970 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
4971 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
4972 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
4973 /* Same alignment. */
4974 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
4975 /* We have to look out here to not drop a required conversion
4976 from the rhs to the lhs if *t appears on the lhs or vice-versa
4977 if it appears on the rhs. Thus require strict type
4978 compatibility. */
4979 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
4980 {
4981 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4982 res = true;
4983 }
4984 }
4985
fe8c8f1e
RB
4986 else if (TREE_CODE (*orig_t) == ADDR_EXPR
4987 && TREE_CODE (*t) == MEM_REF
4988 && TREE_CODE (TREE_OPERAND (*t, 0)) == INTEGER_CST)
4989 {
4990 tree base;
4991 poly_int64 coffset;
4992 base = get_addr_base_and_unit_offset (TREE_OPERAND (*orig_t, 0),
4993 &coffset);
4994 if (base)
4995 {
4996 gcc_assert (TREE_CODE (base) == MEM_REF);
4997 poly_int64 moffset;
4998 if (mem_ref_offset (base).to_shwi (&moffset))
4999 {
5000 coffset += moffset;
5001 if (wi::to_poly_wide (TREE_OPERAND (base, 0)).to_shwi (&moffset))
5002 {
5003 coffset += moffset;
5004 *orig_t = build_int_cst (TREE_TYPE (*orig_t), coffset);
5005 return true;
5006 }
5007 }
5008 }
5009 }
5010
040292e7
RB
5011 /* Canonicalize TARGET_MEM_REF in particular with respect to
5012 the indexes becoming constant. */
5013 else if (TREE_CODE (*t) == TARGET_MEM_REF)
5014 {
5015 tree tem = maybe_fold_tmr (*t);
5016 if (tem)
5017 {
5018 *t = tem;
5019 res = true;
5020 }
5021 }
5022
5023 return res;
5024}
5025
cbdd87d4
RG
5026/* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
5027 distinguishes both cases. */
5028
5029static bool
e0ee10ed 5030fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
cbdd87d4
RG
5031{
5032 bool changed = false;
355fe088 5033 gimple *stmt = gsi_stmt (*gsi);
a8b85ce9 5034 bool nowarning = gimple_no_warning_p (stmt);
cbdd87d4 5035 unsigned i;
a8b85ce9 5036 fold_defer_overflow_warnings ();
cbdd87d4 5037
040292e7
RB
5038 /* First do required canonicalization of [TARGET_]MEM_REF addresses
5039 after propagation.
5040 ??? This shouldn't be done in generic folding but in the
5041 propagation helpers which also know whether an address was
89a79e96
RB
5042 propagated.
5043 Also canonicalize operand order. */
040292e7
RB
5044 switch (gimple_code (stmt))
5045 {
5046 case GIMPLE_ASSIGN:
5047 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
5048 {
5049 tree *rhs = gimple_assign_rhs1_ptr (stmt);
5050 if ((REFERENCE_CLASS_P (*rhs)
5051 || TREE_CODE (*rhs) == ADDR_EXPR)
5052 && maybe_canonicalize_mem_ref_addr (rhs))
5053 changed = true;
5054 tree *lhs = gimple_assign_lhs_ptr (stmt);
5055 if (REFERENCE_CLASS_P (*lhs)
5056 && maybe_canonicalize_mem_ref_addr (lhs))
5057 changed = true;
5058 }
89a79e96
RB
5059 else
5060 {
5061 /* Canonicalize operand order. */
5062 enum tree_code code = gimple_assign_rhs_code (stmt);
5063 if (TREE_CODE_CLASS (code) == tcc_comparison
5064 || commutative_tree_code (code)
5065 || commutative_ternary_tree_code (code))
5066 {
5067 tree rhs1 = gimple_assign_rhs1 (stmt);
5068 tree rhs2 = gimple_assign_rhs2 (stmt);
14e72812 5069 if (tree_swap_operands_p (rhs1, rhs2))
89a79e96
RB
5070 {
5071 gimple_assign_set_rhs1 (stmt, rhs2);
5072 gimple_assign_set_rhs2 (stmt, rhs1);
5073 if (TREE_CODE_CLASS (code) == tcc_comparison)
5074 gimple_assign_set_rhs_code (stmt,
5075 swap_tree_comparison (code));
5076 changed = true;
5077 }
5078 }
5079 }
040292e7
RB
5080 break;
5081 case GIMPLE_CALL:
5082 {
5083 for (i = 0; i < gimple_call_num_args (stmt); ++i)
5084 {
5085 tree *arg = gimple_call_arg_ptr (stmt, i);
5086 if (REFERENCE_CLASS_P (*arg)
5087 && maybe_canonicalize_mem_ref_addr (arg))
5088 changed = true;
5089 }
5090 tree *lhs = gimple_call_lhs_ptr (stmt);
5091 if (*lhs
5092 && REFERENCE_CLASS_P (*lhs)
5093 && maybe_canonicalize_mem_ref_addr (lhs))
5094 changed = true;
5095 break;
5096 }
5097 case GIMPLE_ASM:
5098 {
538dd0b7
DM
5099 gasm *asm_stmt = as_a <gasm *> (stmt);
5100 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
040292e7 5101 {
538dd0b7 5102 tree link = gimple_asm_output_op (asm_stmt, i);
040292e7
RB
5103 tree op = TREE_VALUE (link);
5104 if (REFERENCE_CLASS_P (op)
5105 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
5106 changed = true;
5107 }
538dd0b7 5108 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
040292e7 5109 {
538dd0b7 5110 tree link = gimple_asm_input_op (asm_stmt, i);
040292e7
RB
5111 tree op = TREE_VALUE (link);
5112 if ((REFERENCE_CLASS_P (op)
5113 || TREE_CODE (op) == ADDR_EXPR)
5114 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
5115 changed = true;
5116 }
5117 }
5118 break;
5119 case GIMPLE_DEBUG:
5120 if (gimple_debug_bind_p (stmt))
5121 {
5122 tree *val = gimple_debug_bind_get_value_ptr (stmt);
5123 if (*val
5124 && (REFERENCE_CLASS_P (*val)
5125 || TREE_CODE (*val) == ADDR_EXPR)
fabe0ede 5126 && maybe_canonicalize_mem_ref_addr (val, true))
040292e7
RB
5127 changed = true;
5128 }
5129 break;
89a79e96
RB
5130 case GIMPLE_COND:
5131 {
5132 /* Canonicalize operand order. */
5133 tree lhs = gimple_cond_lhs (stmt);
5134 tree rhs = gimple_cond_rhs (stmt);
14e72812 5135 if (tree_swap_operands_p (lhs, rhs))
89a79e96
RB
5136 {
5137 gcond *gc = as_a <gcond *> (stmt);
5138 gimple_cond_set_lhs (gc, rhs);
5139 gimple_cond_set_rhs (gc, lhs);
5140 gimple_cond_set_code (gc,
5141 swap_tree_comparison (gimple_cond_code (gc)));
5142 changed = true;
5143 }
5144 }
040292e7
RB
5145 default:;
5146 }
5147
e0ee10ed
RB
5148 /* Dispatch to pattern-based folding. */
5149 if (!inplace
5150 || is_gimple_assign (stmt)
5151 || gimple_code (stmt) == GIMPLE_COND)
5152 {
5153 gimple_seq seq = NULL;
5d75ad95
RS
5154 gimple_match_op res_op;
5155 if (gimple_simplify (stmt, &res_op, inplace ? NULL : &seq,
0ff093d8 5156 valueize, valueize))
e0ee10ed 5157 {
5d75ad95 5158 if (replace_stmt_with_simplification (gsi, &res_op, &seq, inplace))
e0ee10ed
RB
5159 changed = true;
5160 else
5161 gimple_seq_discard (seq);
5162 }
5163 }
5164
5165 stmt = gsi_stmt (*gsi);
5166
cbdd87d4
RG
5167 /* Fold the main computation performed by the statement. */
5168 switch (gimple_code (stmt))
5169 {
5170 case GIMPLE_ASSIGN:
5171 {
819ec64c
RB
5172 /* Try to canonicalize for boolean-typed X the comparisons
5173 X == 0, X == 1, X != 0, and X != 1. */
5174 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
5175 || gimple_assign_rhs_code (stmt) == NE_EXPR)
5fbcc0ed 5176 {
819ec64c
RB
5177 tree lhs = gimple_assign_lhs (stmt);
5178 tree op1 = gimple_assign_rhs1 (stmt);
5179 tree op2 = gimple_assign_rhs2 (stmt);
5180 tree type = TREE_TYPE (op1);
5181
5182 /* Check whether the comparison operands are of the same boolean
5183 type as the result type is.
5184 Check that second operand is an integer-constant with value
5185 one or zero. */
5186 if (TREE_CODE (op2) == INTEGER_CST
5187 && (integer_zerop (op2) || integer_onep (op2))
5188 && useless_type_conversion_p (TREE_TYPE (lhs), type))
5189 {
5190 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
5191 bool is_logical_not = false;
5192
5193 /* X == 0 and X != 1 is a logical-not.of X
5194 X == 1 and X != 0 is X */
5195 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
5196 || (cmp_code == NE_EXPR && integer_onep (op2)))
5197 is_logical_not = true;
5198
5199 if (is_logical_not == false)
5200 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
5201 /* Only for one-bit precision typed X the transformation
5202 !X -> ~X is valied. */
5203 else if (TYPE_PRECISION (type) == 1)
5204 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
5205 /* Otherwise we use !X -> X ^ 1. */
5206 else
5207 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
5208 build_int_cst (type, 1));
5209 changed = true;
5210 break;
5211 }
5fbcc0ed 5212 }
819ec64c
RB
5213
5214 unsigned old_num_ops = gimple_num_ops (stmt);
5215 tree lhs = gimple_assign_lhs (stmt);
5216 tree new_rhs = fold_gimple_assign (gsi);
cbdd87d4
RG
5217 if (new_rhs
5218 && !useless_type_conversion_p (TREE_TYPE (lhs),
5219 TREE_TYPE (new_rhs)))
5220 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
5221 if (new_rhs
5222 && (!inplace
5223 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
5224 {
5225 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
5226 changed = true;
5227 }
5228 break;
5229 }
5230
cbdd87d4 5231 case GIMPLE_CALL:
ceeffab0 5232 changed |= gimple_fold_call (gsi, inplace);
cbdd87d4
RG
5233 break;
5234
5235 case GIMPLE_ASM:
5236 /* Fold *& in asm operands. */
38384150 5237 {
538dd0b7 5238 gasm *asm_stmt = as_a <gasm *> (stmt);
38384150
JJ
5239 size_t noutputs;
5240 const char **oconstraints;
5241 const char *constraint;
5242 bool allows_mem, allows_reg;
5243
538dd0b7 5244 noutputs = gimple_asm_noutputs (asm_stmt);
38384150
JJ
5245 oconstraints = XALLOCAVEC (const char *, noutputs);
5246
538dd0b7 5247 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
38384150 5248 {
538dd0b7 5249 tree link = gimple_asm_output_op (asm_stmt, i);
38384150
JJ
5250 tree op = TREE_VALUE (link);
5251 oconstraints[i]
5252 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5253 if (REFERENCE_CLASS_P (op)
5254 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
5255 {
5256 TREE_VALUE (link) = op;
5257 changed = true;
5258 }
5259 }
538dd0b7 5260 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
38384150 5261 {
538dd0b7 5262 tree link = gimple_asm_input_op (asm_stmt, i);
38384150
JJ
5263 tree op = TREE_VALUE (link);
5264 constraint
5265 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5266 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
5267 oconstraints, &allows_mem, &allows_reg);
5268 if (REFERENCE_CLASS_P (op)
5269 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
5270 != NULL_TREE)
5271 {
5272 TREE_VALUE (link) = op;
5273 changed = true;
5274 }
5275 }
5276 }
cbdd87d4
RG
5277 break;
5278
bd422c4a
RG
5279 case GIMPLE_DEBUG:
5280 if (gimple_debug_bind_p (stmt))
5281 {
5282 tree val = gimple_debug_bind_get_value (stmt);
5283 if (val
5284 && REFERENCE_CLASS_P (val))
5285 {
5286 tree tem = maybe_fold_reference (val, false);
5287 if (tem)
5288 {
5289 gimple_debug_bind_set_value (stmt, tem);
5290 changed = true;
5291 }
5292 }
3e888a5e
RG
5293 else if (val
5294 && TREE_CODE (val) == ADDR_EXPR)
5295 {
5296 tree ref = TREE_OPERAND (val, 0);
5297 tree tem = maybe_fold_reference (ref, false);
5298 if (tem)
5299 {
5300 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
5301 gimple_debug_bind_set_value (stmt, tem);
5302 changed = true;
5303 }
5304 }
bd422c4a
RG
5305 }
5306 break;
5307
cfe3d653
PK
5308 case GIMPLE_RETURN:
5309 {
5310 greturn *ret_stmt = as_a<greturn *> (stmt);
5311 tree ret = gimple_return_retval(ret_stmt);
5312
5313 if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
5314 {
5315 tree val = valueize (ret);
1af928db
RB
5316 if (val && val != ret
5317 && may_propagate_copy (ret, val))
cfe3d653
PK
5318 {
5319 gimple_return_set_retval (ret_stmt, val);
5320 changed = true;
5321 }
5322 }
5323 }
5324 break;
5325
cbdd87d4
RG
5326 default:;
5327 }
5328
5329 stmt = gsi_stmt (*gsi);
5330
37376165
RB
5331 /* Fold *& on the lhs. */
5332 if (gimple_has_lhs (stmt))
cbdd87d4
RG
5333 {
5334 tree lhs = gimple_get_lhs (stmt);
5335 if (lhs && REFERENCE_CLASS_P (lhs))
5336 {
5337 tree new_lhs = maybe_fold_reference (lhs, true);
5338 if (new_lhs)
5339 {
5340 gimple_set_lhs (stmt, new_lhs);
5341 changed = true;
5342 }
5343 }
5344 }
5345
a8b85ce9 5346 fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
cbdd87d4
RG
5347 return changed;
5348}
5349
e0ee10ed
RB
5350/* Valueziation callback that ends up not following SSA edges. */
5351
5352tree
5353no_follow_ssa_edges (tree)
5354{
5355 return NULL_TREE;
5356}
5357
45cc9f96
RB
5358/* Valueization callback that ends up following single-use SSA edges only. */
5359
5360tree
5361follow_single_use_edges (tree val)
5362{
5363 if (TREE_CODE (val) == SSA_NAME
5364 && !has_single_use (val))
5365 return NULL_TREE;
5366 return val;
5367}
5368
c566cc9f
RS
5369/* Valueization callback that follows all SSA edges. */
5370
5371tree
5372follow_all_ssa_edges (tree val)
5373{
5374 return val;
5375}
5376
cbdd87d4
RG
5377/* Fold the statement pointed to by GSI. In some cases, this function may
5378 replace the whole statement with a new one. Returns true iff folding
5379 makes any changes.
5380 The statement pointed to by GSI should be in valid gimple form but may
5381 be in unfolded state as resulting from for example constant propagation
5382 which can produce *&x = 0. */
5383
5384bool
5385fold_stmt (gimple_stmt_iterator *gsi)
5386{
e0ee10ed
RB
5387 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
5388}
5389
5390bool
5391fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
5392{
5393 return fold_stmt_1 (gsi, false, valueize);
cbdd87d4
RG
5394}
5395
59401b92 5396/* Perform the minimal folding on statement *GSI. Only operations like
cbdd87d4
RG
5397 *&x created by constant propagation are handled. The statement cannot
5398 be replaced with a new one. Return true if the statement was
5399 changed, false otherwise.
59401b92 5400 The statement *GSI should be in valid gimple form but may
cbdd87d4
RG
5401 be in unfolded state as resulting from for example constant propagation
5402 which can produce *&x = 0. */
5403
5404bool
59401b92 5405fold_stmt_inplace (gimple_stmt_iterator *gsi)
cbdd87d4 5406{
355fe088 5407 gimple *stmt = gsi_stmt (*gsi);
e0ee10ed 5408 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
59401b92 5409 gcc_assert (gsi_stmt (*gsi) == stmt);
cbdd87d4
RG
5410 return changed;
5411}
5412
e89065a1
SL
5413/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
5414 if EXPR is null or we don't know how.
5415 If non-null, the result always has boolean type. */
5416
5417static tree
5418canonicalize_bool (tree expr, bool invert)
5419{
5420 if (!expr)
5421 return NULL_TREE;
5422 else if (invert)
5423 {
5424 if (integer_nonzerop (expr))
5425 return boolean_false_node;
5426 else if (integer_zerop (expr))
5427 return boolean_true_node;
5428 else if (TREE_CODE (expr) == SSA_NAME)
5429 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
5430 build_int_cst (TREE_TYPE (expr), 0));
98209db3 5431 else if (COMPARISON_CLASS_P (expr))
e89065a1
SL
5432 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
5433 boolean_type_node,
5434 TREE_OPERAND (expr, 0),
5435 TREE_OPERAND (expr, 1));
5436 else
5437 return NULL_TREE;
5438 }
5439 else
5440 {
5441 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5442 return expr;
5443 if (integer_nonzerop (expr))
5444 return boolean_true_node;
5445 else if (integer_zerop (expr))
5446 return boolean_false_node;
5447 else if (TREE_CODE (expr) == SSA_NAME)
5448 return fold_build2 (NE_EXPR, boolean_type_node, expr,
5449 build_int_cst (TREE_TYPE (expr), 0));
98209db3 5450 else if (COMPARISON_CLASS_P (expr))
e89065a1
SL
5451 return fold_build2 (TREE_CODE (expr),
5452 boolean_type_node,
5453 TREE_OPERAND (expr, 0),
5454 TREE_OPERAND (expr, 1));
5455 else
5456 return NULL_TREE;
5457 }
5458}
5459
5460/* Check to see if a boolean expression EXPR is logically equivalent to the
5461 comparison (OP1 CODE OP2). Check for various identities involving
5462 SSA_NAMEs. */
5463
5464static bool
5465same_bool_comparison_p (const_tree expr, enum tree_code code,
5466 const_tree op1, const_tree op2)
5467{
355fe088 5468 gimple *s;
e89065a1
SL
5469
5470 /* The obvious case. */
5471 if (TREE_CODE (expr) == code
5472 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
5473 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
5474 return true;
5475
5476 /* Check for comparing (name, name != 0) and the case where expr
5477 is an SSA_NAME with a definition matching the comparison. */
5478 if (TREE_CODE (expr) == SSA_NAME
5479 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5480 {
5481 if (operand_equal_p (expr, op1, 0))
5482 return ((code == NE_EXPR && integer_zerop (op2))
5483 || (code == EQ_EXPR && integer_nonzerop (op2)));
5484 s = SSA_NAME_DEF_STMT (expr);
5485 if (is_gimple_assign (s)
5486 && gimple_assign_rhs_code (s) == code
5487 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
5488 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
5489 return true;
5490 }
5491
5492 /* If op1 is of the form (name != 0) or (name == 0), and the definition
5493 of name is a comparison, recurse. */
5494 if (TREE_CODE (op1) == SSA_NAME
5495 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
5496 {
5497 s = SSA_NAME_DEF_STMT (op1);
5498 if (is_gimple_assign (s)
5499 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
5500 {
5501 enum tree_code c = gimple_assign_rhs_code (s);
5502 if ((c == NE_EXPR && integer_zerop (op2))
5503 || (c == EQ_EXPR && integer_nonzerop (op2)))
5504 return same_bool_comparison_p (expr, c,
5505 gimple_assign_rhs1 (s),
5506 gimple_assign_rhs2 (s));
5507 if ((c == EQ_EXPR && integer_zerop (op2))
5508 || (c == NE_EXPR && integer_nonzerop (op2)))
5509 return same_bool_comparison_p (expr,
5510 invert_tree_comparison (c, false),
5511 gimple_assign_rhs1 (s),
5512 gimple_assign_rhs2 (s));
5513 }
5514 }
5515 return false;
5516}
5517
5518/* Check to see if two boolean expressions OP1 and OP2 are logically
5519 equivalent. */
5520
5521static bool
5522same_bool_result_p (const_tree op1, const_tree op2)
5523{
5524 /* Simple cases first. */
5525 if (operand_equal_p (op1, op2, 0))
5526 return true;
5527
5528 /* Check the cases where at least one of the operands is a comparison.
5529 These are a bit smarter than operand_equal_p in that they apply some
5530 identifies on SSA_NAMEs. */
98209db3 5531 if (COMPARISON_CLASS_P (op2)
e89065a1
SL
5532 && same_bool_comparison_p (op1, TREE_CODE (op2),
5533 TREE_OPERAND (op2, 0),
5534 TREE_OPERAND (op2, 1)))
5535 return true;
98209db3 5536 if (COMPARISON_CLASS_P (op1)
e89065a1
SL
5537 && same_bool_comparison_p (op2, TREE_CODE (op1),
5538 TREE_OPERAND (op1, 0),
5539 TREE_OPERAND (op1, 1)))
5540 return true;
5541
5542 /* Default case. */
5543 return false;
5544}
5545
5546/* Forward declarations for some mutually recursive functions. */
5547
5548static tree
5f487a34 5549and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
e89065a1
SL
5550 enum tree_code code2, tree op2a, tree op2b);
5551static tree
5f487a34 5552and_var_with_comparison (tree type, tree var, bool invert,
e89065a1
SL
5553 enum tree_code code2, tree op2a, tree op2b);
5554static tree
5f487a34 5555and_var_with_comparison_1 (tree type, gimple *stmt,
e89065a1
SL
5556 enum tree_code code2, tree op2a, tree op2b);
5557static tree
5f487a34 5558or_comparisons_1 (tree, enum tree_code code1, tree op1a, tree op1b,
e89065a1
SL
5559 enum tree_code code2, tree op2a, tree op2b);
5560static tree
5f487a34 5561or_var_with_comparison (tree, tree var, bool invert,
e89065a1
SL
5562 enum tree_code code2, tree op2a, tree op2b);
5563static tree
5f487a34 5564or_var_with_comparison_1 (tree, gimple *stmt,
e89065a1
SL
5565 enum tree_code code2, tree op2a, tree op2b);
5566
5567/* Helper function for and_comparisons_1: try to simplify the AND of the
5568 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5569 If INVERT is true, invert the value of the VAR before doing the AND.
5570 Return NULL_EXPR if we can't simplify this to a single expression. */
5571
5572static tree
5f487a34 5573and_var_with_comparison (tree type, tree var, bool invert,
e89065a1
SL
5574 enum tree_code code2, tree op2a, tree op2b)
5575{
5576 tree t;
355fe088 5577 gimple *stmt = SSA_NAME_DEF_STMT (var);
e89065a1
SL
5578
5579 /* We can only deal with variables whose definitions are assignments. */
5580 if (!is_gimple_assign (stmt))
5581 return NULL_TREE;
5582
5583 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5584 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
5585 Then we only have to consider the simpler non-inverted cases. */
5586 if (invert)
5f487a34 5587 t = or_var_with_comparison_1 (type, stmt,
e89065a1
SL
5588 invert_tree_comparison (code2, false),
5589 op2a, op2b);
5590 else
5f487a34 5591 t = and_var_with_comparison_1 (type, stmt, code2, op2a, op2b);
e89065a1
SL
5592 return canonicalize_bool (t, invert);
5593}
5594
5595/* Try to simplify the AND of the ssa variable defined by the assignment
5596 STMT with the comparison specified by (OP2A CODE2 OP2B).
5597 Return NULL_EXPR if we can't simplify this to a single expression. */
5598
5599static tree
5f487a34 5600and_var_with_comparison_1 (tree type, gimple *stmt,
e89065a1
SL
5601 enum tree_code code2, tree op2a, tree op2b)
5602{
5603 tree var = gimple_assign_lhs (stmt);
5604 tree true_test_var = NULL_TREE;
5605 tree false_test_var = NULL_TREE;
5606 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5607
5608 /* Check for identities like (var AND (var == 0)) => false. */
5609 if (TREE_CODE (op2a) == SSA_NAME
5610 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5611 {
5612 if ((code2 == NE_EXPR && integer_zerop (op2b))
5613 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5614 {
5615 true_test_var = op2a;
5616 if (var == true_test_var)
5617 return var;
5618 }
5619 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5620 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5621 {
5622 false_test_var = op2a;
5623 if (var == false_test_var)
5624 return boolean_false_node;
5625 }
5626 }
5627
5628 /* If the definition is a comparison, recurse on it. */
5629 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5630 {
5f487a34 5631 tree t = and_comparisons_1 (type, innercode,
e89065a1
SL
5632 gimple_assign_rhs1 (stmt),
5633 gimple_assign_rhs2 (stmt),
5634 code2,
5635 op2a,
5636 op2b);
5637 if (t)
5638 return t;
5639 }
5640
5641 /* If the definition is an AND or OR expression, we may be able to
5642 simplify by reassociating. */
eb9820c0
KT
5643 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5644 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
e89065a1
SL
5645 {
5646 tree inner1 = gimple_assign_rhs1 (stmt);
5647 tree inner2 = gimple_assign_rhs2 (stmt);
355fe088 5648 gimple *s;
e89065a1
SL
5649 tree t;
5650 tree partial = NULL_TREE;
eb9820c0 5651 bool is_and = (innercode == BIT_AND_EXPR);
e89065a1
SL
5652
5653 /* Check for boolean identities that don't require recursive examination
5654 of inner1/inner2:
5655 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
5656 inner1 AND (inner1 OR inner2) => inner1
5657 !inner1 AND (inner1 AND inner2) => false
5658 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
5659 Likewise for similar cases involving inner2. */
5660 if (inner1 == true_test_var)
5661 return (is_and ? var : inner1);
5662 else if (inner2 == true_test_var)
5663 return (is_and ? var : inner2);
5664 else if (inner1 == false_test_var)
5665 return (is_and
5666 ? boolean_false_node
5f487a34
LJH
5667 : and_var_with_comparison (type, inner2, false, code2, op2a,
5668 op2b));
e89065a1
SL
5669 else if (inner2 == false_test_var)
5670 return (is_and
5671 ? boolean_false_node
5f487a34
LJH
5672 : and_var_with_comparison (type, inner1, false, code2, op2a,
5673 op2b));
e89065a1
SL
5674
5675 /* Next, redistribute/reassociate the AND across the inner tests.
5676 Compute the first partial result, (inner1 AND (op2a code op2b)) */
5677 if (TREE_CODE (inner1) == SSA_NAME
5678 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5679 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5f487a34 5680 && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
e89065a1
SL
5681 gimple_assign_rhs1 (s),
5682 gimple_assign_rhs2 (s),
5683 code2, op2a, op2b)))
5684 {
5685 /* Handle the AND case, where we are reassociating:
5686 (inner1 AND inner2) AND (op2a code2 op2b)
5687 => (t AND inner2)
5688 If the partial result t is a constant, we win. Otherwise
5689 continue on to try reassociating with the other inner test. */
5690 if (is_and)
5691 {
5692 if (integer_onep (t))
5693 return inner2;
5694 else if (integer_zerop (t))
5695 return boolean_false_node;
5696 }
5697
5698 /* Handle the OR case, where we are redistributing:
5699 (inner1 OR inner2) AND (op2a code2 op2b)
5700 => (t OR (inner2 AND (op2a code2 op2b))) */
8236c8eb
JJ
5701 else if (integer_onep (t))
5702 return boolean_true_node;
5703
5704 /* Save partial result for later. */
5705 partial = t;
e89065a1
SL
5706 }
5707
5708 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
5709 if (TREE_CODE (inner2) == SSA_NAME
5710 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5711 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5f487a34 5712 && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
e89065a1
SL
5713 gimple_assign_rhs1 (s),
5714 gimple_assign_rhs2 (s),
5715 code2, op2a, op2b)))
5716 {
5717 /* Handle the AND case, where we are reassociating:
5718 (inner1 AND inner2) AND (op2a code2 op2b)
5719 => (inner1 AND t) */
5720 if (is_and)
5721 {
5722 if (integer_onep (t))
5723 return inner1;
5724 else if (integer_zerop (t))
5725 return boolean_false_node;
8236c8eb
JJ
5726 /* If both are the same, we can apply the identity
5727 (x AND x) == x. */
5728 else if (partial && same_bool_result_p (t, partial))
5729 return t;
e89065a1
SL
5730 }
5731
5732 /* Handle the OR case. where we are redistributing:
5733 (inner1 OR inner2) AND (op2a code2 op2b)
5734 => (t OR (inner1 AND (op2a code2 op2b)))
5735 => (t OR partial) */
5736 else
5737 {
5738 if (integer_onep (t))
5739 return boolean_true_node;
5740 else if (partial)
5741 {
5742 /* We already got a simplification for the other
5743 operand to the redistributed OR expression. The
5744 interesting case is when at least one is false.
5745 Or, if both are the same, we can apply the identity
5746 (x OR x) == x. */
5747 if (integer_zerop (partial))
5748 return t;
5749 else if (integer_zerop (t))
5750 return partial;
5751 else if (same_bool_result_p (t, partial))
5752 return t;
5753 }
5754 }
5755 }
5756 }
5757 return NULL_TREE;
5758}
5759
5760/* Try to simplify the AND of two comparisons defined by
5761 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5762 If this can be done without constructing an intermediate value,
5763 return the resulting tree; otherwise NULL_TREE is returned.
5764 This function is deliberately asymmetric as it recurses on SSA_DEFs
5765 in the first comparison but not the second. */
5766
5767static tree
5f487a34 5768and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
e89065a1
SL
5769 enum tree_code code2, tree op2a, tree op2b)
5770{
ae22ac3c 5771 tree truth_type = truth_type_for (TREE_TYPE (op1a));
31ed6226 5772
e89065a1
SL
5773 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
5774 if (operand_equal_p (op1a, op2a, 0)
5775 && operand_equal_p (op1b, op2b, 0))
5776 {
eb9820c0 5777 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
5778 tree t = combine_comparisons (UNKNOWN_LOCATION,
5779 TRUTH_ANDIF_EXPR, code1, code2,
31ed6226 5780 truth_type, op1a, op1b);
e89065a1
SL
5781 if (t)
5782 return t;
5783 }
5784
5785 /* Likewise the swapped case of the above. */
5786 if (operand_equal_p (op1a, op2b, 0)
5787 && operand_equal_p (op1b, op2a, 0))
5788 {
eb9820c0 5789 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
5790 tree t = combine_comparisons (UNKNOWN_LOCATION,
5791 TRUTH_ANDIF_EXPR, code1,
5792 swap_tree_comparison (code2),
31ed6226 5793 truth_type, op1a, op1b);
e89065a1
SL
5794 if (t)
5795 return t;
5796 }
5797
e89065a1
SL
5798 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5799 NAME's definition is a truth value. See if there are any simplifications
5800 that can be done against the NAME's definition. */
5801 if (TREE_CODE (op1a) == SSA_NAME
5802 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5803 && (integer_zerop (op1b) || integer_onep (op1b)))
5804 {
5805 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5806 || (code1 == NE_EXPR && integer_onep (op1b)));
355fe088 5807 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
e89065a1
SL
5808 switch (gimple_code (stmt))
5809 {
5810 case GIMPLE_ASSIGN:
5811 /* Try to simplify by copy-propagating the definition. */
5f487a34
LJH
5812 return and_var_with_comparison (type, op1a, invert, code2, op2a,
5813 op2b);
e89065a1
SL
5814
5815 case GIMPLE_PHI:
5816 /* If every argument to the PHI produces the same result when
5817 ANDed with the second comparison, we win.
5818 Do not do this unless the type is bool since we need a bool
5819 result here anyway. */
5820 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5821 {
5822 tree result = NULL_TREE;
5823 unsigned i;
5824 for (i = 0; i < gimple_phi_num_args (stmt); i++)
5825 {
5826 tree arg = gimple_phi_arg_def (stmt, i);
5827
5828 /* If this PHI has itself as an argument, ignore it.
5829 If all the other args produce the same result,
5830 we're still OK. */
5831 if (arg == gimple_phi_result (stmt))
5832 continue;
5833 else if (TREE_CODE (arg) == INTEGER_CST)
5834 {
5835 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
5836 {
5837 if (!result)
5838 result = boolean_false_node;
5839 else if (!integer_zerop (result))
5840 return NULL_TREE;
5841 }
5842 else if (!result)
5843 result = fold_build2 (code2, boolean_type_node,
5844 op2a, op2b);
5845 else if (!same_bool_comparison_p (result,
5846 code2, op2a, op2b))
5847 return NULL_TREE;
5848 }
0e8b84ec
JJ
5849 else if (TREE_CODE (arg) == SSA_NAME
5850 && !SSA_NAME_IS_DEFAULT_DEF (arg))
e89065a1 5851 {
6c66f733 5852 tree temp;
355fe088 5853 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6c66f733
JJ
5854 /* In simple cases we can look through PHI nodes,
5855 but we have to be careful with loops.
5856 See PR49073. */
5857 if (! dom_info_available_p (CDI_DOMINATORS)
5858 || gimple_bb (def_stmt) == gimple_bb (stmt)
5859 || dominated_by_p (CDI_DOMINATORS,
5860 gimple_bb (def_stmt),
5861 gimple_bb (stmt)))
5862 return NULL_TREE;
5f487a34 5863 temp = and_var_with_comparison (type, arg, invert, code2,
6c66f733 5864 op2a, op2b);
e89065a1
SL
5865 if (!temp)
5866 return NULL_TREE;
5867 else if (!result)
5868 result = temp;
5869 else if (!same_bool_result_p (result, temp))
5870 return NULL_TREE;
5871 }
5872 else
5873 return NULL_TREE;
5874 }
5875 return result;
5876 }
5877
5878 default:
5879 break;
5880 }
5881 }
5882 return NULL_TREE;
5883}
5884
5f487a34
LJH
5885/* Helper function for maybe_fold_and_comparisons and maybe_fold_or_comparisons
5886 : try to simplify the AND/OR of the ssa variable VAR with the comparison
5887 specified by (OP2A CODE2 OP2B) from match.pd. Return NULL_EXPR if we can't
5888 simplify this to a single expression. As we are going to lower the cost
5889 of building SSA names / gimple stmts significantly, we need to allocate
5890 them ont the stack. This will cause the code to be a bit ugly. */
5891
5892static tree
5893maybe_fold_comparisons_from_match_pd (tree type, enum tree_code code,
5894 enum tree_code code1,
5895 tree op1a, tree op1b,
5896 enum tree_code code2, tree op2a,
5897 tree op2b)
5898{
5899 /* Allocate gimple stmt1 on the stack. */
5900 gassign *stmt1
5901 = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
5902 gimple_init (stmt1, GIMPLE_ASSIGN, 3);
5903 gimple_assign_set_rhs_code (stmt1, code1);
5904 gimple_assign_set_rhs1 (stmt1, op1a);
5905 gimple_assign_set_rhs2 (stmt1, op1b);
5906
5907 /* Allocate gimple stmt2 on the stack. */
5908 gassign *stmt2
5909 = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
5910 gimple_init (stmt2, GIMPLE_ASSIGN, 3);
5911 gimple_assign_set_rhs_code (stmt2, code2);
5912 gimple_assign_set_rhs1 (stmt2, op2a);
5913 gimple_assign_set_rhs2 (stmt2, op2b);
5914
5915 /* Allocate SSA names(lhs1) on the stack. */
5916 tree lhs1 = (tree)XALLOCA (tree_ssa_name);
5917 memset (lhs1, 0, sizeof (tree_ssa_name));
5918 TREE_SET_CODE (lhs1, SSA_NAME);
5919 TREE_TYPE (lhs1) = type;
5920 init_ssa_name_imm_use (lhs1);
5921
5922 /* Allocate SSA names(lhs2) on the stack. */
5923 tree lhs2 = (tree)XALLOCA (tree_ssa_name);
5924 memset (lhs2, 0, sizeof (tree_ssa_name));
5925 TREE_SET_CODE (lhs2, SSA_NAME);
5926 TREE_TYPE (lhs2) = type;
5927 init_ssa_name_imm_use (lhs2);
5928
5929 gimple_assign_set_lhs (stmt1, lhs1);
5930 gimple_assign_set_lhs (stmt2, lhs2);
5931
5932 gimple_match_op op (gimple_match_cond::UNCOND, code,
5933 type, gimple_assign_lhs (stmt1),
5934 gimple_assign_lhs (stmt2));
5935 if (op.resimplify (NULL, follow_all_ssa_edges))
5936 {
5937 if (gimple_simplified_result_is_gimple_val (&op))
5938 {
5939 tree res = op.ops[0];
5940 if (res == lhs1)
5941 return build2 (code1, type, op1a, op1b);
5942 else if (res == lhs2)
5943 return build2 (code2, type, op2a, op2b);
5944 else
5945 return res;
5946 }
ae9c3507
ML
5947 else if (op.code.is_tree_code ()
5948 && TREE_CODE_CLASS ((tree_code)op.code) == tcc_comparison)
5949 {
5950 tree op0 = op.ops[0];
5951 tree op1 = op.ops[1];
5952 if (op0 == lhs1 || op0 == lhs2 || op1 == lhs1 || op1 == lhs2)
5953 return NULL_TREE; /* not simple */
5954
5955 return build2 ((enum tree_code)op.code, op.type, op0, op1);
5956 }
5f487a34
LJH
5957 }
5958
5959 return NULL_TREE;
5960}
5961
e89065a1
SL
5962/* Try to simplify the AND of two comparisons, specified by
5963 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
5964 If this can be simplified to a single expression (without requiring
5965 introducing more SSA variables to hold intermediate values),
5966 return the resulting tree. Otherwise return NULL_TREE.
5967 If the result expression is non-null, it has boolean type. */
5968
5969tree
5f487a34
LJH
5970maybe_fold_and_comparisons (tree type,
5971 enum tree_code code1, tree op1a, tree op1b,
e89065a1
SL
5972 enum tree_code code2, tree op2a, tree op2b)
5973{
5f487a34 5974 if (tree t = and_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b))
e89065a1 5975 return t;
5f487a34
LJH
5976
5977 if (tree t = and_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b))
5978 return t;
5979
5980 if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_AND_EXPR, code1,
5981 op1a, op1b, code2, op2a,
5982 op2b))
5983 return t;
5984
5985 return NULL_TREE;
e89065a1
SL
5986}
5987
5988/* Helper function for or_comparisons_1: try to simplify the OR of the
5989 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5990 If INVERT is true, invert the value of VAR before doing the OR.
5991 Return NULL_EXPR if we can't simplify this to a single expression. */
5992
5993static tree
5f487a34 5994or_var_with_comparison (tree type, tree var, bool invert,
e89065a1
SL
5995 enum tree_code code2, tree op2a, tree op2b)
5996{
5997 tree t;
355fe088 5998 gimple *stmt = SSA_NAME_DEF_STMT (var);
e89065a1
SL
5999
6000 /* We can only deal with variables whose definitions are assignments. */
6001 if (!is_gimple_assign (stmt))
6002 return NULL_TREE;
6003
6004 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
6005 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
6006 Then we only have to consider the simpler non-inverted cases. */
6007 if (invert)
5f487a34 6008 t = and_var_with_comparison_1 (type, stmt,
e89065a1
SL
6009 invert_tree_comparison (code2, false),
6010 op2a, op2b);
6011 else
5f487a34 6012 t = or_var_with_comparison_1 (type, stmt, code2, op2a, op2b);
e89065a1
SL
6013 return canonicalize_bool (t, invert);
6014}
6015
6016/* Try to simplify the OR of the ssa variable defined by the assignment
6017 STMT with the comparison specified by (OP2A CODE2 OP2B).
6018 Return NULL_EXPR if we can't simplify this to a single expression. */
6019
6020static tree
5f487a34 6021or_var_with_comparison_1 (tree type, gimple *stmt,
e89065a1
SL
6022 enum tree_code code2, tree op2a, tree op2b)
6023{
6024 tree var = gimple_assign_lhs (stmt);
6025 tree true_test_var = NULL_TREE;
6026 tree false_test_var = NULL_TREE;
6027 enum tree_code innercode = gimple_assign_rhs_code (stmt);
6028
6029 /* Check for identities like (var OR (var != 0)) => true . */
6030 if (TREE_CODE (op2a) == SSA_NAME
6031 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
6032 {
6033 if ((code2 == NE_EXPR && integer_zerop (op2b))
6034 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
6035 {
6036 true_test_var = op2a;
6037 if (var == true_test_var)
6038 return var;
6039 }
6040 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
6041 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
6042 {
6043 false_test_var = op2a;
6044 if (var == false_test_var)
6045 return boolean_true_node;
6046 }
6047 }
6048
6049 /* If the definition is a comparison, recurse on it. */
6050 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
6051 {
5f487a34 6052 tree t = or_comparisons_1 (type, innercode,
e89065a1
SL
6053 gimple_assign_rhs1 (stmt),
6054 gimple_assign_rhs2 (stmt),
6055 code2,
6056 op2a,
6057 op2b);
6058 if (t)
6059 return t;
6060 }
6061
6062 /* If the definition is an AND or OR expression, we may be able to
6063 simplify by reassociating. */
eb9820c0
KT
6064 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
6065 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
e89065a1
SL
6066 {
6067 tree inner1 = gimple_assign_rhs1 (stmt);
6068 tree inner2 = gimple_assign_rhs2 (stmt);
355fe088 6069 gimple *s;
e89065a1
SL
6070 tree t;
6071 tree partial = NULL_TREE;
eb9820c0 6072 bool is_or = (innercode == BIT_IOR_EXPR);
e89065a1
SL
6073
6074 /* Check for boolean identities that don't require recursive examination
6075 of inner1/inner2:
6076 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
6077 inner1 OR (inner1 AND inner2) => inner1
6078 !inner1 OR (inner1 OR inner2) => true
6079 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
6080 */
6081 if (inner1 == true_test_var)
6082 return (is_or ? var : inner1);
6083 else if (inner2 == true_test_var)
6084 return (is_or ? var : inner2);
6085 else if (inner1 == false_test_var)
6086 return (is_or
6087 ? boolean_true_node
5f487a34
LJH
6088 : or_var_with_comparison (type, inner2, false, code2, op2a,
6089 op2b));
e89065a1
SL
6090 else if (inner2 == false_test_var)
6091 return (is_or
6092 ? boolean_true_node
5f487a34
LJH
6093 : or_var_with_comparison (type, inner1, false, code2, op2a,
6094 op2b));
e89065a1
SL
6095
6096 /* Next, redistribute/reassociate the OR across the inner tests.
6097 Compute the first partial result, (inner1 OR (op2a code op2b)) */
6098 if (TREE_CODE (inner1) == SSA_NAME
6099 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
6100 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5f487a34 6101 && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
e89065a1
SL
6102 gimple_assign_rhs1 (s),
6103 gimple_assign_rhs2 (s),
6104 code2, op2a, op2b)))
6105 {
6106 /* Handle the OR case, where we are reassociating:
6107 (inner1 OR inner2) OR (op2a code2 op2b)
6108 => (t OR inner2)
6109 If the partial result t is a constant, we win. Otherwise
6110 continue on to try reassociating with the other inner test. */
8236c8eb 6111 if (is_or)
e89065a1
SL
6112 {
6113 if (integer_onep (t))
6114 return boolean_true_node;
6115 else if (integer_zerop (t))
6116 return inner2;
6117 }
6118
6119 /* Handle the AND case, where we are redistributing:
6120 (inner1 AND inner2) OR (op2a code2 op2b)
6121 => (t AND (inner2 OR (op2a code op2b))) */
8236c8eb
JJ
6122 else if (integer_zerop (t))
6123 return boolean_false_node;
6124
6125 /* Save partial result for later. */
6126 partial = t;
e89065a1
SL
6127 }
6128
6129 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
6130 if (TREE_CODE (inner2) == SSA_NAME
6131 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
6132 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5f487a34 6133 && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
e89065a1
SL
6134 gimple_assign_rhs1 (s),
6135 gimple_assign_rhs2 (s),
6136 code2, op2a, op2b)))
6137 {
6138 /* Handle the OR case, where we are reassociating:
6139 (inner1 OR inner2) OR (op2a code2 op2b)
8236c8eb
JJ
6140 => (inner1 OR t)
6141 => (t OR partial) */
6142 if (is_or)
e89065a1
SL
6143 {
6144 if (integer_zerop (t))
6145 return inner1;
6146 else if (integer_onep (t))
6147 return boolean_true_node;
8236c8eb
JJ
6148 /* If both are the same, we can apply the identity
6149 (x OR x) == x. */
6150 else if (partial && same_bool_result_p (t, partial))
6151 return t;
e89065a1
SL
6152 }
6153
6154 /* Handle the AND case, where we are redistributing:
6155 (inner1 AND inner2) OR (op2a code2 op2b)
6156 => (t AND (inner1 OR (op2a code2 op2b)))
6157 => (t AND partial) */
6158 else
6159 {
6160 if (integer_zerop (t))
6161 return boolean_false_node;
6162 else if (partial)
6163 {
6164 /* We already got a simplification for the other
6165 operand to the redistributed AND expression. The
6166 interesting case is when at least one is true.
6167 Or, if both are the same, we can apply the identity
8236c8eb 6168 (x AND x) == x. */
e89065a1
SL
6169 if (integer_onep (partial))
6170 return t;
6171 else if (integer_onep (t))
6172 return partial;
6173 else if (same_bool_result_p (t, partial))
8236c8eb 6174 return t;
e89065a1
SL
6175 }
6176 }
6177 }
6178 }
6179 return NULL_TREE;
6180}
6181
6182/* Try to simplify the OR of two comparisons defined by
6183 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
6184 If this can be done without constructing an intermediate value,
6185 return the resulting tree; otherwise NULL_TREE is returned.
6186 This function is deliberately asymmetric as it recurses on SSA_DEFs
6187 in the first comparison but not the second. */
6188
6189static tree
5f487a34 6190or_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
e89065a1
SL
6191 enum tree_code code2, tree op2a, tree op2b)
6192{
ae22ac3c 6193 tree truth_type = truth_type_for (TREE_TYPE (op1a));
31ed6226 6194
e89065a1
SL
6195 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
6196 if (operand_equal_p (op1a, op2a, 0)
6197 && operand_equal_p (op1b, op2b, 0))
6198 {
eb9820c0 6199 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
6200 tree t = combine_comparisons (UNKNOWN_LOCATION,
6201 TRUTH_ORIF_EXPR, code1, code2,
31ed6226 6202 truth_type, op1a, op1b);
e89065a1
SL
6203 if (t)
6204 return t;
6205 }
6206
6207 /* Likewise the swapped case of the above. */
6208 if (operand_equal_p (op1a, op2b, 0)
6209 && operand_equal_p (op1b, op2a, 0))
6210 {
eb9820c0 6211 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
6212 tree t = combine_comparisons (UNKNOWN_LOCATION,
6213 TRUTH_ORIF_EXPR, code1,
6214 swap_tree_comparison (code2),
31ed6226 6215 truth_type, op1a, op1b);
e89065a1
SL
6216 if (t)
6217 return t;
6218 }
6219
e89065a1
SL
6220 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
6221 NAME's definition is a truth value. See if there are any simplifications
6222 that can be done against the NAME's definition. */
6223 if (TREE_CODE (op1a) == SSA_NAME
6224 && (code1 == NE_EXPR || code1 == EQ_EXPR)
6225 && (integer_zerop (op1b) || integer_onep (op1b)))
6226 {
6227 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
6228 || (code1 == NE_EXPR && integer_onep (op1b)));
355fe088 6229 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
e89065a1
SL
6230 switch (gimple_code (stmt))
6231 {
6232 case GIMPLE_ASSIGN:
6233 /* Try to simplify by copy-propagating the definition. */
5f487a34
LJH
6234 return or_var_with_comparison (type, op1a, invert, code2, op2a,
6235 op2b);
e89065a1
SL
6236
6237 case GIMPLE_PHI:
6238 /* If every argument to the PHI produces the same result when
6239 ORed with the second comparison, we win.
6240 Do not do this unless the type is bool since we need a bool
6241 result here anyway. */
6242 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
6243 {
6244 tree result = NULL_TREE;
6245 unsigned i;
6246 for (i = 0; i < gimple_phi_num_args (stmt); i++)
6247 {
6248 tree arg = gimple_phi_arg_def (stmt, i);
6249
6250 /* If this PHI has itself as an argument, ignore it.
6251 If all the other args produce the same result,
6252 we're still OK. */
6253 if (arg == gimple_phi_result (stmt))
6254 continue;
6255 else if (TREE_CODE (arg) == INTEGER_CST)
6256 {
6257 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
6258 {
6259 if (!result)
6260 result = boolean_true_node;
6261 else if (!integer_onep (result))
6262 return NULL_TREE;
6263 }
6264 else if (!result)
6265 result = fold_build2 (code2, boolean_type_node,
6266 op2a, op2b);
6267 else if (!same_bool_comparison_p (result,
6268 code2, op2a, op2b))
6269 return NULL_TREE;
6270 }
0e8b84ec
JJ
6271 else if (TREE_CODE (arg) == SSA_NAME
6272 && !SSA_NAME_IS_DEFAULT_DEF (arg))
e89065a1 6273 {
6c66f733 6274 tree temp;
355fe088 6275 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6c66f733
JJ
6276 /* In simple cases we can look through PHI nodes,
6277 but we have to be careful with loops.
6278 See PR49073. */
6279 if (! dom_info_available_p (CDI_DOMINATORS)
6280 || gimple_bb (def_stmt) == gimple_bb (stmt)
6281 || dominated_by_p (CDI_DOMINATORS,
6282 gimple_bb (def_stmt),
6283 gimple_bb (stmt)))
6284 return NULL_TREE;
5f487a34 6285 temp = or_var_with_comparison (type, arg, invert, code2,
6c66f733 6286 op2a, op2b);
e89065a1
SL
6287 if (!temp)
6288 return NULL_TREE;
6289 else if (!result)
6290 result = temp;
6291 else if (!same_bool_result_p (result, temp))
6292 return NULL_TREE;
6293 }
6294 else
6295 return NULL_TREE;
6296 }
6297 return result;
6298 }
6299
6300 default:
6301 break;
6302 }
6303 }
6304 return NULL_TREE;
6305}
6306
6307/* Try to simplify the OR of two comparisons, specified by
6308 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
6309 If this can be simplified to a single expression (without requiring
6310 introducing more SSA variables to hold intermediate values),
6311 return the resulting tree. Otherwise return NULL_TREE.
6312 If the result expression is non-null, it has boolean type. */
6313
6314tree
5f487a34
LJH
6315maybe_fold_or_comparisons (tree type,
6316 enum tree_code code1, tree op1a, tree op1b,
e89065a1
SL
6317 enum tree_code code2, tree op2a, tree op2b)
6318{
5f487a34 6319 if (tree t = or_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b))
e89065a1 6320 return t;
cfef45c8 6321
5f487a34
LJH
6322 if (tree t = or_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b))
6323 return t;
6324
6325 if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_IOR_EXPR, code1,
6326 op1a, op1b, code2, op2a,
6327 op2b))
6328 return t;
6329
6330 return NULL_TREE;
6331}
cfef45c8
RG
6332
6333/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6334
6335 Either NULL_TREE, a simplified but non-constant or a constant
6336 is returned.
6337
6338 ??? This should go into a gimple-fold-inline.h file to be eventually
6339 privatized with the single valueize function used in the various TUs
6340 to avoid the indirect function call overhead. */
6341
6342tree
355fe088 6343gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
d2a85801 6344 tree (*gvalueize) (tree))
cfef45c8 6345{
5d75ad95 6346 gimple_match_op res_op;
45cc9f96
RB
6347 /* ??? The SSA propagators do not correctly deal with following SSA use-def
6348 edges if there are intermediate VARYING defs. For this reason
6349 do not follow SSA edges here even though SCCVN can technically
6350 just deal fine with that. */
5d75ad95 6351 if (gimple_simplify (stmt, &res_op, NULL, gvalueize, valueize))
45cc9f96 6352 {
34050b6b 6353 tree res = NULL_TREE;
5d75ad95
RS
6354 if (gimple_simplified_result_is_gimple_val (&res_op))
6355 res = res_op.ops[0];
34050b6b 6356 else if (mprts_hook)
5d75ad95 6357 res = mprts_hook (&res_op);
34050b6b 6358 if (res)
45cc9f96 6359 {
34050b6b
RB
6360 if (dump_file && dump_flags & TDF_DETAILS)
6361 {
6362 fprintf (dump_file, "Match-and-simplified ");
6363 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
6364 fprintf (dump_file, " to ");
ef6cb4c7 6365 print_generic_expr (dump_file, res);
34050b6b
RB
6366 fprintf (dump_file, "\n");
6367 }
6368 return res;
45cc9f96 6369 }
45cc9f96
RB
6370 }
6371
cfef45c8
RG
6372 location_t loc = gimple_location (stmt);
6373 switch (gimple_code (stmt))
6374 {
6375 case GIMPLE_ASSIGN:
6376 {
6377 enum tree_code subcode = gimple_assign_rhs_code (stmt);
6378
6379 switch (get_gimple_rhs_class (subcode))
6380 {
6381 case GIMPLE_SINGLE_RHS:
6382 {
6383 tree rhs = gimple_assign_rhs1 (stmt);
6384 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
6385
6386 if (TREE_CODE (rhs) == SSA_NAME)
6387 {
6388 /* If the RHS is an SSA_NAME, return its known constant value,
6389 if any. */
6390 return (*valueize) (rhs);
6391 }
6392 /* Handle propagating invariant addresses into address
6393 operations. */
6394 else if (TREE_CODE (rhs) == ADDR_EXPR
6395 && !is_gimple_min_invariant (rhs))
6396 {
a90c8804 6397 poly_int64 offset = 0;
cfef45c8
RG
6398 tree base;
6399 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
6400 &offset,
6401 valueize);
6402 if (base
6403 && (CONSTANT_CLASS_P (base)
6404 || decl_address_invariant_p (base)))
6405 return build_invariant_address (TREE_TYPE (rhs),
6406 base, offset);
6407 }
6408 else if (TREE_CODE (rhs) == CONSTRUCTOR
6409 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
928686b1
RS
6410 && known_eq (CONSTRUCTOR_NELTS (rhs),
6411 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
cfef45c8 6412 {
794e3180
RS
6413 unsigned i, nelts;
6414 tree val;
cfef45c8 6415
928686b1 6416 nelts = CONSTRUCTOR_NELTS (rhs);
5ebaa477 6417 tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
cfef45c8
RG
6418 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
6419 {
6420 val = (*valueize) (val);
6421 if (TREE_CODE (val) == INTEGER_CST
6422 || TREE_CODE (val) == REAL_CST
6423 || TREE_CODE (val) == FIXED_CST)
794e3180 6424 vec.quick_push (val);
cfef45c8
RG
6425 else
6426 return NULL_TREE;
6427 }
6428
5ebaa477 6429 return vec.build ();
cfef45c8 6430 }
bdf37f7a
JH
6431 if (subcode == OBJ_TYPE_REF)
6432 {
6433 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
6434 /* If callee is constant, we can fold away the wrapper. */
6435 if (is_gimple_min_invariant (val))
6436 return val;
6437 }
cfef45c8
RG
6438
6439 if (kind == tcc_reference)
6440 {
6441 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
6442 || TREE_CODE (rhs) == REALPART_EXPR
6443 || TREE_CODE (rhs) == IMAGPART_EXPR)
6444 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6445 {
6446 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6447 return fold_unary_loc (EXPR_LOCATION (rhs),
6448 TREE_CODE (rhs),
6449 TREE_TYPE (rhs), val);
6450 }
6451 else if (TREE_CODE (rhs) == BIT_FIELD_REF
6452 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6453 {
6454 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6455 return fold_ternary_loc (EXPR_LOCATION (rhs),
6456 TREE_CODE (rhs),
6457 TREE_TYPE (rhs), val,
6458 TREE_OPERAND (rhs, 1),
6459 TREE_OPERAND (rhs, 2));
6460 }
6461 else if (TREE_CODE (rhs) == MEM_REF
6462 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6463 {
6464 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6465 if (TREE_CODE (val) == ADDR_EXPR
6466 && is_gimple_min_invariant (val))
6467 {
6468 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
6469 unshare_expr (val),
6470 TREE_OPERAND (rhs, 1));
6471 if (tem)
6472 rhs = tem;
6473 }
6474 }
6475 return fold_const_aggregate_ref_1 (rhs, valueize);
6476 }
6477 else if (kind == tcc_declaration)
6478 return get_symbol_constant_value (rhs);
6479 return rhs;
6480 }
6481
6482 case GIMPLE_UNARY_RHS:
f3582e54 6483 return NULL_TREE;
cfef45c8
RG
6484
6485 case GIMPLE_BINARY_RHS:
4b1b9e64
RB
6486 /* Translate &x + CST into an invariant form suitable for
6487 further propagation. */
6488 if (subcode == POINTER_PLUS_EXPR)
6489 {
4b1b9e64
RB
6490 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6491 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
4b1b9e64
RB
6492 if (TREE_CODE (op0) == ADDR_EXPR
6493 && TREE_CODE (op1) == INTEGER_CST)
6494 {
6495 tree off = fold_convert (ptr_type_node, op1);
4e3d3e40
RB
6496 return build1_loc
6497 (loc, ADDR_EXPR, TREE_TYPE (op0),
4b1b9e64
RB
6498 fold_build2 (MEM_REF,
6499 TREE_TYPE (TREE_TYPE (op0)),
6500 unshare_expr (op0), off));
6501 }
6502 }
59c20dc7
RB
6503 /* Canonicalize bool != 0 and bool == 0 appearing after
6504 valueization. While gimple_simplify handles this
6505 it can get confused by the ~X == 1 -> X == 0 transform
6506 which we cant reduce to a SSA name or a constant
6507 (and we have no way to tell gimple_simplify to not
6508 consider those transforms in the first place). */
6509 else if (subcode == EQ_EXPR
6510 || subcode == NE_EXPR)
6511 {
6512 tree lhs = gimple_assign_lhs (stmt);
6513 tree op0 = gimple_assign_rhs1 (stmt);
6514 if (useless_type_conversion_p (TREE_TYPE (lhs),
6515 TREE_TYPE (op0)))
6516 {
6517 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6518 op0 = (*valueize) (op0);
8861704d
RB
6519 if (TREE_CODE (op0) == INTEGER_CST)
6520 std::swap (op0, op1);
6521 if (TREE_CODE (op1) == INTEGER_CST
6522 && ((subcode == NE_EXPR && integer_zerop (op1))
6523 || (subcode == EQ_EXPR && integer_onep (op1))))
6524 return op0;
59c20dc7
RB
6525 }
6526 }
4b1b9e64 6527 return NULL_TREE;
cfef45c8
RG
6528
6529 case GIMPLE_TERNARY_RHS:
6530 {
6531 /* Handle ternary operators that can appear in GIMPLE form. */
6532 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6533 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6534 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
cfef45c8
RG
6535 return fold_ternary_loc (loc, subcode,
6536 gimple_expr_type (stmt), op0, op1, op2);
6537 }
6538
6539 default:
6540 gcc_unreachable ();
6541 }
6542 }
6543
6544 case GIMPLE_CALL:
6545 {
25583c4f 6546 tree fn;
538dd0b7 6547 gcall *call_stmt = as_a <gcall *> (stmt);
25583c4f
RS
6548
6549 if (gimple_call_internal_p (stmt))
31e071ae
MP
6550 {
6551 enum tree_code subcode = ERROR_MARK;
6552 switch (gimple_call_internal_fn (stmt))
6553 {
6554 case IFN_UBSAN_CHECK_ADD:
6555 subcode = PLUS_EXPR;
6556 break;
6557 case IFN_UBSAN_CHECK_SUB:
6558 subcode = MINUS_EXPR;
6559 break;
6560 case IFN_UBSAN_CHECK_MUL:
6561 subcode = MULT_EXPR;
6562 break;
68fa96d6
ML
6563 case IFN_BUILTIN_EXPECT:
6564 {
6565 tree arg0 = gimple_call_arg (stmt, 0);
6566 tree op0 = (*valueize) (arg0);
6567 if (TREE_CODE (op0) == INTEGER_CST)
6568 return op0;
6569 return NULL_TREE;
6570 }
31e071ae
MP
6571 default:
6572 return NULL_TREE;
6573 }
368b454d
JJ
6574 tree arg0 = gimple_call_arg (stmt, 0);
6575 tree arg1 = gimple_call_arg (stmt, 1);
6576 tree op0 = (*valueize) (arg0);
6577 tree op1 = (*valueize) (arg1);
31e071ae
MP
6578
6579 if (TREE_CODE (op0) != INTEGER_CST
6580 || TREE_CODE (op1) != INTEGER_CST)
368b454d
JJ
6581 {
6582 switch (subcode)
6583 {
6584 case MULT_EXPR:
6585 /* x * 0 = 0 * x = 0 without overflow. */
6586 if (integer_zerop (op0) || integer_zerop (op1))
6587 return build_zero_cst (TREE_TYPE (arg0));
6588 break;
6589 case MINUS_EXPR:
6590 /* y - y = 0 without overflow. */
6591 if (operand_equal_p (op0, op1, 0))
6592 return build_zero_cst (TREE_TYPE (arg0));
6593 break;
6594 default:
6595 break;
6596 }
6597 }
6598 tree res
6599 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
31e071ae
MP
6600 if (res
6601 && TREE_CODE (res) == INTEGER_CST
6602 && !TREE_OVERFLOW (res))
6603 return res;
6604 return NULL_TREE;
6605 }
25583c4f
RS
6606
6607 fn = (*valueize) (gimple_call_fn (stmt));
cfef45c8 6608 if (TREE_CODE (fn) == ADDR_EXPR
6773658a 6609 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
3d78e008 6610 && fndecl_built_in_p (TREE_OPERAND (fn, 0))
5c944c6c
RB
6611 && gimple_builtin_call_types_compatible_p (stmt,
6612 TREE_OPERAND (fn, 0)))
cfef45c8
RG
6613 {
6614 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
a6a0570f 6615 tree retval;
cfef45c8
RG
6616 unsigned i;
6617 for (i = 0; i < gimple_call_num_args (stmt); ++i)
6618 args[i] = (*valueize) (gimple_call_arg (stmt, i));
a6a0570f 6619 retval = fold_builtin_call_array (loc,
538dd0b7 6620 gimple_call_return_type (call_stmt),
cfef45c8 6621 fn, gimple_call_num_args (stmt), args);
cfef45c8 6622 if (retval)
5c944c6c
RB
6623 {
6624 /* fold_call_expr wraps the result inside a NOP_EXPR. */
6625 STRIP_NOPS (retval);
538dd0b7
DM
6626 retval = fold_convert (gimple_call_return_type (call_stmt),
6627 retval);
5c944c6c 6628 }
cfef45c8
RG
6629 return retval;
6630 }
6631 return NULL_TREE;
6632 }
6633
6634 default:
6635 return NULL_TREE;
6636 }
6637}
6638
6639/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6640 Returns NULL_TREE if folding to a constant is not possible, otherwise
6641 returns a constant according to is_gimple_min_invariant. */
6642
6643tree
355fe088 6644gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
cfef45c8
RG
6645{
6646 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
6647 if (res && is_gimple_min_invariant (res))
6648 return res;
6649 return NULL_TREE;
6650}
6651
6652
6653/* The following set of functions are supposed to fold references using
6654 their constant initializers. */
6655
cfef45c8
RG
6656/* See if we can find constructor defining value of BASE.
6657 When we know the consructor with constant offset (such as
6658 base is array[40] and we do know constructor of array), then
6659 BIT_OFFSET is adjusted accordingly.
6660
6661 As a special case, return error_mark_node when constructor
6662 is not explicitly available, but it is known to be zero
6663 such as 'static const int a;'. */
6664static tree
588db50c 6665get_base_constructor (tree base, poly_int64_pod *bit_offset,
cfef45c8
RG
6666 tree (*valueize)(tree))
6667{
588db50c 6668 poly_int64 bit_offset2, size, max_size;
ee45a32d
EB
6669 bool reverse;
6670
cfef45c8
RG
6671 if (TREE_CODE (base) == MEM_REF)
6672 {
6a5aca53
ML
6673 poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
6674 if (!boff.to_shwi (bit_offset))
6675 return NULL_TREE;
cfef45c8
RG
6676
6677 if (valueize
6678 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
6679 base = valueize (TREE_OPERAND (base, 0));
6680 if (!base || TREE_CODE (base) != ADDR_EXPR)
6681 return NULL_TREE;
6682 base = TREE_OPERAND (base, 0);
6683 }
13e88953
RB
6684 else if (valueize
6685 && TREE_CODE (base) == SSA_NAME)
6686 base = valueize (base);
cfef45c8
RG
6687
6688 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
6689 DECL_INITIAL. If BASE is a nested reference into another
6690 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
6691 the inner reference. */
6692 switch (TREE_CODE (base))
6693 {
6694 case VAR_DECL:
cfef45c8 6695 case CONST_DECL:
6a6dac52
JH
6696 {
6697 tree init = ctor_for_folding (base);
6698
688010ba 6699 /* Our semantic is exact opposite of ctor_for_folding;
6a6dac52
JH
6700 NULL means unknown, while error_mark_node is 0. */
6701 if (init == error_mark_node)
6702 return NULL_TREE;
6703 if (!init)
6704 return error_mark_node;
6705 return init;
6706 }
cfef45c8 6707
13e88953
RB
6708 case VIEW_CONVERT_EXPR:
6709 return get_base_constructor (TREE_OPERAND (base, 0),
6710 bit_offset, valueize);
6711
cfef45c8
RG
6712 case ARRAY_REF:
6713 case COMPONENT_REF:
ee45a32d
EB
6714 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
6715 &reverse);
588db50c 6716 if (!known_size_p (max_size) || maybe_ne (size, max_size))
cfef45c8
RG
6717 return NULL_TREE;
6718 *bit_offset += bit_offset2;
6719 return get_base_constructor (base, bit_offset, valueize);
6720
cfef45c8
RG
6721 case CONSTRUCTOR:
6722 return base;
6723
6724 default:
13e88953
RB
6725 if (CONSTANT_CLASS_P (base))
6726 return base;
6727
cfef45c8
RG
6728 return NULL_TREE;
6729 }
6730}
6731
35b4d3a6
MS
6732/* CTOR is CONSTRUCTOR of an array type. Fold a reference of SIZE bits
6733 to the memory at bit OFFSET. When non-null, TYPE is the expected
6734 type of the reference; otherwise the type of the referenced element
6735 is used instead. When SIZE is zero, attempt to fold a reference to
6736 the entire element which OFFSET refers to. Increment *SUBOFF by
6737 the bit offset of the accessed element. */
cfef45c8
RG
6738
6739static tree
6740fold_array_ctor_reference (tree type, tree ctor,
6741 unsigned HOST_WIDE_INT offset,
c44c2088 6742 unsigned HOST_WIDE_INT size,
35b4d3a6
MS
6743 tree from_decl,
6744 unsigned HOST_WIDE_INT *suboff)
cfef45c8 6745{
807e902e
KZ
6746 offset_int low_bound;
6747 offset_int elt_size;
807e902e 6748 offset_int access_index;
6a636014 6749 tree domain_type = NULL_TREE;
cfef45c8
RG
6750 HOST_WIDE_INT inner_offset;
6751
6752 /* Compute low bound and elt size. */
eb8f1123
RG
6753 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
6754 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
cfef45c8
RG
6755 if (domain_type && TYPE_MIN_VALUE (domain_type))
6756 {
6aa238a1 6757 /* Static constructors for variably sized objects make no sense. */
9ef2eff0
RB
6758 if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
6759 return NULL_TREE;
807e902e 6760 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
cfef45c8
RG
6761 }
6762 else
807e902e 6763 low_bound = 0;
6aa238a1 6764 /* Static constructors for variably sized objects make no sense. */
9ef2eff0
RB
6765 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
6766 return NULL_TREE;
807e902e 6767 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
cfef45c8 6768
35b4d3a6 6769 /* When TYPE is non-null, verify that it specifies a constant-sized
831e688a 6770 access of a multiple of the array element size. Avoid division
6aa238a1
MS
6771 by zero below when ELT_SIZE is zero, such as with the result of
6772 an initializer for a zero-length array or an empty struct. */
6773 if (elt_size == 0
6774 || (type
6775 && (!TYPE_SIZE_UNIT (type)
831e688a 6776 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST)))
cfef45c8
RG
6777 return NULL_TREE;
6778
6779 /* Compute the array index we look for. */
807e902e
KZ
6780 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
6781 elt_size);
27bcd47c 6782 access_index += low_bound;
cfef45c8
RG
6783
6784 /* And offset within the access. */
27bcd47c 6785 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
cfef45c8 6786
3c076c96
JJ
6787 unsigned HOST_WIDE_INT elt_sz = elt_size.to_uhwi ();
6788 if (size > elt_sz * BITS_PER_UNIT)
831e688a
RB
6789 {
6790 /* native_encode_expr constraints. */
6791 if (size > MAX_BITSIZE_MODE_ANY_MODE
6792 || size % BITS_PER_UNIT != 0
3c076c96
JJ
6793 || inner_offset % BITS_PER_UNIT != 0
6794 || elt_sz > MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT)
831e688a
RB
6795 return NULL_TREE;
6796
6797 unsigned ctor_idx;
6798 tree val = get_array_ctor_element_at_index (ctor, access_index,
6799 &ctor_idx);
6800 if (!val && ctor_idx >= CONSTRUCTOR_NELTS (ctor))
6801 return build_zero_cst (type);
6802
6803 /* native-encode adjacent ctor elements. */
6804 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
6805 unsigned bufoff = 0;
6806 offset_int index = 0;
6807 offset_int max_index = access_index;
6808 constructor_elt *elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
6809 if (!val)
6810 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
6811 else if (!CONSTANT_CLASS_P (val))
6812 return NULL_TREE;
6813 if (!elt->index)
6814 ;
6815 else if (TREE_CODE (elt->index) == RANGE_EXPR)
6816 {
6817 index = wi::to_offset (TREE_OPERAND (elt->index, 0));
6818 max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
6819 }
6820 else
6821 index = max_index = wi::to_offset (elt->index);
6822 index = wi::umax (index, access_index);
6823 do
6824 {
3c076c96
JJ
6825 if (bufoff + elt_sz > sizeof (buf))
6826 elt_sz = sizeof (buf) - bufoff;
6827 int len = native_encode_expr (val, buf + bufoff, elt_sz,
831e688a 6828 inner_offset / BITS_PER_UNIT);
3c076c96 6829 if (len != (int) elt_sz - inner_offset / BITS_PER_UNIT)
831e688a
RB
6830 return NULL_TREE;
6831 inner_offset = 0;
6832 bufoff += len;
6833
6834 access_index += 1;
6835 if (wi::cmpu (access_index, index) == 0)
6836 val = elt->value;
6837 else if (wi::cmpu (access_index, max_index) > 0)
6838 {
6839 ctor_idx++;
6840 if (ctor_idx >= CONSTRUCTOR_NELTS (ctor))
6841 {
6842 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
6843 ++max_index;
6844 }
6845 else
6846 {
6847 elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
6848 index = 0;
6849 max_index = access_index;
6850 if (!elt->index)
6851 ;
6852 else if (TREE_CODE (elt->index) == RANGE_EXPR)
6853 {
6854 index = wi::to_offset (TREE_OPERAND (elt->index, 0));
6855 max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
6856 }
6857 else
6858 index = max_index = wi::to_offset (elt->index);
6859 index = wi::umax (index, access_index);
6860 if (wi::cmpu (access_index, index) == 0)
6861 val = elt->value;
6862 else
6863 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
6864 }
6865 }
6866 }
6867 while (bufoff < size / BITS_PER_UNIT);
6868 *suboff += size;
6869 return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
6870 }
6871
6a636014 6872 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
35b4d3a6
MS
6873 {
6874 if (!size && TREE_CODE (val) != CONSTRUCTOR)
6875 {
6876 /* For the final reference to the entire accessed element
6877 (SIZE is zero), reset INNER_OFFSET, disegard TYPE (which
6878 may be null) in favor of the type of the element, and set
6879 SIZE to the size of the accessed element. */
6880 inner_offset = 0;
6881 type = TREE_TYPE (val);
6e41c27b 6882 size = elt_sz * BITS_PER_UNIT;
35b4d3a6 6883 }
6e41c27b
RB
6884 else if (size && access_index < CONSTRUCTOR_NELTS (ctor) - 1
6885 && TREE_CODE (val) == CONSTRUCTOR
6886 && (elt_sz * BITS_PER_UNIT - inner_offset) < size)
6887 /* If this isn't the last element in the CTOR and a CTOR itself
6888 and it does not cover the whole object we are requesting give up
6889 since we're not set up for combining from multiple CTORs. */
6890 return NULL_TREE;
35b4d3a6 6891
6e41c27b 6892 *suboff += access_index.to_uhwi () * elt_sz * BITS_PER_UNIT;
35b4d3a6
MS
6893 return fold_ctor_reference (type, val, inner_offset, size, from_decl,
6894 suboff);
6895 }
cfef45c8 6896
35b4d3a6
MS
6897 /* Memory not explicitly mentioned in constructor is 0 (or
6898 the reference is out of range). */
6899 return type ? build_zero_cst (type) : NULL_TREE;
cfef45c8
RG
6900}
6901
35b4d3a6
MS
6902/* CTOR is CONSTRUCTOR of an aggregate or vector. Fold a reference
6903 of SIZE bits to the memory at bit OFFSET. When non-null, TYPE
6904 is the expected type of the reference; otherwise the type of
6905 the referenced member is used instead. When SIZE is zero,
6906 attempt to fold a reference to the entire member which OFFSET
6907 refers to; in this case. Increment *SUBOFF by the bit offset
6908 of the accessed member. */
cfef45c8
RG
6909
6910static tree
6911fold_nonarray_ctor_reference (tree type, tree ctor,
6912 unsigned HOST_WIDE_INT offset,
c44c2088 6913 unsigned HOST_WIDE_INT size,
35b4d3a6
MS
6914 tree from_decl,
6915 unsigned HOST_WIDE_INT *suboff)
cfef45c8
RG
6916{
6917 unsigned HOST_WIDE_INT cnt;
6918 tree cfield, cval;
6919
6920 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
6921 cval)
6922 {
6923 tree byte_offset = DECL_FIELD_OFFSET (cfield);
6924 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
6925 tree field_size = DECL_SIZE (cfield);
35b4d3a6
MS
6926
6927 if (!field_size)
6928 {
6929 /* Determine the size of the flexible array member from
6930 the size of the initializer provided for it. */
6931 field_size = TYPE_SIZE (TREE_TYPE (cval));
6932 }
cfef45c8
RG
6933
6934 /* Variable sized objects in static constructors makes no sense,
6935 but field_size can be NULL for flexible array members. */
6936 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
6937 && TREE_CODE (byte_offset) == INTEGER_CST
6938 && (field_size != NULL_TREE
6939 ? TREE_CODE (field_size) == INTEGER_CST
6940 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
6941
6942 /* Compute bit offset of the field. */
35b4d3a6
MS
6943 offset_int bitoffset
6944 = (wi::to_offset (field_offset)
6945 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
cfef45c8 6946 /* Compute bit offset where the field ends. */
35b4d3a6 6947 offset_int bitoffset_end;
cfef45c8 6948 if (field_size != NULL_TREE)
807e902e 6949 bitoffset_end = bitoffset + wi::to_offset (field_size);
cfef45c8 6950 else
807e902e 6951 bitoffset_end = 0;
cfef45c8 6952
35b4d3a6
MS
6953 /* Compute the bit offset of the end of the desired access.
6954 As a special case, if the size of the desired access is
6955 zero, assume the access is to the entire field (and let
6956 the caller make any necessary adjustments by storing
6957 the actual bounds of the field in FIELDBOUNDS). */
6958 offset_int access_end = offset_int (offset);
6959 if (size)
6960 access_end += size;
6961 else
6962 access_end = bitoffset_end;
b8b2b009 6963
35b4d3a6
MS
6964 /* Is there any overlap between the desired access at
6965 [OFFSET, OFFSET+SIZE) and the offset of the field within
6966 the object at [BITOFFSET, BITOFFSET_END)? */
807e902e 6967 if (wi::cmps (access_end, bitoffset) > 0
cfef45c8 6968 && (field_size == NULL_TREE
807e902e 6969 || wi::lts_p (offset, bitoffset_end)))
cfef45c8 6970 {
35b4d3a6
MS
6971 *suboff += bitoffset.to_uhwi ();
6972
6973 if (!size && TREE_CODE (cval) != CONSTRUCTOR)
6974 {
6975 /* For the final reference to the entire accessed member
6976 (SIZE is zero), reset OFFSET, disegard TYPE (which may
6977 be null) in favor of the type of the member, and set
6978 SIZE to the size of the accessed member. */
6979 offset = bitoffset.to_uhwi ();
6980 type = TREE_TYPE (cval);
6981 size = (bitoffset_end - bitoffset).to_uhwi ();
6982 }
6983
6984 /* We do have overlap. Now see if the field is large enough
6985 to cover the access. Give up for accesses that extend
6986 beyond the end of the object or that span multiple fields. */
807e902e 6987 if (wi::cmps (access_end, bitoffset_end) > 0)
cfef45c8 6988 return NULL_TREE;
032c80e9 6989 if (offset < bitoffset)
b8b2b009 6990 return NULL_TREE;
35b4d3a6
MS
6991
6992 offset_int inner_offset = offset_int (offset) - bitoffset;
cfef45c8 6993 return fold_ctor_reference (type, cval,
27bcd47c 6994 inner_offset.to_uhwi (), size,
35b4d3a6 6995 from_decl, suboff);
cfef45c8
RG
6996 }
6997 }
14b7950f
MS
6998
6999 if (!type)
7000 return NULL_TREE;
7001
7002 return build_zero_cst (type);
cfef45c8
RG
7003}
7004
35b4d3a6 7005/* CTOR is value initializing memory. Fold a reference of TYPE and
14b7950f 7006 bit size POLY_SIZE to the memory at bit POLY_OFFSET. When POLY_SIZE
35b4d3a6
MS
7007 is zero, attempt to fold a reference to the entire subobject
7008 which OFFSET refers to. This is used when folding accesses to
7009 string members of aggregates. When non-null, set *SUBOFF to
7010 the bit offset of the accessed subobject. */
cfef45c8 7011
8403c2cf 7012tree
35b4d3a6
MS
7013fold_ctor_reference (tree type, tree ctor, const poly_uint64 &poly_offset,
7014 const poly_uint64 &poly_size, tree from_decl,
7015 unsigned HOST_WIDE_INT *suboff /* = NULL */)
cfef45c8
RG
7016{
7017 tree ret;
7018
7019 /* We found the field with exact match. */
35b4d3a6
MS
7020 if (type
7021 && useless_type_conversion_p (type, TREE_TYPE (ctor))
30acf282 7022 && known_eq (poly_offset, 0U))
9d60be38 7023 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
cfef45c8 7024
30acf282
RS
7025 /* The remaining optimizations need a constant size and offset. */
7026 unsigned HOST_WIDE_INT size, offset;
7027 if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
7028 return NULL_TREE;
7029
cfef45c8
RG
7030 /* We are at the end of walk, see if we can view convert the
7031 result. */
7032 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
7033 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
3d8208ce
TP
7034 && !compare_tree_int (TYPE_SIZE (type), size)
7035 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
cfef45c8 7036 {
9d60be38 7037 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
cfef45c8 7038 if (ret)
672d9f8e
RB
7039 {
7040 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
7041 if (ret)
7042 STRIP_USELESS_TYPE_CONVERSION (ret);
7043 }
cfef45c8
RG
7044 return ret;
7045 }
b2505143
RB
7046 /* For constants and byte-aligned/sized reads try to go through
7047 native_encode/interpret. */
7048 if (CONSTANT_CLASS_P (ctor)
7049 && BITS_PER_UNIT == 8
7050 && offset % BITS_PER_UNIT == 0
ea69031c 7051 && offset / BITS_PER_UNIT <= INT_MAX
b2505143 7052 && size % BITS_PER_UNIT == 0
ea69031c
JJ
7053 && size <= MAX_BITSIZE_MODE_ANY_MODE
7054 && can_native_interpret_type_p (type))
b2505143
RB
7055 {
7056 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1ff0a84c
JJ
7057 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
7058 offset / BITS_PER_UNIT);
7059 if (len > 0)
7060 return native_interpret_expr (type, buf, len);
b2505143 7061 }
cfef45c8
RG
7062 if (TREE_CODE (ctor) == CONSTRUCTOR)
7063 {
35b4d3a6
MS
7064 unsigned HOST_WIDE_INT dummy = 0;
7065 if (!suboff)
7066 suboff = &dummy;
cfef45c8 7067
ea69031c 7068 tree ret;
eb8f1123
RG
7069 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
7070 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
ea69031c
JJ
7071 ret = fold_array_ctor_reference (type, ctor, offset, size,
7072 from_decl, suboff);
7073 else
7074 ret = fold_nonarray_ctor_reference (type, ctor, offset, size,
7075 from_decl, suboff);
7076
7077 /* Fall back to native_encode_initializer. Needs to be done
7078 only in the outermost fold_ctor_reference call (because it itself
7079 recurses into CONSTRUCTORs) and doesn't update suboff. */
7080 if (ret == NULL_TREE
7081 && suboff == &dummy
7082 && BITS_PER_UNIT == 8
7083 && offset % BITS_PER_UNIT == 0
7084 && offset / BITS_PER_UNIT <= INT_MAX
7085 && size % BITS_PER_UNIT == 0
7086 && size <= MAX_BITSIZE_MODE_ANY_MODE
7087 && can_native_interpret_type_p (type))
7088 {
7089 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
7090 int len = native_encode_initializer (ctor, buf, size / BITS_PER_UNIT,
7091 offset / BITS_PER_UNIT);
7092 if (len > 0)
7093 return native_interpret_expr (type, buf, len);
7094 }
35b4d3a6 7095
ea69031c 7096 return ret;
cfef45c8
RG
7097 }
7098
7099 return NULL_TREE;
7100}
7101
7102/* Return the tree representing the element referenced by T if T is an
7103 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
7104 names using VALUEIZE. Return NULL_TREE otherwise. */
7105
7106tree
7107fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
7108{
7109 tree ctor, idx, base;
588db50c 7110 poly_int64 offset, size, max_size;
cfef45c8 7111 tree tem;
ee45a32d 7112 bool reverse;
cfef45c8 7113
f8a7df45
RG
7114 if (TREE_THIS_VOLATILE (t))
7115 return NULL_TREE;
7116
3a65ee74 7117 if (DECL_P (t))
cfef45c8
RG
7118 return get_symbol_constant_value (t);
7119
7120 tem = fold_read_from_constant_string (t);
7121 if (tem)
7122 return tem;
7123
7124 switch (TREE_CODE (t))
7125 {
7126 case ARRAY_REF:
7127 case ARRAY_RANGE_REF:
7128 /* Constant indexes are handled well by get_base_constructor.
7129 Only special case variable offsets.
7130 FIXME: This code can't handle nested references with variable indexes
7131 (they will be handled only by iteration of ccp). Perhaps we can bring
7132 get_ref_base_and_extent here and make it use a valueize callback. */
7133 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
7134 && valueize
7135 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
588db50c 7136 && poly_int_tree_p (idx))
cfef45c8
RG
7137 {
7138 tree low_bound, unit_size;
7139
7140 /* If the resulting bit-offset is constant, track it. */
7141 if ((low_bound = array_ref_low_bound (t),
588db50c 7142 poly_int_tree_p (low_bound))
cfef45c8 7143 && (unit_size = array_ref_element_size (t),
807e902e 7144 tree_fits_uhwi_p (unit_size)))
cfef45c8 7145 {
588db50c
RS
7146 poly_offset_int woffset
7147 = wi::sext (wi::to_poly_offset (idx)
7148 - wi::to_poly_offset (low_bound),
807e902e 7149 TYPE_PRECISION (TREE_TYPE (idx)));
a9e6359a
RB
7150 woffset *= tree_to_uhwi (unit_size);
7151 woffset *= BITS_PER_UNIT;
588db50c 7152 if (woffset.to_shwi (&offset))
807e902e 7153 {
807e902e
KZ
7154 base = TREE_OPERAND (t, 0);
7155 ctor = get_base_constructor (base, &offset, valueize);
7156 /* Empty constructor. Always fold to 0. */
7157 if (ctor == error_mark_node)
7158 return build_zero_cst (TREE_TYPE (t));
7159 /* Out of bound array access. Value is undefined,
7160 but don't fold. */
588db50c 7161 if (maybe_lt (offset, 0))
807e902e 7162 return NULL_TREE;
67914693 7163 /* We cannot determine ctor. */
807e902e
KZ
7164 if (!ctor)
7165 return NULL_TREE;
7166 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
7167 tree_to_uhwi (unit_size)
7168 * BITS_PER_UNIT,
7169 base);
7170 }
cfef45c8
RG
7171 }
7172 }
7173 /* Fallthru. */
7174
7175 case COMPONENT_REF:
7176 case BIT_FIELD_REF:
7177 case TARGET_MEM_REF:
7178 case MEM_REF:
ee45a32d 7179 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
cfef45c8
RG
7180 ctor = get_base_constructor (base, &offset, valueize);
7181
7182 /* Empty constructor. Always fold to 0. */
7183 if (ctor == error_mark_node)
7184 return build_zero_cst (TREE_TYPE (t));
7185 /* We do not know precise address. */
588db50c 7186 if (!known_size_p (max_size) || maybe_ne (max_size, size))
cfef45c8 7187 return NULL_TREE;
67914693 7188 /* We cannot determine ctor. */
cfef45c8
RG
7189 if (!ctor)
7190 return NULL_TREE;
7191
7192 /* Out of bound array access. Value is undefined, but don't fold. */
588db50c 7193 if (maybe_lt (offset, 0))
cfef45c8
RG
7194 return NULL_TREE;
7195
e4f1cbc3
JJ
7196 tem = fold_ctor_reference (TREE_TYPE (t), ctor, offset, size, base);
7197 if (tem)
7198 return tem;
7199
7200 /* For bit field reads try to read the representative and
7201 adjust. */
7202 if (TREE_CODE (t) == COMPONENT_REF
7203 && DECL_BIT_FIELD (TREE_OPERAND (t, 1))
7204 && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)))
7205 {
7206 HOST_WIDE_INT csize, coffset;
7207 tree field = TREE_OPERAND (t, 1);
7208 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
7209 if (INTEGRAL_TYPE_P (TREE_TYPE (repr))
7210 && size.is_constant (&csize)
7211 && offset.is_constant (&coffset)
7212 && (coffset % BITS_PER_UNIT != 0
7213 || csize % BITS_PER_UNIT != 0)
7214 && !reverse
7215 && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN)
7216 {
7217 poly_int64 bitoffset;
7218 poly_uint64 field_offset, repr_offset;
7219 if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
7220 && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
7221 bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
7222 else
7223 bitoffset = 0;
7224 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
7225 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
7226 HOST_WIDE_INT bitoff;
7227 int diff = (TYPE_PRECISION (TREE_TYPE (repr))
7228 - TYPE_PRECISION (TREE_TYPE (field)));
7229 if (bitoffset.is_constant (&bitoff)
7230 && bitoff >= 0
7231 && bitoff <= diff)
7232 {
7233 offset -= bitoff;
7234 size = tree_to_uhwi (DECL_SIZE (repr));
7235
7236 tem = fold_ctor_reference (TREE_TYPE (repr), ctor, offset,
7237 size, base);
7238 if (tem && TREE_CODE (tem) == INTEGER_CST)
7239 {
7240 if (!BYTES_BIG_ENDIAN)
7241 tem = wide_int_to_tree (TREE_TYPE (field),
7242 wi::lrshift (wi::to_wide (tem),
7243 bitoff));
7244 else
7245 tem = wide_int_to_tree (TREE_TYPE (field),
7246 wi::lrshift (wi::to_wide (tem),
7247 diff - bitoff));
7248 return tem;
7249 }
7250 }
7251 }
7252 }
7253 break;
cfef45c8
RG
7254
7255 case REALPART_EXPR:
7256 case IMAGPART_EXPR:
7257 {
7258 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
7259 if (c && TREE_CODE (c) == COMPLEX_CST)
7260 return fold_build1_loc (EXPR_LOCATION (t),
ea69031c 7261 TREE_CODE (t), TREE_TYPE (t), c);
cfef45c8
RG
7262 break;
7263 }
7264
7265 default:
7266 break;
7267 }
7268
7269 return NULL_TREE;
7270}
7271
7272tree
7273fold_const_aggregate_ref (tree t)
7274{
7275 return fold_const_aggregate_ref_1 (t, NULL);
7276}
06bc3ec7 7277
85942f45 7278/* Lookup virtual method with index TOKEN in a virtual table V
ec77d61f
JH
7279 at OFFSET.
7280 Set CAN_REFER if non-NULL to false if method
7281 is not referable or if the virtual table is ill-formed (such as rewriten
7282 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
81fa35bd
MJ
7283
7284tree
85942f45
JH
7285gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
7286 tree v,
ec77d61f
JH
7287 unsigned HOST_WIDE_INT offset,
7288 bool *can_refer)
81fa35bd 7289{
85942f45
JH
7290 tree vtable = v, init, fn;
7291 unsigned HOST_WIDE_INT size;
8c311b50
JH
7292 unsigned HOST_WIDE_INT elt_size, access_index;
7293 tree domain_type;
81fa35bd 7294
ec77d61f
JH
7295 if (can_refer)
7296 *can_refer = true;
7297
9de2f554 7298 /* First of all double check we have virtual table. */
8813a647 7299 if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
ec77d61f 7300 {
ec77d61f
JH
7301 /* Pass down that we lost track of the target. */
7302 if (can_refer)
7303 *can_refer = false;
7304 return NULL_TREE;
7305 }
9de2f554 7306
2aa3da06
JH
7307 init = ctor_for_folding (v);
7308
9de2f554 7309 /* The virtual tables should always be born with constructors
2aa3da06
JH
7310 and we always should assume that they are avaialble for
7311 folding. At the moment we do not stream them in all cases,
7312 but it should never happen that ctor seem unreachable. */
7313 gcc_assert (init);
7314 if (init == error_mark_node)
7315 {
ec77d61f
JH
7316 /* Pass down that we lost track of the target. */
7317 if (can_refer)
7318 *can_refer = false;
2aa3da06
JH
7319 return NULL_TREE;
7320 }
81fa35bd 7321 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
ae7e9ddd 7322 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
85942f45 7323 offset *= BITS_PER_UNIT;
81fa35bd 7324 offset += token * size;
9de2f554 7325
8c311b50
JH
7326 /* Lookup the value in the constructor that is assumed to be array.
7327 This is equivalent to
7328 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
7329 offset, size, NULL);
7330 but in a constant time. We expect that frontend produced a simple
7331 array without indexed initializers. */
7332
7333 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
7334 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
7335 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
7336 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
7337
7338 access_index = offset / BITS_PER_UNIT / elt_size;
7339 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
7340
bf8d8309
MP
7341 /* The C++ FE can now produce indexed fields, and we check if the indexes
7342 match. */
8c311b50
JH
7343 if (access_index < CONSTRUCTOR_NELTS (init))
7344 {
7345 fn = CONSTRUCTOR_ELT (init, access_index)->value;
bf8d8309
MP
7346 tree idx = CONSTRUCTOR_ELT (init, access_index)->index;
7347 gcc_checking_assert (!idx || tree_to_uhwi (idx) == access_index);
8c311b50
JH
7348 STRIP_NOPS (fn);
7349 }
7350 else
7351 fn = NULL;
9de2f554
JH
7352
7353 /* For type inconsistent program we may end up looking up virtual method
7354 in virtual table that does not contain TOKEN entries. We may overrun
7355 the virtual table and pick up a constant or RTTI info pointer.
7356 In any case the call is undefined. */
7357 if (!fn
7358 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
7359 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
7360 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
7361 else
7362 {
7363 fn = TREE_OPERAND (fn, 0);
7364
7365 /* When cgraph node is missing and function is not public, we cannot
7366 devirtualize. This can happen in WHOPR when the actual method
7367 ends up in other partition, because we found devirtualization
7368 possibility too late. */
7369 if (!can_refer_decl_in_current_unit_p (fn, vtable))
ec77d61f
JH
7370 {
7371 if (can_refer)
7372 {
7373 *can_refer = false;
7374 return fn;
7375 }
7376 return NULL_TREE;
7377 }
9de2f554 7378 }
81fa35bd 7379
7501ca28
RG
7380 /* Make sure we create a cgraph node for functions we'll reference.
7381 They can be non-existent if the reference comes from an entry
7382 of an external vtable for example. */
d52f5295 7383 cgraph_node::get_create (fn);
7501ca28 7384
81fa35bd
MJ
7385 return fn;
7386}
7387
85942f45
JH
7388/* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
7389 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
7390 KNOWN_BINFO carries the binfo describing the true type of
ec77d61f
JH
7391 OBJ_TYPE_REF_OBJECT(REF).
7392 Set CAN_REFER if non-NULL to false if method
7393 is not referable or if the virtual table is ill-formed (such as rewriten
7394 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
85942f45
JH
7395
7396tree
ec77d61f
JH
7397gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
7398 bool *can_refer)
85942f45
JH
7399{
7400 unsigned HOST_WIDE_INT offset;
7401 tree v;
7402
7403 v = BINFO_VTABLE (known_binfo);
7404 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
7405 if (!v)
7406 return NULL_TREE;
7407
7408 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
ec77d61f
JH
7409 {
7410 if (can_refer)
7411 *can_refer = false;
7412 return NULL_TREE;
7413 }
7414 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
85942f45
JH
7415}
7416
737f500a
RB
7417/* Given a pointer value T, return a simplified version of an
7418 indirection through T, or NULL_TREE if no simplification is
b184c8f1
AM
7419 possible. Note that the resulting type may be different from
7420 the type pointed to in the sense that it is still compatible
7421 from the langhooks point of view. */
7422
7423tree
7424gimple_fold_indirect_ref (tree t)
7425{
7426 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
7427 tree sub = t;
7428 tree subtype;
7429
7430 STRIP_NOPS (sub);
7431 subtype = TREE_TYPE (sub);
737f500a
RB
7432 if (!POINTER_TYPE_P (subtype)
7433 || TYPE_REF_CAN_ALIAS_ALL (ptype))
b184c8f1
AM
7434 return NULL_TREE;
7435
7436 if (TREE_CODE (sub) == ADDR_EXPR)
7437 {
7438 tree op = TREE_OPERAND (sub, 0);
7439 tree optype = TREE_TYPE (op);
7440 /* *&p => p */
7441 if (useless_type_conversion_p (type, optype))
7442 return op;
7443
7444 /* *(foo *)&fooarray => fooarray[0] */
7445 if (TREE_CODE (optype) == ARRAY_TYPE
7446 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
7447 && useless_type_conversion_p (type, TREE_TYPE (optype)))
7448 {
7449 tree type_domain = TYPE_DOMAIN (optype);
7450 tree min_val = size_zero_node;
7451 if (type_domain && TYPE_MIN_VALUE (type_domain))
7452 min_val = TYPE_MIN_VALUE (type_domain);
7453 if (TREE_CODE (min_val) == INTEGER_CST)
7454 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
7455 }
7456 /* *(foo *)&complexfoo => __real__ complexfoo */
7457 else if (TREE_CODE (optype) == COMPLEX_TYPE
7458 && useless_type_conversion_p (type, TREE_TYPE (optype)))
7459 return fold_build1 (REALPART_EXPR, type, op);
7460 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
7461 else if (TREE_CODE (optype) == VECTOR_TYPE
7462 && useless_type_conversion_p (type, TREE_TYPE (optype)))
7463 {
7464 tree part_width = TYPE_SIZE (type);
7465 tree index = bitsize_int (0);
7466 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
7467 }
7468 }
7469
7470 /* *(p + CST) -> ... */
7471 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
7472 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
7473 {
7474 tree addr = TREE_OPERAND (sub, 0);
7475 tree off = TREE_OPERAND (sub, 1);
7476 tree addrtype;
7477
7478 STRIP_NOPS (addr);
7479 addrtype = TREE_TYPE (addr);
7480
7481 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
7482 if (TREE_CODE (addr) == ADDR_EXPR
7483 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
7484 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
cc269bb6 7485 && tree_fits_uhwi_p (off))
b184c8f1 7486 {
ae7e9ddd 7487 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
b184c8f1
AM
7488 tree part_width = TYPE_SIZE (type);
7489 unsigned HOST_WIDE_INT part_widthi
9439e9a1 7490 = tree_to_shwi (part_width) / BITS_PER_UNIT;
b184c8f1
AM
7491 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
7492 tree index = bitsize_int (indexi);
928686b1
RS
7493 if (known_lt (offset / part_widthi,
7494 TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
b184c8f1
AM
7495 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
7496 part_width, index);
7497 }
7498
7499 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
7500 if (TREE_CODE (addr) == ADDR_EXPR
7501 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
7502 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
7503 {
7504 tree size = TYPE_SIZE_UNIT (type);
7505 if (tree_int_cst_equal (size, off))
7506 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
7507 }
7508
7509 /* *(p + CST) -> MEM_REF <p, CST>. */
7510 if (TREE_CODE (addr) != ADDR_EXPR
7511 || DECL_P (TREE_OPERAND (addr, 0)))
7512 return fold_build2 (MEM_REF, type,
7513 addr,
8e6cdc90 7514 wide_int_to_tree (ptype, wi::to_wide (off)));
b184c8f1
AM
7515 }
7516
7517 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
7518 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
7519 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
7520 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
7521 {
7522 tree type_domain;
7523 tree min_val = size_zero_node;
7524 tree osub = sub;
7525 sub = gimple_fold_indirect_ref (sub);
7526 if (! sub)
7527 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
7528 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
7529 if (type_domain && TYPE_MIN_VALUE (type_domain))
7530 min_val = TYPE_MIN_VALUE (type_domain);
7531 if (TREE_CODE (min_val) == INTEGER_CST)
7532 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
7533 }
7534
7535 return NULL_TREE;
7536}
19e51b40
JJ
7537
7538/* Return true if CODE is an operation that when operating on signed
7539 integer types involves undefined behavior on overflow and the
7540 operation can be expressed with unsigned arithmetic. */
7541
7542bool
7543arith_code_with_undefined_signed_overflow (tree_code code)
7544{
7545 switch (code)
7546 {
8e2c037d 7547 case ABS_EXPR:
19e51b40
JJ
7548 case PLUS_EXPR:
7549 case MINUS_EXPR:
7550 case MULT_EXPR:
7551 case NEGATE_EXPR:
7552 case POINTER_PLUS_EXPR:
7553 return true;
7554 default:
7555 return false;
7556 }
7557}
7558
7559/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
7560 operation that can be transformed to unsigned arithmetic by converting
7561 its operand, carrying out the operation in the corresponding unsigned
7562 type and converting the result back to the original type.
7563
7564 Returns a sequence of statements that replace STMT and also contain
7565 a modified form of STMT itself. */
7566
7567gimple_seq
355fe088 7568rewrite_to_defined_overflow (gimple *stmt)
19e51b40
JJ
7569{
7570 if (dump_file && (dump_flags & TDF_DETAILS))
7571 {
7572 fprintf (dump_file, "rewriting stmt with undefined signed "
7573 "overflow ");
7574 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
7575 }
7576
7577 tree lhs = gimple_assign_lhs (stmt);
7578 tree type = unsigned_type_for (TREE_TYPE (lhs));
7579 gimple_seq stmts = NULL;
8e2c037d
RB
7580 if (gimple_assign_rhs_code (stmt) == ABS_EXPR)
7581 gimple_assign_set_rhs_code (stmt, ABSU_EXPR);
7582 else
7583 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
7584 {
7585 tree op = gimple_op (stmt, i);
7586 op = gimple_convert (&stmts, type, op);
7587 gimple_set_op (stmt, i, op);
7588 }
19e51b40
JJ
7589 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
7590 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
7591 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
f74c4b2c 7592 gimple_set_modified (stmt, true);
19e51b40 7593 gimple_seq_add_stmt (&stmts, stmt);
355fe088 7594 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
19e51b40
JJ
7595 gimple_seq_add_stmt (&stmts, cvt);
7596
7597 return stmts;
7598}
d4f5cd5e 7599
3d2cf79f 7600
c26de36d
RB
7601/* The valueization hook we use for the gimple_build API simplification.
7602 This makes us match fold_buildN behavior by only combining with
7603 statements in the sequence(s) we are currently building. */
7604
7605static tree
7606gimple_build_valueize (tree op)
7607{
7608 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
7609 return op;
7610 return NULL_TREE;
7611}
7612
3d2cf79f 7613/* Build the expression CODE OP0 of type TYPE with location LOC,
c26de36d 7614 simplifying it first if possible. Returns the built
3d2cf79f
RB
7615 expression value and appends statements possibly defining it
7616 to SEQ. */
7617
7618tree
7619gimple_build (gimple_seq *seq, location_t loc,
c26de36d 7620 enum tree_code code, tree type, tree op0)
3d2cf79f 7621{
c26de36d 7622 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
3d2cf79f
RB
7623 if (!res)
7624 {
a15ebbcd 7625 res = create_tmp_reg_or_ssa_name (type);
355fe088 7626 gimple *stmt;
3d2cf79f
RB
7627 if (code == REALPART_EXPR
7628 || code == IMAGPART_EXPR
7629 || code == VIEW_CONVERT_EXPR)
0d0e4a03 7630 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
3d2cf79f 7631 else
0d0e4a03 7632 stmt = gimple_build_assign (res, code, op0);
3d2cf79f
RB
7633 gimple_set_location (stmt, loc);
7634 gimple_seq_add_stmt_without_update (seq, stmt);
7635 }
7636 return res;
7637}
7638
7639/* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
c26de36d 7640 simplifying it first if possible. Returns the built
3d2cf79f
RB
7641 expression value and appends statements possibly defining it
7642 to SEQ. */
7643
7644tree
7645gimple_build (gimple_seq *seq, location_t loc,
c26de36d 7646 enum tree_code code, tree type, tree op0, tree op1)
3d2cf79f 7647{
c26de36d 7648 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
3d2cf79f
RB
7649 if (!res)
7650 {
a15ebbcd 7651 res = create_tmp_reg_or_ssa_name (type);
355fe088 7652 gimple *stmt = gimple_build_assign (res, code, op0, op1);
3d2cf79f
RB
7653 gimple_set_location (stmt, loc);
7654 gimple_seq_add_stmt_without_update (seq, stmt);
7655 }
7656 return res;
7657}
7658
7659/* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
c26de36d 7660 simplifying it first if possible. Returns the built
3d2cf79f
RB
7661 expression value and appends statements possibly defining it
7662 to SEQ. */
7663
7664tree
7665gimple_build (gimple_seq *seq, location_t loc,
c26de36d 7666 enum tree_code code, tree type, tree op0, tree op1, tree op2)
3d2cf79f
RB
7667{
7668 tree res = gimple_simplify (code, type, op0, op1, op2,
c26de36d 7669 seq, gimple_build_valueize);
3d2cf79f
RB
7670 if (!res)
7671 {
a15ebbcd 7672 res = create_tmp_reg_or_ssa_name (type);
355fe088 7673 gimple *stmt;
3d2cf79f 7674 if (code == BIT_FIELD_REF)
0d0e4a03
JJ
7675 stmt = gimple_build_assign (res, code,
7676 build3 (code, type, op0, op1, op2));
3d2cf79f 7677 else
0d0e4a03 7678 stmt = gimple_build_assign (res, code, op0, op1, op2);
3d2cf79f
RB
7679 gimple_set_location (stmt, loc);
7680 gimple_seq_add_stmt_without_update (seq, stmt);
7681 }
7682 return res;
7683}
7684
7685/* Build the call FN (ARG0) with a result of type TYPE
7686 (or no result if TYPE is void) with location LOC,
c26de36d 7687 simplifying it first if possible. Returns the built
3d2cf79f
RB
7688 expression value (or NULL_TREE if TYPE is void) and appends
7689 statements possibly defining it to SEQ. */
7690
7691tree
eb69361d
RS
7692gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7693 tree type, tree arg0)
3d2cf79f 7694{
c26de36d 7695 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
3d2cf79f
RB
7696 if (!res)
7697 {
eb69361d
RS
7698 gcall *stmt;
7699 if (internal_fn_p (fn))
7700 stmt = gimple_build_call_internal (as_internal_fn (fn), 1, arg0);
7701 else
7702 {
7703 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7704 stmt = gimple_build_call (decl, 1, arg0);
7705 }
3d2cf79f
RB
7706 if (!VOID_TYPE_P (type))
7707 {
a15ebbcd 7708 res = create_tmp_reg_or_ssa_name (type);
3d2cf79f
RB
7709 gimple_call_set_lhs (stmt, res);
7710 }
7711 gimple_set_location (stmt, loc);
7712 gimple_seq_add_stmt_without_update (seq, stmt);
7713 }
7714 return res;
7715}
7716
7717/* Build the call FN (ARG0, ARG1) with a result of type TYPE
7718 (or no result if TYPE is void) with location LOC,
c26de36d 7719 simplifying it first if possible. Returns the built
3d2cf79f
RB
7720 expression value (or NULL_TREE if TYPE is void) and appends
7721 statements possibly defining it to SEQ. */
7722
7723tree
eb69361d
RS
7724gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7725 tree type, tree arg0, tree arg1)
3d2cf79f 7726{
c26de36d 7727 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
3d2cf79f
RB
7728 if (!res)
7729 {
eb69361d
RS
7730 gcall *stmt;
7731 if (internal_fn_p (fn))
7732 stmt = gimple_build_call_internal (as_internal_fn (fn), 2, arg0, arg1);
7733 else
7734 {
7735 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7736 stmt = gimple_build_call (decl, 2, arg0, arg1);
7737 }
3d2cf79f
RB
7738 if (!VOID_TYPE_P (type))
7739 {
a15ebbcd 7740 res = create_tmp_reg_or_ssa_name (type);
3d2cf79f
RB
7741 gimple_call_set_lhs (stmt, res);
7742 }
7743 gimple_set_location (stmt, loc);
7744 gimple_seq_add_stmt_without_update (seq, stmt);
7745 }
7746 return res;
7747}
7748
7749/* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
7750 (or no result if TYPE is void) with location LOC,
c26de36d 7751 simplifying it first if possible. Returns the built
3d2cf79f
RB
7752 expression value (or NULL_TREE if TYPE is void) and appends
7753 statements possibly defining it to SEQ. */
7754
7755tree
eb69361d
RS
7756gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7757 tree type, tree arg0, tree arg1, tree arg2)
3d2cf79f 7758{
c26de36d
RB
7759 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
7760 seq, gimple_build_valueize);
3d2cf79f
RB
7761 if (!res)
7762 {
eb69361d
RS
7763 gcall *stmt;
7764 if (internal_fn_p (fn))
7765 stmt = gimple_build_call_internal (as_internal_fn (fn),
7766 3, arg0, arg1, arg2);
7767 else
7768 {
7769 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7770 stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
7771 }
3d2cf79f
RB
7772 if (!VOID_TYPE_P (type))
7773 {
a15ebbcd 7774 res = create_tmp_reg_or_ssa_name (type);
3d2cf79f
RB
7775 gimple_call_set_lhs (stmt, res);
7776 }
7777 gimple_set_location (stmt, loc);
7778 gimple_seq_add_stmt_without_update (seq, stmt);
7779 }
7780 return res;
7781}
7782
7783/* Build the conversion (TYPE) OP with a result of type TYPE
7784 with location LOC if such conversion is neccesary in GIMPLE,
7785 simplifying it first.
7786 Returns the built expression value and appends
7787 statements possibly defining it to SEQ. */
d4f5cd5e
RB
7788
7789tree
7790gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
7791{
7792 if (useless_type_conversion_p (type, TREE_TYPE (op)))
7793 return op;
3d2cf79f 7794 return gimple_build (seq, loc, NOP_EXPR, type, op);
d4f5cd5e 7795}
68e57f04 7796
74e3c262
RB
7797/* Build the conversion (ptrofftype) OP with a result of a type
7798 compatible with ptrofftype with location LOC if such conversion
7799 is neccesary in GIMPLE, simplifying it first.
7800 Returns the built expression value and appends
7801 statements possibly defining it to SEQ. */
7802
7803tree
7804gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
7805{
7806 if (ptrofftype_p (TREE_TYPE (op)))
7807 return op;
7808 return gimple_convert (seq, loc, sizetype, op);
7809}
7810
e7c45b66
RS
7811/* Build a vector of type TYPE in which each element has the value OP.
7812 Return a gimple value for the result, appending any new statements
7813 to SEQ. */
7814
7815tree
7816gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
7817 tree op)
7818{
928686b1
RS
7819 if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
7820 && !CONSTANT_CLASS_P (op))
7821 return gimple_build (seq, loc, VEC_DUPLICATE_EXPR, type, op);
7822
e7c45b66
RS
7823 tree res, vec = build_vector_from_val (type, op);
7824 if (is_gimple_val (vec))
7825 return vec;
7826 if (gimple_in_ssa_p (cfun))
7827 res = make_ssa_name (type);
7828 else
7829 res = create_tmp_reg (type);
7830 gimple *stmt = gimple_build_assign (res, vec);
7831 gimple_set_location (stmt, loc);
7832 gimple_seq_add_stmt_without_update (seq, stmt);
7833 return res;
7834}
7835
abe73c3d
RS
7836/* Build a vector from BUILDER, handling the case in which some elements
7837 are non-constant. Return a gimple value for the result, appending any
7838 new instructions to SEQ.
7839
7840 BUILDER must not have a stepped encoding on entry. This is because
7841 the function is not geared up to handle the arithmetic that would
7842 be needed in the variable case, and any code building a vector that
7843 is known to be constant should use BUILDER->build () directly. */
e7c45b66
RS
7844
7845tree
abe73c3d
RS
7846gimple_build_vector (gimple_seq *seq, location_t loc,
7847 tree_vector_builder *builder)
e7c45b66 7848{
abe73c3d
RS
7849 gcc_assert (builder->nelts_per_pattern () <= 2);
7850 unsigned int encoded_nelts = builder->encoded_nelts ();
7851 for (unsigned int i = 0; i < encoded_nelts; ++i)
7852 if (!TREE_CONSTANT ((*builder)[i]))
e7c45b66 7853 {
abe73c3d 7854 tree type = builder->type ();
928686b1 7855 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
e7c45b66
RS
7856 vec<constructor_elt, va_gc> *v;
7857 vec_alloc (v, nelts);
7858 for (i = 0; i < nelts; ++i)
abe73c3d 7859 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));
e7c45b66
RS
7860
7861 tree res;
7862 if (gimple_in_ssa_p (cfun))
7863 res = make_ssa_name (type);
7864 else
7865 res = create_tmp_reg (type);
7866 gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
7867 gimple_set_location (stmt, loc);
7868 gimple_seq_add_stmt_without_update (seq, stmt);
7869 return res;
7870 }
abe73c3d 7871 return builder->build ();
e7c45b66
RS
7872}
7873
68e57f04
RS
7874/* Return true if the result of assignment STMT is known to be non-negative.
7875 If the return value is based on the assumption that signed overflow is
7876 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7877 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7878
7879static bool
7880gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7881 int depth)
7882{
7883 enum tree_code code = gimple_assign_rhs_code (stmt);
7884 switch (get_gimple_rhs_class (code))
7885 {
7886 case GIMPLE_UNARY_RHS:
7887 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7888 gimple_expr_type (stmt),
7889 gimple_assign_rhs1 (stmt),
7890 strict_overflow_p, depth);
7891 case GIMPLE_BINARY_RHS:
7892 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7893 gimple_expr_type (stmt),
7894 gimple_assign_rhs1 (stmt),
7895 gimple_assign_rhs2 (stmt),
7896 strict_overflow_p, depth);
7897 case GIMPLE_TERNARY_RHS:
7898 return false;
7899 case GIMPLE_SINGLE_RHS:
7900 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
7901 strict_overflow_p, depth);
7902 case GIMPLE_INVALID_RHS:
7903 break;
7904 }
7905 gcc_unreachable ();
7906}
7907
7908/* Return true if return value of call STMT is known to be non-negative.
7909 If the return value is based on the assumption that signed overflow is
7910 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7911 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7912
7913static bool
7914gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7915 int depth)
7916{
7917 tree arg0 = gimple_call_num_args (stmt) > 0 ?
7918 gimple_call_arg (stmt, 0) : NULL_TREE;
7919 tree arg1 = gimple_call_num_args (stmt) > 1 ?
7920 gimple_call_arg (stmt, 1) : NULL_TREE;
7921
7922 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
1d9da71f 7923 gimple_call_combined_fn (stmt),
68e57f04
RS
7924 arg0,
7925 arg1,
7926 strict_overflow_p, depth);
7927}
7928
4534c203
RB
7929/* Return true if return value of call STMT is known to be non-negative.
7930 If the return value is based on the assumption that signed overflow is
7931 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7932 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7933
7934static bool
7935gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7936 int depth)
7937{
7938 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7939 {
7940 tree arg = gimple_phi_arg_def (stmt, i);
7941 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
7942 return false;
7943 }
7944 return true;
7945}
7946
68e57f04
RS
7947/* Return true if STMT is known to compute a non-negative value.
7948 If the return value is based on the assumption that signed overflow is
7949 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7950 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7951
7952bool
7953gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7954 int depth)
7955{
7956 switch (gimple_code (stmt))
7957 {
7958 case GIMPLE_ASSIGN:
7959 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
7960 depth);
7961 case GIMPLE_CALL:
7962 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
7963 depth);
4534c203
RB
7964 case GIMPLE_PHI:
7965 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
7966 depth);
68e57f04
RS
7967 default:
7968 return false;
7969 }
7970}
67dbe582
RS
7971
7972/* Return true if the floating-point value computed by assignment STMT
7973 is known to have an integer value. We also allow +Inf, -Inf and NaN
5a00b0aa 7974 to be considered integer values. Return false for signaling NaN.
67dbe582
RS
7975
7976 DEPTH is the current nesting depth of the query. */
7977
7978static bool
7979gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
7980{
7981 enum tree_code code = gimple_assign_rhs_code (stmt);
7982 switch (get_gimple_rhs_class (code))
7983 {
7984 case GIMPLE_UNARY_RHS:
7985 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
7986 gimple_assign_rhs1 (stmt), depth);
7987 case GIMPLE_BINARY_RHS:
7988 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
7989 gimple_assign_rhs1 (stmt),
7990 gimple_assign_rhs2 (stmt), depth);
7991 case GIMPLE_TERNARY_RHS:
7992 return false;
7993 case GIMPLE_SINGLE_RHS:
7994 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
7995 case GIMPLE_INVALID_RHS:
7996 break;
7997 }
7998 gcc_unreachable ();
7999}
8000
8001/* Return true if the floating-point value computed by call STMT is known
8002 to have an integer value. We also allow +Inf, -Inf and NaN to be
5a00b0aa 8003 considered integer values. Return false for signaling NaN.
67dbe582
RS
8004
8005 DEPTH is the current nesting depth of the query. */
8006
8007static bool
8008gimple_call_integer_valued_real_p (gimple *stmt, int depth)
8009{
8010 tree arg0 = (gimple_call_num_args (stmt) > 0
8011 ? gimple_call_arg (stmt, 0)
8012 : NULL_TREE);
8013 tree arg1 = (gimple_call_num_args (stmt) > 1
8014 ? gimple_call_arg (stmt, 1)
8015 : NULL_TREE);
1d9da71f 8016 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
67dbe582
RS
8017 arg0, arg1, depth);
8018}
8019
8020/* Return true if the floating-point result of phi STMT is known to have
8021 an integer value. We also allow +Inf, -Inf and NaN to be considered
5a00b0aa 8022 integer values. Return false for signaling NaN.
67dbe582
RS
8023
8024 DEPTH is the current nesting depth of the query. */
8025
8026static bool
8027gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
8028{
8029 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
8030 {
8031 tree arg = gimple_phi_arg_def (stmt, i);
8032 if (!integer_valued_real_single_p (arg, depth + 1))
8033 return false;
8034 }
8035 return true;
8036}
8037
8038/* Return true if the floating-point value computed by STMT is known
8039 to have an integer value. We also allow +Inf, -Inf and NaN to be
5a00b0aa 8040 considered integer values. Return false for signaling NaN.
67dbe582
RS
8041
8042 DEPTH is the current nesting depth of the query. */
8043
8044bool
8045gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
8046{
8047 switch (gimple_code (stmt))
8048 {
8049 case GIMPLE_ASSIGN:
8050 return gimple_assign_integer_valued_real_p (stmt, depth);
8051 case GIMPLE_CALL:
8052 return gimple_call_integer_valued_real_p (stmt, depth);
8053 case GIMPLE_PHI:
8054 return gimple_phi_integer_valued_real_p (stmt, depth);
8055 default:
8056 return false;
8057 }
8058}