]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/gimple-fold.c
Fortran - out of bounds in array constructor with implied do loop
[thirdparty/gcc.git] / gcc / gimple-fold.c
CommitLineData
cbdd87d4 1/* Statement simplification on GIMPLE.
99dee823 2 Copyright (C) 2010-2021 Free Software Foundation, Inc.
cbdd87d4
RG
3 Split out from tree-ssa-ccp.c.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it
8under the terms of the GNU General Public License as published by the
9Free Software Foundation; either version 3, or (at your option) any
10later version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT
13ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
c7131fb2 24#include "backend.h"
957060b5
AM
25#include "target.h"
26#include "rtl.h"
cbdd87d4 27#include "tree.h"
c7131fb2 28#include "gimple.h"
957060b5 29#include "predict.h"
c7131fb2 30#include "ssa.h"
957060b5
AM
31#include "cgraph.h"
32#include "gimple-pretty-print.h"
2a837de2 33#include "gimple-ssa-warn-access.h"
cc8bea0a 34#include "gimple-ssa-warn-restrict.h"
c7131fb2 35#include "fold-const.h"
36566b39
PK
36#include "stmt.h"
37#include "expr.h"
38#include "stor-layout.h"
7ee2468b 39#include "dumpfile.h"
2fb9a547 40#include "gimple-fold.h"
45b0be94 41#include "gimplify.h"
5be5c238 42#include "gimple-iterator.h"
442b4905
AM
43#include "tree-into-ssa.h"
44#include "tree-dfa.h"
025d57f0 45#include "tree-object-size.h"
7a300452 46#include "tree-ssa.h"
cbdd87d4 47#include "tree-ssa-propagate.h"
450ad0cd 48#include "ipa-utils.h"
4484a35a 49#include "tree-ssa-address.h"
862d0b35 50#include "langhooks.h"
19e51b40 51#include "gimplify-me.h"
2b5f0895 52#include "dbgcnt.h"
9b2b7279 53#include "builtins.h"
e0ee10ed
RB
54#include "tree-eh.h"
55#include "gimple-match.h"
48126138 56#include "gomp-constants.h"
f869c12f 57#include "optabs-query.h"
629b3d75 58#include "omp-general.h"
abd3a68c 59#include "tree-cfg.h"
a918bfbf 60#include "fold-const-call.h"
314e6352
ML
61#include "stringpool.h"
62#include "attribs.h"
45b2222a 63#include "asan.h"
025d57f0
MS
64#include "diagnostic-core.h"
65#include "intl.h"
6a33d0ff 66#include "calls.h"
5ebaa477 67#include "tree-vector-builder.h"
5d0d5d68 68#include "tree-ssa-strlen.h"
e7868dc6 69#include "varasm.h"
cbdd87d4 70
/* Selects what kind of string-length information get_range_strlen and
   friends should compute for a given argument.  */

enum strlen_range_kind {
  /* Compute the exact constant string length.  */
  SRK_STRLEN,
  /* Compute the maximum constant string length.  */
  SRK_STRLENMAX,
  /* Compute a range of string lengths bounded by object sizes.  When
     the length of a string cannot be determined, consider as the upper
     bound the size of the enclosing object the string may be a member
     or element of.  Also determine the size of the largest character
     array the string may refer to.  */
  SRK_LENRANGE,
  /* Determine the integer value of the argument (not string length).  */
  SRK_INT_VALUE
};
85
03c4a945
MS
86static bool
87get_range_strlen (tree, bitmap *, strlen_range_kind, c_strlen_data *, unsigned);
fb471a13 88
/* Return true when DECL can be referenced from current unit.
   FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
	In this case we have static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred by external vtables that
	we devirtualize only during final compilation stage.
	At this time we already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  /* Abstract (e.g. debug-only) declarations never have a body to refer to.  */
  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || !VAR_OR_FUNCTION_DECL_P (decl))
    return true;

  /* Static objects can be referred only if they are defined and not optimized
     out yet.  */
  if (!TREE_PUBLIC (decl))
    {
      if (DECL_EXTERNAL (decl))
	return false;
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      /* A body inlined into its only caller no longer exists standalone.  */
      return !node || !node->inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var or var that has been optimized out.  */
  if (!from_decl
      || !VAR_P (from_decl)
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When function is public, we always can introduce new reference.
     Exception are the COMDAT functions where introducing a direct
     reference imply need to include function body in the current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have COMDAT.  We are going to check if we still have definition
     or if the definition is going to be output in other partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when corresponding vtable is output.
     This is however not possible - ABI specify that COMDATs are output in
     units where they are used and when the other unit was compiled with LTO
     it is possible that vtable was kept public while the function itself
     was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->inlined_to;
}
191
a15ebbcd
ML
192/* Create a temporary for TYPE for a statement STMT. If the current function
193 is in SSA form, a SSA name is created. Otherwise a temporary register
194 is made. */
195
edc19e03
WS
196tree
197create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
a15ebbcd
ML
198{
199 if (gimple_in_ssa_p (cfun))
200 return make_ssa_name (type, stmt);
201 else
202 return create_tmp_reg (type);
203}
204
/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it into
   acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL.
   Returns the canonicalized value, NULL_TREE if the reference cannot be
   emitted in the current unit, or the original value when nothing applies.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  if (CONSTANT_CLASS_P (cval))
    return cval;

  tree orig_cval = cval;
  STRIP_NOPS (cval);
  /* Rewrite pointer + constant-offset into &MEM_REF so the result is a
     gimple invariant address.  */
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  /* Replace the compound literal by its underlying decl.  */
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      /* Give up on addresses of symbols the current unit may not refer to.  */
      if (VAR_OR_FUNCTION_DECL_P (base)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_TYPE (base) == error_mark_node)
	return NULL_TREE;
      if (VAR_P (base))
	/* ??? We should be able to assert that TREE_ADDRESSABLE is set,
	   but since the use can be in a debug stmt we can't.  */
	;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  /* In CONSTRUCTORs we may see unfolded constants like (int (*) ()) 0.  */
  if (TREE_CODE (cval) == INTEGER_CST)
    {
      if (TREE_OVERFLOW_P (cval))
	cval = drop_tree_overflow (cval);
      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  return orig_cval;
}
cbdd87d4
RG
278
279/* If SYM is a constant variable with known value, return the value.
280 NULL_TREE is returned otherwise. */
281
282tree
283get_symbol_constant_value (tree sym)
284{
6a6dac52
JH
285 tree val = ctor_for_folding (sym);
286 if (val != error_mark_node)
cbdd87d4 287 {
cbdd87d4
RG
288 if (val)
289 {
9d60be38 290 val = canonicalize_constructor_val (unshare_expr (val), sym);
1389294c 291 if (val && is_gimple_min_invariant (val))
17f39a39 292 return val;
1389294c
JH
293 else
294 return NULL_TREE;
cbdd87d4
RG
295 }
296 /* Variables declared 'const' without an initializer
297 have zero as the initializer if they may not be
298 overridden at link or run time. */
299 if (!val
b8a8c472 300 && is_gimple_reg_type (TREE_TYPE (sym)))
e8160c9a 301 return build_zero_cst (TREE_TYPE (sym));
cbdd87d4
RG
302 }
303
304 return NULL_TREE;
305}
306
307
cbdd87d4 308
0bf8cd9d
RB
309/* Subroutine of fold_stmt. We perform constant folding of the
310 memory reference tree EXPR. */
cbdd87d4
RG
311
312static tree
0bf8cd9d 313maybe_fold_reference (tree expr)
cbdd87d4 314{
2301a394 315 tree result = NULL_TREE;
cbdd87d4 316
f0eddb90
RG
317 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
318 || TREE_CODE (expr) == REALPART_EXPR
319 || TREE_CODE (expr) == IMAGPART_EXPR)
320 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
2301a394 321 result = fold_unary_loc (EXPR_LOCATION (expr),
f0eddb90
RG
322 TREE_CODE (expr),
323 TREE_TYPE (expr),
2301a394
RB
324 TREE_OPERAND (expr, 0));
325 else if (TREE_CODE (expr) == BIT_FIELD_REF
326 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
327 result = fold_ternary_loc (EXPR_LOCATION (expr),
328 TREE_CODE (expr),
329 TREE_TYPE (expr),
330 TREE_OPERAND (expr, 0),
331 TREE_OPERAND (expr, 1),
332 TREE_OPERAND (expr, 2));
333 else
334 result = fold_const_aggregate_ref (expr);
f0eddb90 335
2301a394 336 if (result && is_gimple_min_invariant (result))
f0eddb90 337 return result;
cbdd87d4 338
cbdd87d4
RG
339 return NULL_TREE;
340}
341
/* Return true if EXPR is an acceptable right-hand-side for a
   GIMPLE assignment.  We validate the entire tree, not just
   the root node, thus catching expressions that embed complex
   operands that are not permitted in GIMPLE.  This function
   is needed because the folding routines in fold-const.c
   may return such expressions in some cases, e.g., an array
   access with an embedded index addition.  It may make more
   sense to have folding routines that are sensitive to the
   constraints on GIMPLE operands, rather than abandoning any
   any attempt to fold if the usual folding turns out to be too
   aggressive.  */

bool
valid_gimple_rhs_p (tree expr)
{
  enum tree_code code = TREE_CODE (expr);

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
      if (!is_gimple_variable (expr))
	return false;
      break;

    case tcc_constant:
      /* All constants are ok.  */
      break;

    case tcc_comparison:
      /* GENERIC allows comparisons with non-boolean types, reject
	 those for GIMPLE.  Let vector-typed comparisons pass - rules
	 for GENERIC and GIMPLE are the same here.  */
      if (!(INTEGRAL_TYPE_P (TREE_TYPE (expr))
	    && (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE
		|| TYPE_PRECISION (TREE_TYPE (expr)) == 1))
	  && ! VECTOR_TYPE_P (TREE_TYPE (expr)))
	return false;

      /* Fallthru.  */
    case tcc_binary:
      /* Both operands of a binary RHS must already be gimple values.  */
      if (!is_gimple_val (TREE_OPERAND (expr, 0))
	  || !is_gimple_val (TREE_OPERAND (expr, 1)))
	return false;
      break;

    case tcc_unary:
      if (!is_gimple_val (TREE_OPERAND (expr, 0)))
	return false;
      break;

    case tcc_expression:
      switch (code)
	{
	case ADDR_EXPR:
	  {
	    tree t;
	    if (is_gimple_min_invariant (expr))
	      return true;
	    t = TREE_OPERAND (expr, 0);
	    while (handled_component_p (t))
	      {
		/* ??? More checks needed, see the GIMPLE verifier.  */
		if ((TREE_CODE (t) == ARRAY_REF
		     || TREE_CODE (t) == ARRAY_RANGE_REF)
		    && !is_gimple_val (TREE_OPERAND (t, 1)))
		  return false;
		t = TREE_OPERAND (t, 0);
	      }
	    if (!is_gimple_id (t))
	      return false;
	  }
	  break;

	default:
	  if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
	    {
	      if ((code == COND_EXPR
		   ? !is_gimple_condexpr (TREE_OPERAND (expr, 0))
		   : !is_gimple_val (TREE_OPERAND (expr, 0)))
		  || !is_gimple_val (TREE_OPERAND (expr, 1))
		  || !is_gimple_val (TREE_OPERAND (expr, 2)))
		return false;
	      break;
	    }
	  return false;
	}
      break;

    case tcc_vl_exp:
      return false;

    case tcc_exceptional:
      /* A constructor is valid only if all of its elements are
	 already gimple values.  */
      if (code == CONSTRUCTOR)
	{
	  unsigned i;
	  tree elt;
	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (expr), i, elt)
	    if (!is_gimple_val (elt))
	      return false;
	  return true;
	}
      if (code != SSA_NAME)
	return false;
      break;

    case tcc_reference:
      if (code == BIT_FIELD_REF)
	return is_gimple_val (TREE_OPERAND (expr, 0));
      return false;

    default:
      return false;
    }

  return true;
}
458
cbdd87d4
RG
459
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	/* Clobbers carry no value to fold.  */
	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		/* Devirtualize only when the set of possible targets is
		   complete and at most a single candidate remains.  */
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We cannot use __builtin_unreachable here because it
			 cannot have address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }
	  }

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    /* &*p with zero offset simplifies to p itself.  */
	    tree ref = TREE_OPERAND (rhs, 0);
	    if (TREE_CODE (ref) == MEM_REF
		&& integer_zerop (TREE_OPERAND (ref, 1)))
	      {
		result = TREE_OPERAND (ref, 0);
		if (!useless_type_conversion_p (TREE_TYPE (rhs),
						TREE_TYPE (result)))
		  result = build1 (NOP_EXPR, TREE_TYPE (rhs), result);
		return result;
	      }
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (! CONSTANT_CLASS_P (val))
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs)
		 && is_gimple_reg_type (TREE_TYPE (rhs)))
	  return get_symbol_constant_value (rhs);
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
				 TREE_TYPE (gimple_assign_lhs (stmt)),
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  /* Folding may have produced a GENERIC tree that is not a
	     valid GIMPLE rhs; only return it if it is.  */
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
585
fef5a0d9
RB
586
/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* Statements that may store: non-register assignments and calls
	 that are not known to be free of virtual operands.  */
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  /* The last store in the sequence reuses the replaced statement's
	     VDEF; earlier stores get fresh SSA names.  */
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
658
52a5515e
RB
659/* Helper function for update_gimple_call and
660 gimplify_and_update_call_from_tree. A GIMPLE_CALL STMT is being replaced
661 with GIMPLE_CALL NEW_STMT. */
662
663static void
664finish_update_gimple_call (gimple_stmt_iterator *si_p, gimple *new_stmt,
665 gimple *stmt)
666{
667 tree lhs = gimple_call_lhs (stmt);
668 gimple_call_set_lhs (new_stmt, lhs);
669 if (lhs && TREE_CODE (lhs) == SSA_NAME)
670 SSA_NAME_DEF_STMT (lhs) = new_stmt;
671 gimple_move_vops (new_stmt, stmt);
672 gimple_set_location (new_stmt, gimple_location (stmt));
673 if (gimple_block (new_stmt) == NULL_TREE)
674 gimple_set_block (new_stmt, gimple_block (stmt));
675 gsi_replace (si_p, new_stmt, false);
676}
677
678/* Update a GIMPLE_CALL statement at iterator *SI_P to call to FN
679 with number of arguments NARGS, where the arguments in GIMPLE form
680 follow NARGS argument. */
681
682bool
683update_gimple_call (gimple_stmt_iterator *si_p, tree fn, int nargs, ...)
684{
685 va_list ap;
686 gcall *new_stmt, *stmt = as_a <gcall *> (gsi_stmt (*si_p));
687
688 gcc_assert (is_gimple_call (stmt));
689 va_start (ap, nargs);
690 new_stmt = gimple_build_call_valist (fn, nargs, ap);
691 finish_update_gimple_call (si_p, new_stmt, stmt);
692 va_end (ap);
693 return true;
694}
695
696/* Return true if EXPR is a CALL_EXPR suitable for representation
697 as a single GIMPLE_CALL statement. If the arguments require
698 further gimplification, return false. */
699
700static bool
701valid_gimple_call_p (tree expr)
702{
703 unsigned i, nargs;
704
705 if (TREE_CODE (expr) != CALL_EXPR)
706 return false;
707
708 nargs = call_expr_nargs (expr);
709 for (i = 0; i < nargs; i++)
710 {
711 tree arg = CALL_EXPR_ARG (expr, i);
712 if (is_gimple_reg_type (TREE_TYPE (arg)))
713 {
714 if (!is_gimple_val (arg))
715 return false;
716 }
717 else
718 if (!is_gimple_lvalue (arg))
719 return false;
720 }
721
722 return true;
723}
724
cbdd87d4
RG
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  if (valid_gimple_call_p (expr))
    {
      /* The call has simplified to another call.  */
      tree fn = CALL_EXPR_FN (expr);
      unsigned i;
      unsigned nargs = call_expr_nargs (expr);
      vec<tree> args = vNULL;
      gcall *new_stmt;

      if (nargs > 0)
	{
	  args.create (nargs);
	  args.safe_grow_cleared (nargs, true);

	  for (i = 0; i < nargs; i++)
	    args[i] = CALL_EXPR_ARG (expr, i);
	}

      new_stmt = gimple_build_call_vec (fn, args);
      finish_update_gimple_call (si_p, new_stmt, stmt);
      args.release ();
      return;
    }

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      /* No result to keep: gimplify EXPR for its side effects only.  */
      push_gimplify_context (gimple_in_ssa_p (cfun));
      gimplify_and_add (expr, &stmts);
      pop_gimplify_context (NULL);

      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), false);
	  return;
	}
    }
  else
    {
      /* Force EXPR into a gimple operand and assign it to the old lhs.  */
      tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  gsi_replace_with_seq_vops (si_p, stmts);
}
cbdd87d4 802
fef5a0d9
RB
803
804/* Replace the call at *GSI with the gimple value VAL. */
805
e3174bdf 806void
fef5a0d9
RB
807replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
808{
355fe088 809 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9 810 tree lhs = gimple_call_lhs (stmt);
355fe088 811 gimple *repl;
fef5a0d9 812 if (lhs)
e256dfce 813 {
fef5a0d9
RB
814 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
815 val = fold_convert (TREE_TYPE (lhs), val);
816 repl = gimple_build_assign (lhs, val);
817 }
818 else
819 repl = gimple_build_nop ();
820 tree vdef = gimple_vdef (stmt);
821 if (vdef && TREE_CODE (vdef) == SSA_NAME)
822 {
823 unlink_stmt_vdef (stmt);
824 release_ssa_name (vdef);
825 }
f6b4dc28 826 gsi_replace (gsi, repl, false);
fef5a0d9
RB
827}
828
829/* Replace the call at *GSI with the new call REPL and fold that
830 again. */
831
832static void
355fe088 833replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
fef5a0d9 834{
355fe088 835 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9
RB
836 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
837 gimple_set_location (repl, gimple_location (stmt));
779724a5 838 gimple_move_vops (repl, stmt);
f6b4dc28 839 gsi_replace (gsi, repl, false);
fef5a0d9
RB
840 fold_stmt (gsi);
841}
842
843/* Return true if VAR is a VAR_DECL or a component thereof. */
844
845static bool
846var_decl_component_p (tree var)
847{
848 tree inner = var;
849 while (handled_component_p (inner))
850 inner = TREE_OPERAND (inner, 0);
47cac108
RB
851 return (DECL_P (inner)
852 || (TREE_CODE (inner) == MEM_REF
853 && TREE_CODE (TREE_OPERAND (inner, 0)) == ADDR_EXPR));
fef5a0d9
RB
854}
855
/* Return TRUE if the SIZE argument, representing the size of an
   object, is in a range of values of which exactly zero is valid.  */

static bool
size_must_be_zero_p (tree size)
{
  if (integer_zerop (size))
    return true;

  /* Without an SSA name of integral type there is no range to query.  */
  if (TREE_CODE (size) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (size)))
    return false;

  tree type = TREE_TYPE (size);
  int prec = TYPE_PRECISION (type);

  /* Compute the value of SSIZE_MAX, the largest positive value that
     can be stored in ssize_t, the signed counterpart of size_t.  */
  wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
  /* [0, SSIZE_MAX] is the set of object sizes a conforming program
     may pass; anything above it is invalid.  */
  value_range valid_range (build_int_cst (type, 0),
			   wide_int_to_tree (type, ssize_max));
  value_range vr;
  /* Use the function-local range query when available, the global
     one otherwise (e.g. when called outside of a function context).  */
  if (cfun)
    get_range_query (cfun)->range_of_expr (vr, size);
  else
    get_global_range_query ()->range_of_expr (vr, size);
  if (vr.undefined_p ())
    vr.set_varying (TREE_TYPE (size));
  /* SIZE must be zero exactly when its range, clipped to the valid
     object sizes, contains only zero.  */
  vr.intersect (&valid_range);
  return vr.zero_p ();
}
886
cc8bea0a
MS
887/* Fold function call to builtin mem{{,p}cpy,move}. Try to detect and
888 diagnose (otherwise undefined) overlapping copies without preventing
889 folding. When folded, GCC guarantees that overlapping memcpy has
890 the same semantics as memmove. Call to the library memcpy need not
891 provide the same guarantee. Return false if no simplification can
892 be made. */
fef5a0d9
RB
893
static bool
gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
			       tree dest, tree src, enum built_in_function code)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  /* All of memcpy/mempcpy/memmove take the length as the third argument.  */
  tree len = gimple_call_arg (stmt, 2);
  location_t loc = gimple_location (stmt);

  /* If the LEN parameter is a constant zero or in range where
     the only valid value is zero, return DEST.  */
  if (size_must_be_zero_p (len))
    {
      gimple *repl;
      if (gimple_call_lhs (stmt))
	repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
      else
	repl = gimple_build_nop ();
      tree vdef = gimple_vdef (stmt);
      if (vdef && TREE_CODE (vdef) == SSA_NAME)
	{
	  /* The call no longer stores anything; drop its virtual def.  */
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
      gsi_replace (gsi, repl, false);
      return true;
    }

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    {
      /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
	 It's safe and may even be emitted by GCC itself (see bug
	 32667).  */
      unlink_stmt_vdef (stmt);
      if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	release_ssa_name (gimple_vdef (stmt));
      if (!lhs)
	{
	  gsi_replace (gsi, gimple_build_nop (), false);
	  return true;
	}
      /* The LHS still needs to receive DEST (plus LEN for mempcpy);
	 that is emitted at the DONE label below.  */
      goto done;
    }
  else
    {
      /* We cannot (easily) change the type of the copy if it is a storage
	 order barrier, i.e. is equivalent to a VIEW_CONVERT_EXPR that can
	 modify the storage order of objects (see storage_order_barrier_p).  */
      tree srctype
	= POINTER_TYPE_P (TREE_TYPE (src))
	  ? TREE_TYPE (TREE_TYPE (src)) : NULL_TREE;
      tree desttype
	= POINTER_TYPE_P (TREE_TYPE (dest))
	  ? TREE_TYPE (TREE_TYPE (dest)) : NULL_TREE;
      tree destvar, srcvar, srcoff;
      unsigned int src_align, dest_align;
      unsigned HOST_WIDE_INT tmp_len;
      const char *tmp_str;

      /* Build accesses at offset zero with a ref-all character type.  */
      tree off0
	= build_int_cst (build_pointer_type_for_mode (char_type_node,
						      ptr_mode, true), 0);

      /* If we can perform the copy efficiently with first doing all loads
	 and then all stores inline it that way.  Currently efficiently
	 means that we can load all the memory into a single integer
	 register which is what MOVE_MAX gives us.  */
      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (tree_fits_uhwi_p (len)
	  && compare_tree_int (len, MOVE_MAX) <= 0
	  /* FIXME: Don't transform copies from strings with known length.
	     Until GCC 9 this prevented a case in gcc.dg/strlenopt-8.c
	     from being handled, and the case was XFAILed for that reason.
	     Now that it is handled and the XFAIL removed, as soon as other
	     strlenopt tests that rely on it for passing are adjusted, this
	     hack can be removed.  */
	  && !c_strlen (src, 1)
	  && !((tmp_str = getbyterep (src, &tmp_len)) != NULL
	       && memchr (tmp_str, 0, tmp_len) == NULL)
	  && !(srctype
	       && AGGREGATE_TYPE_P (srctype)
	       && TYPE_REVERSE_STORAGE_ORDER (srctype))
	  && !(desttype
	       && AGGREGATE_TYPE_P (desttype)
	       && TYPE_REVERSE_STORAGE_ORDER (desttype)))
	{
	  unsigned ilen = tree_to_uhwi (len);
	  if (pow2p_hwi (ilen))
	    {
	      /* Detect out-of-bounds accesses without issuing warnings.
		 Avoid folding out-of-bounds copies but to avoid false
		 positives for unreachable code defer warning until after
		 DCE has worked its magic.
		 -Wrestrict is still diagnosed.  */
	      if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
							 dest, src, len, len,
							 false, false))
		if (warning != OPT_Wrestrict)
		  return false;

	      scalar_int_mode mode;
	      tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
	      if (type
		  && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
		  && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
		  /* If the destination pointer is not aligned we must be able
		     to emit an unaligned store.  */
		  && (dest_align >= GET_MODE_ALIGNMENT (mode)
		      || !targetm.slow_unaligned_access (mode, dest_align)
		      || (optab_handler (movmisalign_optab, mode)
			  != CODE_FOR_nothing)))
		{
		  tree srctype = type;
		  tree desttype = type;
		  if (src_align < GET_MODE_ALIGNMENT (mode))
		    srctype = build_aligned_type (type, src_align);
		  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
		  tree tem = fold_const_aggregate_ref (srcmem);
		  if (tem)
		    srcmem = tem;
		  /* If the source load would be slow and cannot be done
		     with a misaligned-move pattern, give up on inlining.  */
		  else if (src_align < GET_MODE_ALIGNMENT (mode)
			   && targetm.slow_unaligned_access (mode, src_align)
			   && (optab_handler (movmisalign_optab, mode)
			       == CODE_FOR_nothing))
		    srcmem = NULL_TREE;
		  if (srcmem)
		    {
		      gimple *new_stmt;
		      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
			{
			  /* Load into a temporary first so the load and the
			     store are separate statements.  */
			  new_stmt = gimple_build_assign (NULL_TREE, srcmem);
			  srcmem
			    = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
							  new_stmt);
			  gimple_assign_set_lhs (new_stmt, srcmem);
			  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      if (dest_align < GET_MODE_ALIGNMENT (mode))
			desttype = build_aligned_type (type, dest_align);
		      new_stmt
			= gimple_build_assign (fold_build2 (MEM_REF, desttype,
							    dest, off0),
					       srcmem);
		      gimple_move_vops (new_stmt, stmt);
		      if (!lhs)
			{
			  gsi_replace (gsi, new_stmt, false);
			  return true;
			}
		      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
		      goto done;
		    }
		}
	    }
	}

      if (code == BUILT_IN_MEMMOVE)
	{
	  /* Both DEST and SRC must be pointer types.
	     ??? This is what old code did.  Is the testing for pointer types
	     really mandatory?

	     If either SRC is readonly or length is 1, we can use memcpy.  */
	  if (!dest_align || !src_align)
	    return false;
	  if (readonly_data_expr (src)
	      || (tree_fits_uhwi_p (len)
		  && (MIN (src_align, dest_align) / BITS_PER_UNIT
		      >= tree_to_uhwi (len))))
	    {
	      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If *src and *dest can't overlap, optimize into memcpy as well.  */
	  if (TREE_CODE (src) == ADDR_EXPR
	      && TREE_CODE (dest) == ADDR_EXPR)
	    {
	      tree src_base, dest_base, fn;
	      poly_int64 src_offset = 0, dest_offset = 0;
	      poly_uint64 maxsize;

	      srcvar = TREE_OPERAND (src, 0);
	      src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
	      if (src_base == NULL)
		src_base = srcvar;
	      destvar = TREE_OPERAND (dest, 0);
	      dest_base = get_addr_base_and_unit_offset (destvar,
							 &dest_offset);
	      if (dest_base == NULL)
		dest_base = destvar;
	      if (!poly_int_tree_p (len, &maxsize))
		maxsize = -1;
	      if (SSA_VAR_P (src_base)
		  && SSA_VAR_P (dest_base))
		{
		  if (operand_equal_p (src_base, dest_base, 0)
		      && ranges_maybe_overlap_p (src_offset, maxsize,
						 dest_offset, maxsize))
		    return false;
		}
	      else if (TREE_CODE (src_base) == MEM_REF
		       && TREE_CODE (dest_base) == MEM_REF)
		{
		  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
					 TREE_OPERAND (dest_base, 0), 0))
		    return false;
		  poly_offset_int full_src_offset
		    = mem_ref_offset (src_base) + src_offset;
		  poly_offset_int full_dest_offset
		    = mem_ref_offset (dest_base) + dest_offset;
		  if (ranges_maybe_overlap_p (full_src_offset, maxsize,
					      full_dest_offset, maxsize))
		    return false;
		}
	      else
		return false;

	      fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If the destination and source do not alias optimize into
	     memcpy as well.  */
	  if ((is_gimple_min_invariant (dest)
	       || TREE_CODE (dest) == SSA_NAME)
	      && (is_gimple_min_invariant (src)
		  || TREE_CODE (src) == SSA_NAME))
	    {
	      ao_ref destr, srcr;
	      ao_ref_init_from_ptr_and_size (&destr, dest, len);
	      ao_ref_init_from_ptr_and_size (&srcr, src, len);
	      if (!refs_may_alias_p_1 (&destr, &srcr, false))
		{
		  tree fn;
		  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
		  if (!fn)
		    return false;
		  gimple_call_set_fndecl (stmt, fn);
		  gimple_call_set_arg (stmt, 0, dest);
		  gimple_call_set_arg (stmt, 1, src);
		  fold_stmt (gsi);
		  return true;
		}
	    }

	  return false;
	}

      if (!tree_fits_shwi_p (len))
	return false;
      if (!srctype
	  || (AGGREGATE_TYPE_P (srctype)
	      && TYPE_REVERSE_STORAGE_ORDER (srctype)))
	return false;
      if (!desttype
	  || (AGGREGATE_TYPE_P (desttype)
	      && TYPE_REVERSE_STORAGE_ORDER (desttype)))
	return false;
      /* In the following try to find a type that is most natural to be
	 used for the memcpy source and destination and that allows
	 the most optimization when memcpy is turned into a plain assignment
	 using that type.  In theory we could always use a char[len] type
	 but that only gains us that the destination and source possibly
	 no longer will have their address taken.  */
      if (TREE_CODE (srctype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	srctype = TREE_TYPE (srctype);
      if (TREE_CODE (desttype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	desttype = TREE_TYPE (desttype);
      if (TREE_ADDRESSABLE (srctype)
	  || TREE_ADDRESSABLE (desttype))
	return false;

      /* Make sure we are not copying using a floating-point mode or
	 a type whose size possibly does not match its precision.  */
      if (FLOAT_MODE_P (TYPE_MODE (desttype))
	  || TREE_CODE (desttype) == BOOLEAN_TYPE
	  || TREE_CODE (desttype) == ENUMERAL_TYPE)
	desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
      if (FLOAT_MODE_P (TYPE_MODE (srctype))
	  || TREE_CODE (srctype) == BOOLEAN_TYPE
	  || TREE_CODE (srctype) == ENUMERAL_TYPE)
	srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
      /* bitwise_type_for_mode can fail; fall back to the other side's
	 type, and give up if neither survived.  */
      if (!srctype)
	srctype = desttype;
      if (!desttype)
	desttype = srctype;
      if (!srctype)
	return false;

      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);

      /* Choose between src and destination type for the access based
	 on alignment, whether the access constitutes a register access
	 and whether it may actually expose a declaration for SSA rewrite
	 or SRA decomposition.  Also try to expose a string constant, we
	 might be able to concatenate several of them later into a single
	 string store.  */
      destvar = NULL_TREE;
      srcvar = NULL_TREE;
      if (TREE_CODE (dest) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (dest, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len)
	  && dest_align >= TYPE_ALIGN (desttype)
	  && (is_gimple_reg_type (desttype)
	      || src_align >= TYPE_ALIGN (desttype)))
	destvar = fold_build2 (MEM_REF, desttype, dest, off0);
      else if (TREE_CODE (src) == ADDR_EXPR
	       && var_decl_component_p (TREE_OPERAND (src, 0))
	       && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
	       && src_align >= TYPE_ALIGN (srctype)
	       && (is_gimple_reg_type (srctype)
		   || dest_align >= TYPE_ALIGN (srctype)))
	srcvar = fold_build2 (MEM_REF, srctype, src, off0);
      /* FIXME: Don't transform copies from strings with known original length.
	 As soon as strlenopt tests that rely on it for passing are adjusted,
	 this hack can be removed.  */
      else if (gimple_call_alloca_for_var_p (stmt)
	       && (srcvar = string_constant (src, &srcoff, NULL, NULL))
	       && integer_zerop (srcoff)
	       && tree_int_cst_equal (TYPE_SIZE_UNIT (TREE_TYPE (srcvar)), len)
	       && dest_align >= TYPE_ALIGN (TREE_TYPE (srcvar)))
	srctype = TREE_TYPE (srcvar);
      else
	return false;

      /* Now that we chose an access type express the other side in
	 terms of it if the target allows that with respect to alignment
	 constraints.  */
      if (srcvar == NULL_TREE)
	{
	  if (src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, desttype, src, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return false;
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	    }
	}
      else if (destvar == NULL_TREE)
	{
	  if (dest_align >= TYPE_ALIGN (srctype))
	    destvar = fold_build2 (MEM_REF, srctype, dest, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return false;
	      desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
					     dest_align);
	      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
	    }
	}

      /* Same as above, detect out-of-bounds accesses without issuing
	 warnings.  Avoid folding out-of-bounds copies but to avoid
	 false positives for unreachable code defer warning until
	 after DCE has worked its magic.
	 -Wrestrict is still diagnosed.  */
      if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
						 dest, src, len, len,
						 false, false))
	if (warning != OPT_Wrestrict)
	  return false;

      gimple *new_stmt;
      if (is_gimple_reg_type (TREE_TYPE (srcvar)))
	{
	  tree tem = fold_const_aggregate_ref (srcvar);
	  if (tem)
	    srcvar = tem;
	  if (! is_gimple_min_invariant (srcvar))
	    {
	      /* Materialize the load in a register temporary so the
		 copy becomes a plain register store.  */
	      new_stmt = gimple_build_assign (NULL_TREE, srcvar);
	      srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
						   new_stmt);
	      gimple_assign_set_lhs (new_stmt, srcvar);
	      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
	    }
	  new_stmt = gimple_build_assign (destvar, srcvar);
	  goto set_vop_and_replace;
	}

      /* We get an aggregate copy.  If the source is a STRING_CST, then
	 directly use its type to perform the copy.  */
      if (TREE_CODE (srcvar) == STRING_CST)
	desttype = srctype;

      /* Or else, use an unsigned char[] type to perform the copy in order
	 to preserve padding and to avoid any issues with TREE_ADDRESSABLE
	 types or float modes behavior on copying.  */
      else
	{
	  desttype = build_array_type_nelts (unsigned_char_type_node,
					     tree_to_uhwi (len));
	  srctype = desttype;
	  if (src_align > TYPE_ALIGN (srctype))
	    srctype = build_aligned_type (srctype, src_align);
	  srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	}

      if (dest_align > TYPE_ALIGN (desttype))
	desttype = build_aligned_type (desttype, dest_align);
      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
      new_stmt = gimple_build_assign (destvar, srcvar);

set_vop_and_replace:
      gimple_move_vops (new_stmt, stmt);
      if (!lhs)
	{
	  gsi_replace (gsi, new_stmt, false);
	  return true;
	}
      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
    }

done:
  /* Assign the result to the LHS: DEST for memcpy/memmove,
     DEST + LEN for mempcpy.  */
  gimple_seq stmts = NULL;
  if (code == BUILT_IN_MEMCPY || code == BUILT_IN_MEMMOVE)
    len = NULL_TREE;
  else if (code == BUILT_IN_MEMPCPY)
    {
      len = gimple_convert_to_ptrofftype (&stmts, loc, len);
      dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
			   TREE_TYPE (dest), dest, len);
    }
  else
    gcc_unreachable ();

  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gimple *repl = gimple_build_assign (lhs, dest);
  gsi_replace (gsi, repl, false);
  return true;
}
1351
b3d8d88e
MS
1352/* Transform a call to built-in bcmp(a, b, len) at *GSI into one
1353 to built-in memcmp (a, b, len). */
1354
1355static bool
1356gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
1357{
1358 tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);
1359
1360 if (!fn)
1361 return false;
1362
1363 /* Transform bcmp (a, b, len) into memcmp (a, b, len). */
1364
1365 gimple *stmt = gsi_stmt (*gsi);
1366 tree a = gimple_call_arg (stmt, 0);
1367 tree b = gimple_call_arg (stmt, 1);
1368 tree len = gimple_call_arg (stmt, 2);
1369
1370 gimple *repl = gimple_build_call (fn, 3, a, b, len);
1371 replace_call_with_call_and_fold (gsi, repl);
1372
1373 return true;
1374}
1375
1376/* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
1377 to built-in memmove (dest, src, len). */
1378
1379static bool
1380gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
1381{
1382 tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);
1383
1384 if (!fn)
1385 return false;
1386
1387 /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
1388 it's quivalent to memmove (not memcpy). Transform bcopy (src, dest,
1389 len) into memmove (dest, src, len). */
1390
1391 gimple *stmt = gsi_stmt (*gsi);
1392 tree src = gimple_call_arg (stmt, 0);
1393 tree dest = gimple_call_arg (stmt, 1);
1394 tree len = gimple_call_arg (stmt, 2);
1395
1396 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1397 gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
1398 replace_call_with_call_and_fold (gsi, repl);
1399
1400 return true;
1401}
1402
1403/* Transform a call to built-in bzero (dest, len) at *GSI into one
1404 to built-in memset (dest, 0, len). */
1405
1406static bool
1407gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
1408{
1409 tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
1410
1411 if (!fn)
1412 return false;
1413
1414 /* Transform bzero (dest, len) into memset (dest, 0, len). */
1415
1416 gimple *stmt = gsi_stmt (*gsi);
1417 tree dest = gimple_call_arg (stmt, 0);
1418 tree len = gimple_call_arg (stmt, 1);
1419
1420 gimple_seq seq = NULL;
1421 gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
1422 gimple_seq_add_stmt_without_update (&seq, repl);
1423 gsi_replace_with_seq_vops (gsi, seq);
1424 fold_stmt (gsi);
1425
1426 return true;
1427}
1428
fef5a0d9
RB
1429/* Fold function call to builtin memset or bzero at *GSI setting the
1430 memory of size LEN to VAL. Return whether a simplification was made. */
1431
1432static bool
1433gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1434{
355fe088 1435 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9
RB
1436 tree etype;
1437 unsigned HOST_WIDE_INT length, cval;
1438
1439 /* If the LEN parameter is zero, return DEST. */
1440 if (integer_zerop (len))
1441 {
1442 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1443 return true;
1444 }
1445
1446 if (! tree_fits_uhwi_p (len))
1447 return false;
1448
1449 if (TREE_CODE (c) != INTEGER_CST)
1450 return false;
1451
1452 tree dest = gimple_call_arg (stmt, 0);
1453 tree var = dest;
1454 if (TREE_CODE (var) != ADDR_EXPR)
1455 return false;
1456
1457 var = TREE_OPERAND (var, 0);
1458 if (TREE_THIS_VOLATILE (var))
1459 return false;
1460
1461 etype = TREE_TYPE (var);
1462 if (TREE_CODE (etype) == ARRAY_TYPE)
1463 etype = TREE_TYPE (etype);
1464
1465 if (!INTEGRAL_TYPE_P (etype)
1466 && !POINTER_TYPE_P (etype))
1467 return NULL_TREE;
1468
1469 if (! var_decl_component_p (var))
1470 return NULL_TREE;
1471
1472 length = tree_to_uhwi (len);
7a504f33 1473 if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
1ba9acb1
RB
1474 || (GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (etype))
1475 != GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (etype)))
fef5a0d9
RB
1476 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1477 return NULL_TREE;
1478
1479 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1480 return NULL_TREE;
1481
1ba9acb1
RB
1482 if (!type_has_mode_precision_p (etype))
1483 etype = lang_hooks.types.type_for_mode (SCALAR_INT_TYPE_MODE (etype),
1484 TYPE_UNSIGNED (etype));
1485
fef5a0d9
RB
1486 if (integer_zerop (c))
1487 cval = 0;
1488 else
1489 {
1490 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1491 return NULL_TREE;
1492
1493 cval = TREE_INT_CST_LOW (c);
1494 cval &= 0xff;
1495 cval |= cval << 8;
1496 cval |= cval << 16;
1497 cval |= (cval << 31) << 1;
1498 }
1499
1500 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
355fe088 1501 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
779724a5 1502 gimple_move_vops (store, stmt);
fef5a0d9
RB
1503 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1504 if (gimple_call_lhs (stmt))
1505 {
355fe088 1506 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
f6b4dc28 1507 gsi_replace (gsi, asgn, false);
fef5a0d9
RB
1508 }
1509 else
1510 {
1511 gimple_stmt_iterator gsi2 = *gsi;
1512 gsi_prev (gsi);
1513 gsi_remove (&gsi2, true);
1514 }
1515
1516 return true;
1517}
1518
fb471a13 1519/* Helper of get_range_strlen for ARG that is not an SSA_NAME. */
fef5a0d9
RB
1520
1521static bool
03c4a945
MS
1522get_range_strlen_tree (tree arg, bitmap *visited, strlen_range_kind rkind,
1523 c_strlen_data *pdata, unsigned eltsize)
fef5a0d9 1524{
fb471a13 1525 gcc_assert (TREE_CODE (arg) != SSA_NAME);
e7868dc6 1526
fb471a13
MS
1527 /* The length computed by this invocation of the function. */
1528 tree val = NULL_TREE;
1529
eef2da67
MS
1530 /* True if VAL is an optimistic (tight) bound determined from
1531 the size of the character array in which the string may be
1532 stored. In that case, the computed VAL is used to set
1533 PDATA->MAXBOUND. */
1534 bool tight_bound = false;
1535
fb471a13
MS
1536 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1537 if (TREE_CODE (arg) == ADDR_EXPR
1538 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
fef5a0d9 1539 {
fb471a13
MS
1540 tree op = TREE_OPERAND (arg, 0);
1541 if (integer_zerop (TREE_OPERAND (op, 1)))
fef5a0d9 1542 {
fb471a13
MS
1543 tree aop0 = TREE_OPERAND (op, 0);
1544 if (TREE_CODE (aop0) == INDIRECT_REF
1545 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
03c4a945
MS
1546 return get_range_strlen (TREE_OPERAND (aop0, 0), visited, rkind,
1547 pdata, eltsize);
fef5a0d9 1548 }
598f7235 1549 else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF
84de9426 1550 && rkind == SRK_LENRANGE)
fef5a0d9 1551 {
fb471a13
MS
1552 /* Fail if an array is the last member of a struct object
1553 since it could be treated as a (fake) flexible array
1554 member. */
1555 tree idx = TREE_OPERAND (op, 1);
1556
1557 arg = TREE_OPERAND (op, 0);
1558 tree optype = TREE_TYPE (arg);
1559 if (tree dom = TYPE_DOMAIN (optype))
1560 if (tree bound = TYPE_MAX_VALUE (dom))
1561 if (TREE_CODE (bound) == INTEGER_CST
1562 && TREE_CODE (idx) == INTEGER_CST
1563 && tree_int_cst_lt (bound, idx))
1564 return false;
fef5a0d9 1565 }
fb471a13 1566 }
7d583f42 1567
598f7235 1568 if (rkind == SRK_INT_VALUE)
fb471a13
MS
1569 {
1570 /* We are computing the maximum value (not string length). */
1571 val = arg;
1572 if (TREE_CODE (val) != INTEGER_CST
1573 || tree_int_cst_sgn (val) < 0)
1574 return false;
1575 }
1576 else
1577 {
1578 c_strlen_data lendata = { };
1579 val = c_strlen (arg, 1, &lendata, eltsize);
1580
fb471a13
MS
1581 if (!val && lendata.decl)
1582 {
03c4a945
MS
1583 /* ARG refers to an unterminated const character array.
1584 DATA.DECL with size DATA.LEN. */
1585 val = lendata.minlen;
730832cd 1586 pdata->decl = lendata.decl;
7d583f42 1587 }
fb471a13
MS
1588 }
1589
a7160771
MS
1590 /* Set if VAL represents the maximum length based on array size (set
1591 when exact length cannot be determined). */
1592 bool maxbound = false;
1593
84de9426 1594 if (!val && rkind == SRK_LENRANGE)
fb471a13
MS
1595 {
1596 if (TREE_CODE (arg) == ADDR_EXPR)
730832cd 1597 return get_range_strlen (TREE_OPERAND (arg, 0), visited, rkind,
03c4a945 1598 pdata, eltsize);
88d0c3f0 1599
fb471a13 1600 if (TREE_CODE (arg) == ARRAY_REF)
88d0c3f0 1601 {
fb471a13 1602 tree optype = TREE_TYPE (TREE_OPERAND (arg, 0));
88d0c3f0 1603
fb471a13
MS
1604 /* Determine the "innermost" array type. */
1605 while (TREE_CODE (optype) == ARRAY_TYPE
1606 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
1607 optype = TREE_TYPE (optype);
c42d0aa0 1608
fb471a13
MS
1609 /* Avoid arrays of pointers. */
1610 tree eltype = TREE_TYPE (optype);
1611 if (TREE_CODE (optype) != ARRAY_TYPE
1612 || !INTEGRAL_TYPE_P (eltype))
1613 return false;
c42d0aa0 1614
fb471a13
MS
1615 /* Fail when the array bound is unknown or zero. */
1616 val = TYPE_SIZE_UNIT (optype);
05c13c43
MS
1617 if (!val
1618 || TREE_CODE (val) != INTEGER_CST
1619 || integer_zerop (val))
fb471a13 1620 return false;
1bfd6a00 1621
fb471a13
MS
1622 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1623 integer_one_node);
c42d0aa0 1624
fb471a13
MS
1625 /* Set the minimum size to zero since the string in
1626 the array could have zero length. */
730832cd 1627 pdata->minlen = ssize_int (0);
204a7ecb 1628
eef2da67 1629 tight_bound = true;
fb471a13
MS
1630 }
1631 else if (TREE_CODE (arg) == COMPONENT_REF
1632 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
1633 == ARRAY_TYPE))
1634 {
1635 /* Use the type of the member array to determine the upper
1636 bound on the length of the array. This may be overly
1637 optimistic if the array itself isn't NUL-terminated and
1638 the caller relies on the subsequent member to contain
1639 the NUL but that would only be considered valid if
03c4a945 1640 the array were the last member of a struct. */
fb471a13
MS
1641
1642 tree fld = TREE_OPERAND (arg, 1);
1643
1644 tree optype = TREE_TYPE (fld);
1645
1646 /* Determine the "innermost" array type. */
1647 while (TREE_CODE (optype) == ARRAY_TYPE
1648 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
1649 optype = TREE_TYPE (optype);
1650
1651 /* Fail when the array bound is unknown or zero. */
1652 val = TYPE_SIZE_UNIT (optype);
05c13c43
MS
1653 if (!val
1654 || TREE_CODE (val) != INTEGER_CST
1655 || integer_zerop (val))
fb471a13
MS
1656 return false;
1657 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1658 integer_one_node);
1659
1660 /* Set the minimum size to zero since the string in
1661 the array could have zero length. */
730832cd 1662 pdata->minlen = ssize_int (0);
fb471a13 1663
eef2da67
MS
1664 /* The array size determined above is an optimistic bound
1665 on the length. If the array isn't nul-terminated the
1666 length computed by the library function would be greater.
1667 Even though using strlen to cross the subobject boundary
1668 is undefined, avoid drawing conclusions from the member
1669 type about the length here. */
1670 tight_bound = true;
1671 }
e7868dc6
MS
1672 else if (TREE_CODE (arg) == MEM_REF
1673 && TREE_CODE (TREE_TYPE (arg)) == ARRAY_TYPE
1674 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == INTEGER_TYPE
1675 && TREE_CODE (TREE_OPERAND (arg, 0)) == ADDR_EXPR)
1676 {
1677 /* Handle a MEM_REF into a DECL accessing an array of integers,
1678 being conservative about references to extern structures with
1679 flexible array members that can be initialized to arbitrary
1680 numbers of elements as an extension (static structs are okay).
1681 FIXME: Make this less conservative -- see
1682 component_ref_size in tree.c. */
1683 tree ref = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
1684 if ((TREE_CODE (ref) == PARM_DECL || VAR_P (ref))
1685 && (decl_binds_to_current_def_p (ref)
1686 || !array_at_struct_end_p (arg)))
1687 {
1688 /* Fail if the offset is out of bounds. Such accesses
1689 should be diagnosed at some point. */
1690 val = DECL_SIZE_UNIT (ref);
05c13c43
MS
1691 if (!val
1692 || TREE_CODE (val) != INTEGER_CST
1693 || integer_zerop (val))
e7868dc6
MS
1694 return false;
1695
1696 poly_offset_int psiz = wi::to_offset (val);
1697 poly_offset_int poff = mem_ref_offset (arg);
1698 if (known_le (psiz, poff))
1699 return false;
1700
1701 pdata->minlen = ssize_int (0);
1702
1703 /* Subtract the offset and one for the terminating nul. */
1704 psiz -= poff;
1705 psiz -= 1;
1706 val = wide_int_to_tree (TREE_TYPE (val), psiz);
1707 /* Since VAL reflects the size of a declared object
1708 rather the type of the access it is not a tight bound. */
1709 }
1710 }
1711 else if (TREE_CODE (arg) == PARM_DECL || VAR_P (arg))
fb471a13 1712 {
eef2da67
MS
1713 /* Avoid handling pointers to arrays. GCC might misuse
1714 a pointer to an array of one bound to point to an array
1715 object of a greater bound. */
1716 tree argtype = TREE_TYPE (arg);
1717 if (TREE_CODE (argtype) == ARRAY_TYPE)
88d0c3f0 1718 {
eef2da67 1719 val = TYPE_SIZE_UNIT (argtype);
fb471a13
MS
1720 if (!val
1721 || TREE_CODE (val) != INTEGER_CST
1722 || integer_zerop (val))
88d0c3f0 1723 return false;
fb471a13
MS
1724 val = wide_int_to_tree (TREE_TYPE (val),
1725 wi::sub (wi::to_wide (val), 1));
1726
e495e31a
MS
1727 /* Set the minimum size to zero since the string in
1728 the array could have zero length. */
730832cd 1729 pdata->minlen = ssize_int (0);
88d0c3f0
MS
1730 }
1731 }
a7160771 1732 maxbound = true;
fb471a13 1733 }
88d0c3f0 1734
fb471a13
MS
1735 if (!val)
1736 return false;
fef5a0d9 1737
fb471a13 1738 /* Adjust the lower bound on the string length as necessary. */
730832cd 1739 if (!pdata->minlen
598f7235 1740 || (rkind != SRK_STRLEN
730832cd 1741 && TREE_CODE (pdata->minlen) == INTEGER_CST
fb471a13 1742 && TREE_CODE (val) == INTEGER_CST
730832cd
MS
1743 && tree_int_cst_lt (val, pdata->minlen)))
1744 pdata->minlen = val;
88d0c3f0 1745
a7160771 1746 if (pdata->maxbound && TREE_CODE (pdata->maxbound) == INTEGER_CST)
730832cd
MS
1747 {
1748 /* Adjust the tighter (more optimistic) string length bound
1749 if necessary and proceed to adjust the more conservative
1750 bound. */
1751 if (TREE_CODE (val) == INTEGER_CST)
1752 {
a7160771
MS
1753 if (tree_int_cst_lt (pdata->maxbound, val))
1754 pdata->maxbound = val;
730832cd
MS
1755 }
1756 else
1757 pdata->maxbound = val;
1758 }
a7160771
MS
1759 else if (pdata->maxbound || maxbound)
1760 /* Set PDATA->MAXBOUND only if it either isn't INTEGER_CST or
1761 if VAL corresponds to the maximum length determined based
1762 on the type of the object. */
730832cd
MS
1763 pdata->maxbound = val;
1764
eef2da67
MS
1765 if (tight_bound)
1766 {
1767 /* VAL computed above represents an optimistically tight bound
1768 on the length of the string based on the referenced object's
1769 or subobject's type. Determine the conservative upper bound
1770 based on the enclosing object's size if possible. */
84de9426 1771 if (rkind == SRK_LENRANGE)
eef2da67
MS
1772 {
1773 poly_int64 offset;
1774 tree base = get_addr_base_and_unit_offset (arg, &offset);
1775 if (!base)
1776 {
1777 /* When the call above fails due to a non-constant offset
1778 assume the offset is zero and use the size of the whole
1779 enclosing object instead. */
1780 base = get_base_address (arg);
1781 offset = 0;
1782 }
1783 /* If the base object is a pointer no upper bound on the length
1784 can be determined. Otherwise the maximum length is equal to
1785 the size of the enclosing object minus the offset of
1786 the referenced subobject minus 1 (for the terminating nul). */
1787 tree type = TREE_TYPE (base);
1788 if (TREE_CODE (type) == POINTER_TYPE
e7868dc6
MS
1789 || (TREE_CODE (base) != PARM_DECL && !VAR_P (base))
1790 || !(val = DECL_SIZE_UNIT (base)))
eef2da67
MS
1791 val = build_all_ones_cst (size_type_node);
1792 else
1793 {
1794 val = DECL_SIZE_UNIT (base);
1795 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1796 size_int (offset + 1));
1797 }
1798 }
1799 else
1800 return false;
1801 }
1802
730832cd 1803 if (pdata->maxlen)
fb471a13
MS
1804 {
1805 /* Adjust the more conservative bound if possible/necessary
1806 and fail otherwise. */
598f7235 1807 if (rkind != SRK_STRLEN)
fef5a0d9 1808 {
730832cd 1809 if (TREE_CODE (pdata->maxlen) != INTEGER_CST
fb471a13 1810 || TREE_CODE (val) != INTEGER_CST)
fef5a0d9 1811 return false;
fef5a0d9 1812
730832cd
MS
1813 if (tree_int_cst_lt (pdata->maxlen, val))
1814 pdata->maxlen = val;
fb471a13
MS
1815 return true;
1816 }
730832cd 1817 else if (simple_cst_equal (val, pdata->maxlen) != 1)
fb471a13
MS
1818 {
1819 /* Fail if the length of this ARG is different from that
1820 previously determined from another ARG. */
1821 return false;
1822 }
fef5a0d9
RB
1823 }
1824
730832cd 1825 pdata->maxlen = val;
84de9426 1826 return rkind == SRK_LENRANGE || !integer_all_onesp (val);
fb471a13
MS
1827}
1828
5d6655eb
MS
/* For an ARG referencing one or more strings, try to obtain the range
   of their lengths, or the size of the largest array ARG refers to if
   the range of lengths cannot be determined, and store all in *PDATA.
   For an integer ARG (when RKIND == SRK_INT_VALUE), try to determine
   the maximum constant value.
   If ARG is an SSA_NAME, follow its use-def chains.  When RKIND ==
   SRK_STRLEN, then if PDATA->MAXLEN is not equal to the determined
   length or if we are unable to determine the length, return false.
   VISITED is a bitmap of visited variables.
   RKIND determines the kind of value or range to obtain (see
   strlen_range_kind).
   Set PDATA->DECL if ARG refers to an unterminated constant array.
   On input, set ELTSIZE to 1 for normal single byte character strings,
   and either 2 or 4 for wide character strings (the size of wchar_t).
   Return true if *PDATA was successfully populated and false otherwise.  */

static bool
get_range_strlen (tree arg, bitmap *visited,
		  strlen_range_kind rkind,
		  c_strlen_data *pdata, unsigned eltsize)
{

  /* Non-SSA names (addresses, decls, constants) are handled by the
     non-recursive helper.  */
  if (TREE_CODE (arg) != SSA_NAME)
    return get_range_strlen_tree (arg, visited, rkind, pdata, eltsize);

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  tree var = arg;
  gimple *def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
	 constant length or come from another SSA_NAME with a constant
	 length.  */
      if (gimple_assign_single_p (def_stmt)
	  || gimple_assign_unary_nop_p (def_stmt))
	{
	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  return get_range_strlen (rhs, visited, rkind, pdata, eltsize);
	}
      else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	{
	  /* For a ternary, merge the ranges of both selected values.  */
	  tree ops[2] = { gimple_assign_rhs2 (def_stmt),
			  gimple_assign_rhs3 (def_stmt) };

	  for (unsigned int i = 0; i < 2; i++)
	    if (!get_range_strlen (ops[i], visited, rkind, pdata, eltsize))
	      {
		if (rkind != SRK_LENRANGE)
		  return false;
		/* Set the upper bound to the maximum to prevent
		   it from being adjusted in the next iteration but
		   leave MINLEN and the more conservative MAXBOUND
		   determined so far alone (or leave them null if
		   they haven't been set yet).  That the MINLEN is
		   in fact zero can be determined from MAXLEN being
		   unbounded but the discovered minimum is used for
		   diagnostics.  */
		pdata->maxlen = build_all_ones_cst (size_type_node);
	      }
	  return true;
	}
      return false;

    case GIMPLE_PHI:
      /* Unless RKIND == SRK_LENRANGE, all arguments of the PHI node
	 must have a constant length.  */
      for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
	{
	  tree arg = gimple_phi_arg (def_stmt, i)->def;

	  /* If this PHI has itself as an argument, we cannot
	     determine the string length of this argument.  However,
	     if we can find a constant string length for the other
	     PHI args then we can still be sure that this is a
	     constant string length.  So be optimistic and just
	     continue with the next argument.  */
	  if (arg == gimple_phi_result (def_stmt))
	    continue;

	  if (!get_range_strlen (arg, visited, rkind, pdata, eltsize))
	    {
	      if (rkind != SRK_LENRANGE)
		return false;
	      /* Set the upper bound to the maximum to prevent
		 it from being adjusted in the next iteration but
		 leave MINLEN and the more conservative MAXBOUND
		 determined so far alone (or leave them null if
		 they haven't been set yet).  That the MINLEN is
		 in fact zero can be determined from MAXLEN being
		 unbounded but the discovered minimum is used for
		 diagnostics.  */
	      pdata->maxlen = build_all_ones_cst (size_type_node);
	    }
	}
      return true;

    default:
      return false;
    }
}
5d6655eb 1941
97623b52
MS
1942/* Try to obtain the range of the lengths of the string(s) referenced
1943 by ARG, or the size of the largest array ARG refers to if the range
a7160771
MS
1944 of lengths cannot be determined, and store all in *PDATA which must
1945 be zero-initialized on input except PDATA->MAXBOUND may be set to
1946 a non-null tree node other than INTEGER_CST to request to have it
1947 set to the length of the longest string in a PHI. ELTSIZE is
1948 the expected size of the string element in bytes: 1 for char and
97623b52
MS
1949 some power of 2 for wide characters.
1950 Return true if the range [PDATA->MINLEN, PDATA->MAXLEN] is suitable
1951 for optimization. Returning false means that a nonzero PDATA->MINLEN
1952 doesn't reflect the true lower bound of the range when PDATA->MAXLEN
1953 is -1 (in that case, the actual range is indeterminate, i.e.,
1954 [0, PTRDIFF_MAX - 2]. */
88d0c3f0 1955
3f343040 1956bool
84de9426 1957get_range_strlen (tree arg, c_strlen_data *pdata, unsigned eltsize)
88d0c3f0
MS
1958{
1959 bitmap visited = NULL;
a7160771 1960 tree maxbound = pdata->maxbound;
88d0c3f0 1961
84de9426 1962 if (!get_range_strlen (arg, &visited, SRK_LENRANGE, pdata, eltsize))
730832cd 1963 {
5d6655eb
MS
1964 /* On failure extend the length range to an impossible maximum
1965 (a valid MAXLEN must be less than PTRDIFF_MAX - 1). Other
1966 members can stay unchanged regardless. */
1967 pdata->minlen = ssize_int (0);
1968 pdata->maxlen = build_all_ones_cst (size_type_node);
730832cd 1969 }
5d6655eb
MS
1970 else if (!pdata->minlen)
1971 pdata->minlen = ssize_int (0);
1972
a7160771
MS
1973 /* If it's unchanged from it initial non-null value, set the conservative
1974 MAXBOUND to SIZE_MAX. Otherwise leave it null (if it is null). */
1975 if (maxbound && pdata->maxbound == maxbound)
1976 pdata->maxbound = build_all_ones_cst (size_type_node);
88d0c3f0
MS
1977
1978 if (visited)
1979 BITMAP_FREE (visited);
3f343040 1980
03c4a945 1981 return !integer_all_onesp (pdata->maxlen);
88d0c3f0
MS
1982}
1983
5d6655eb
MS
1984/* Return the maximum value for ARG given RKIND (see strlen_range_kind).
1985 For ARG of pointer types, NONSTR indicates if the caller is prepared
1986 to handle unterminated strings. For integer ARG and when RKIND ==
1987 SRK_INT_VALUE, NONSTR must be null.
e08341bb 1988
5d6655eb
MS
1989 If an unterminated array is discovered and our caller handles
1990 unterminated arrays, then bubble up the offending DECL and
e08341bb
MS
1991 return the maximum size. Otherwise return NULL. */
1992
598f7235
MS
1993static tree
1994get_maxval_strlen (tree arg, strlen_range_kind rkind, tree *nonstr = NULL)
dcb7fae2 1995{
598f7235
MS
1996 /* A non-null NONSTR is meaningless when determining the maximum
1997 value of an integer ARG. */
1998 gcc_assert (rkind != SRK_INT_VALUE || nonstr == NULL);
1999 /* ARG must have an integral type when RKIND says so. */
2000 gcc_assert (rkind != SRK_INT_VALUE || INTEGRAL_TYPE_P (TREE_TYPE (arg)));
2001
dcb7fae2 2002 bitmap visited = NULL;
3f343040 2003
5d6655eb
MS
2004 /* Reset DATA.MAXLEN if the call fails or when DATA.MAXLEN
2005 is unbounded. */
730832cd 2006 c_strlen_data lendata = { };
03c4a945 2007 if (!get_range_strlen (arg, &visited, rkind, &lendata, /* eltsize = */1))
730832cd 2008 lendata.maxlen = NULL_TREE;
5d6655eb
MS
2009 else if (lendata.maxlen && integer_all_onesp (lendata.maxlen))
2010 lendata.maxlen = NULL_TREE;
2011
dcb7fae2
RB
2012 if (visited)
2013 BITMAP_FREE (visited);
2014
e08341bb
MS
2015 if (nonstr)
2016 {
2017 /* For callers prepared to handle unterminated arrays set
2018 *NONSTR to point to the declaration of the array and return
2019 the maximum length/size. */
730832cd
MS
2020 *nonstr = lendata.decl;
2021 return lendata.maxlen;
e08341bb
MS
2022 }
2023
2024 /* Fail if the constant array isn't nul-terminated. */
730832cd 2025 return lendata.decl ? NULL_TREE : lendata.maxlen;
dcb7fae2
RB
2026}
2027
fef5a0d9
RB
2028
/* Fold function call to builtin strcpy with arguments DEST and SRC.
   Replace the call with DEST when SRC and DEST are identical, or with
   an equivalent memcpy call when the length of SRC is a known constant
   and the source is nul-terminated.  Return false if no simplification
   can be made.  */

static bool
gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
			    tree dest, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    {
      /* Issue -Wrestrict unless the pointers are null (those do
	 not point to objects and so do not indicate an overlap;
	 such calls could be the result of sanitization and jump
	 threading).  */
      if (!integer_zerop (dest) && !warning_suppressed_p (stmt, OPT_Wrestrict))
	{
	  tree func = gimple_call_fndecl (stmt);

	  warning_at (loc, OPT_Wrestrict,
		      "%qD source argument is the same as destination",
		      func);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  /* The strlen+memcpy expansion below trades size for speed; skip it
     when optimizing for size.  */
  if (optimize_function_for_size_p (cfun))
    return false;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  /* Set to non-null if ARG refers to an unterminated array.  */
  tree nonstr = NULL;
  tree len = get_maxval_strlen (src, SRK_STRLEN, &nonstr);

  if (nonstr)
    {
      /* Avoid folding calls with unterminated arrays.  */
      if (!warning_suppressed_p (stmt, OPT_Wstringop_overread))
	warn_string_no_nul (loc, stmt, "strcpy", src, nonstr);
      suppress_warning (stmt, OPT_Wstringop_overread);
      return false;
    }

  if (!len)
    return false;

  /* Emit memcpy (dest, src, strlen (src) + 1) in place of the call,
     with the length gimplified before the statement.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2092
/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
   Replace the call with DEST when LEN is zero (diagnosing the suspicious
   use), or with an equivalent memcpy call when the source length is a
   known constant that fits in LEN.  Return false if no simplification
   can be made.  */

static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
			     tree dest, tree src, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Avoid warning if the destination refers to an array/pointer
	 decorated with attribute nonstring.  */
      if (!nonstring)
	{
	  tree fndecl = gimple_call_fndecl (stmt);

	  /* Warn about the lack of nul termination: the result is not
	     a (nul-terminated) string.  */
	  tree slen = get_maxval_strlen (src, SRK_STRLEN);
	  if (slen && !integer_zerop (slen))
	    warning_at (loc, OPT_Wstringop_truncation,
			"%qD destination unchanged after copying no bytes "
			"from a string of length %E",
			fndecl, slen);
	  else
	    warning_at (loc, OPT_Wstringop_truncation,
			"%qD destination unchanged after copying no bytes",
			fndecl);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, SRK_STRLEN);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  /* The size of the source string including the terminating nul.  */
  tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (ssize, len))
    return false;

  /* Diagnose truncation that leaves the copy unterminated.  */
  maybe_diag_stxncpy_trunc (*gsi, src, len);

  /* OK transform into builtin memcpy.  */
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}
2167
71dea1dd
WD
/* Fold function call to builtin strchr or strrchr.
   If both arguments are constant, evaluate and fold the result,
   otherwise simplify str(r)chr (str, 0) into str + strlen (str).
   In general strlen is significantly faster than strchr
   due to being a simpler operation.  */
static bool
gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree str = gimple_call_arg (stmt, 0);
  tree c = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  const char *p;
  char ch;

  /* The folded forms below all produce a value; without an lhs there
     is nothing to simplify into.  */
  if (!gimple_call_lhs (stmt))
    return false;

  /* Avoid folding if the first argument is not a nul-terminated array.
     Defer warning until later.  */
  if (!check_nul_terminated_array (NULL_TREE, str))
    return false;

  /* Both arguments constant: compute the result at compile time.  */
  if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
    {
      const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);

      if (p1 == NULL)
	{
	  replace_call_with_value (gsi, integer_zero_node);
	  return true;
	}

      tree len = build_int_cst (size_type_node, p1 - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
					      POINTER_PLUS_EXPR, str, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* The remaining transformations only apply to str(r)chr (s, 0).  */
  if (!integer_zerop (c))
    return false;

  /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size.  */
  if (is_strrchr && optimize_function_for_size_p (cfun))
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);

      if (strchr_fn)
	{
	  gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}

      return false;
    }

  tree len;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);

  if (!strlen_fn)
    return false;

  /* Create newstr = strlen (str).  */
  gimple_seq stmts = NULL;
  gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
  gimple_set_location (new_stmt, loc);
  len = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (new_stmt, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);

  /* Create (str p+ strlen (str)).  */
  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  POINTER_PLUS_EXPR, str, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);
  gsi_replace_with_seq_vops (gsi, stmts);
  /* gsi now points at the assignment to the lhs, get a
     stmt iterator to the strlen.
     ??? We can't use gsi_for_stmt as that doesn't work when the
     CFG isn't built yet.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
2256
c8952930
JJ
/* Fold function call to builtin strstr.
   If both arguments are constant, evaluate and fold the result,
   additionally fold strstr (x, "") into x and strstr (x, "c")
   into strchr (x, 'c').  */
static bool
gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  /* All folded forms produce a value; without an lhs there is nothing
     to simplify into.  */
  if (!gimple_call_lhs (stmt))
    return false;

  tree haystack = gimple_call_arg (stmt, 0);
  tree needle = gimple_call_arg (stmt, 1);

  /* Avoid folding if either argument is not a nul-terminated array.
     Defer warning until later.  */
  if (!check_nul_terminated_array (NULL_TREE, haystack)
      || !check_nul_terminated_array (NULL_TREE, needle))
    return false;

  /* The needle must be a known constant string for any folding here.  */
  const char *q = c_getstr (needle);
  if (q == NULL)
    return false;

  /* Both strings constant: compute the result at compile time.  */
  if (const char *p = c_getstr (haystack))
    {
      const char *r = strstr (p, q);

      if (r == NULL)
	{
	  replace_call_with_value (gsi, integer_zero_node);
	  return true;
	}

      tree len = build_int_cst (size_type_node, r - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt
	= gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
			       haystack, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* For strstr (x, "") return x.  */
  if (q[0] == '\0')
    {
      replace_call_with_value (gsi, haystack);
      return true;
    }

  /* Transform strstr (x, "c") into strchr (x, 'c').  */
  if (q[1] == '\0')
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (strchr_fn)
	{
	  tree c = build_int_cst (integer_type_node, q[0]);
	  gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
2323
fef5a0d9
RB
/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   When SRC is the empty string the call is replaced with DST.  Otherwise,
   when SRC has a computable constant length and the block is optimized
   for speed, the call is expanded into strlen (dst) followed by a memcpy
   of SRC (including its terminating nul) to dst + strlen (dst).

   Return true if the call was simplified, false otherwise.  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, SRK_STRLEN);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple *repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  newdst = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  /* Copy strlen (src) + 1 bytes so the terminating nul is included.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      /* strcat returns its first argument; materialize that for the lhs.  */
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
	 stmt iterator to the memcpy call.
	 ??? We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
2415
07f1cf56
RB
2416/* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
2417 are the arguments to the call. */
2418
2419static bool
2420gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
2421{
355fe088 2422 gimple *stmt = gsi_stmt (*gsi);
07f1cf56
RB
2423 tree dest = gimple_call_arg (stmt, 0);
2424 tree src = gimple_call_arg (stmt, 1);
2425 tree size = gimple_call_arg (stmt, 2);
2426 tree fn;
2427 const char *p;
2428
2429
2430 p = c_getstr (src);
2431 /* If the SRC parameter is "", return DEST. */
2432 if (p && *p == '\0')
2433 {
2434 replace_call_with_value (gsi, dest);
2435 return true;
2436 }
2437
2438 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
2439 return false;
2440
2441 /* If __builtin_strcat_chk is used, assume strcat is available. */
2442 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
2443 if (!fn)
2444 return false;
2445
355fe088 2446 gimple *repl = gimple_build_call (fn, 2, dest, src);
07f1cf56
RB
2447 replace_call_with_call_and_fold (gsi, repl);
2448 return true;
2449}
2450
ad03a744
RB
/* Simplify a call to the strncat builtin.  Replace the call with DST
   when the bound or the source length is zero, or with a plain strcat
   call when the constant bound is known to cover the whole constant
   source string, diagnosing suspicious bounds along the way.
   Return true if the call was simplified.  */

static bool
gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dst = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  const char *p = c_getstr (src);

  /* If the requested length is zero, or the src parameter string
     length is zero, return the dst parameter.  */
  if (integer_zerop (len) || (p && *p == '\0'))
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* The transformations below require a constant bound and a constant
     source string.  */
  if (TREE_CODE (len) != INTEGER_CST || !p)
    return false;

  unsigned srclen = strlen (p);

  int cmpsrc = compare_tree_int (len, srclen);

  /* Return early if the requested len is less than the string length.
     Warnings will be issued elsewhere later.  */
  if (cmpsrc < 0)
    return false;

  unsigned HOST_WIDE_INT dstsize;

  bool nowarn = warning_suppressed_p (stmt, OPT_Wstringop_overflow_);

  if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
    {
      int cmpdst = compare_tree_int (len, dstsize);

      if (cmpdst >= 0)
	{
	  tree fndecl = gimple_call_fndecl (stmt);

	  /* Strncat copies (at most) LEN bytes and always appends
	     the terminating NUL so the specified bound should never
	     be equal to (or greater than) the size of the destination.
	     If it is, the copy could overflow.  */
	  location_t loc = gimple_location (stmt);
	  nowarn = warning_at (loc, OPT_Wstringop_overflow_,
			       cmpdst == 0
			       ? G_("%qD specified bound %E equals "
				    "destination size")
			       : G_("%qD specified bound %E exceeds "
				    "destination size %wu"),
			       fndecl, len, dstsize);
	  if (nowarn)
	    suppress_warning (stmt, OPT_Wstringop_overflow_);
	}
    }

  if (!nowarn && cmpsrc == 0)
    {
      tree fndecl = gimple_call_fndecl (stmt);
      location_t loc = gimple_location (stmt);

      /* To avoid possible overflow the specified bound should also
	 not be equal to the length of the source, even when the size
	 of the destination is unknown (it's not an uncommon mistake
	 to specify as the bound to strncpy the length of the source).  */
      if (warning_at (loc, OPT_Wstringop_overflow_,
		      "%qD specified bound %E equals source length",
		      fndecl, len))
	suppress_warning (stmt, OPT_Wstringop_overflow_);
    }

  tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);

  /* If the replacement _DECL isn't initialized, don't do the
     transformation.  */
  if (!fn)
    return false;

  /* Otherwise, emit a call to strcat.  */
  gcall *repl = gimple_build_call (fn, 2, dst, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2539
745583f9
RB
2540/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2541 LEN, and SIZE. */
2542
2543static bool
2544gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
2545{
355fe088 2546 gimple *stmt = gsi_stmt (*gsi);
745583f9
RB
2547 tree dest = gimple_call_arg (stmt, 0);
2548 tree src = gimple_call_arg (stmt, 1);
2549 tree len = gimple_call_arg (stmt, 2);
2550 tree size = gimple_call_arg (stmt, 3);
2551 tree fn;
2552 const char *p;
2553
2554 p = c_getstr (src);
2555 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2556 if ((p && *p == '\0')
2557 || integer_zerop (len))
2558 {
2559 replace_call_with_value (gsi, dest);
2560 return true;
2561 }
2562
2563 if (! tree_fits_uhwi_p (size))
2564 return false;
2565
2566 if (! integer_all_onesp (size))
2567 {
2568 tree src_len = c_strlen (src, 1);
2569 if (src_len
2570 && tree_fits_uhwi_p (src_len)
2571 && tree_fits_uhwi_p (len)
2572 && ! tree_int_cst_lt (len, src_len))
2573 {
2574 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2575 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
2576 if (!fn)
2577 return false;
2578
355fe088 2579 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
745583f9
RB
2580 replace_call_with_call_and_fold (gsi, repl);
2581 return true;
2582 }
2583 return false;
2584 }
2585
2586 /* If __builtin_strncat_chk is used, assume strncat is available. */
2587 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2588 if (!fn)
2589 return false;
2590
355fe088 2591 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
745583f9
RB
2592 replace_call_with_call_and_fold (gsi, repl);
2593 return true;
2594}
2595
a918bfbf
ML
2596/* Build and append gimple statements to STMTS that would load a first
2597 character of a memory location identified by STR. LOC is location
2598 of the statement. */
2599
2600static tree
2601gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2602{
2603 tree var;
2604
2605 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2606 tree cst_uchar_ptr_node
2607 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2608 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2609
2610 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2611 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2612 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2613
2614 gimple_assign_set_lhs (stmt, var);
2615 gimple_seq_add_stmt_without_update (stmts, stmt);
2616
2617 return var;
2618}
2619
d2f8402a 2620/* Fold a call to the str{n}{case}cmp builtin pointed by GSI iterator. */
a918bfbf
ML
2621
2622static bool
2623gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
2624{
2625 gimple *stmt = gsi_stmt (*gsi);
2626 tree callee = gimple_call_fndecl (stmt);
2627 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2628
2629 tree type = integer_type_node;
2630 tree str1 = gimple_call_arg (stmt, 0);
2631 tree str2 = gimple_call_arg (stmt, 1);
2632 tree lhs = gimple_call_lhs (stmt);
d86d8b35
MS
2633
2634 tree bound_node = NULL_TREE;
d2f8402a 2635 unsigned HOST_WIDE_INT bound = HOST_WIDE_INT_M1U;
a918bfbf
ML
2636
2637 /* Handle strncmp and strncasecmp functions. */
2638 if (gimple_call_num_args (stmt) == 3)
2639 {
d86d8b35
MS
2640 bound_node = gimple_call_arg (stmt, 2);
2641 if (tree_fits_uhwi_p (bound_node))
2642 bound = tree_to_uhwi (bound_node);
a918bfbf
ML
2643 }
2644
d86d8b35 2645 /* If the BOUND parameter is zero, return zero. */
d2f8402a 2646 if (bound == 0)
a918bfbf
ML
2647 {
2648 replace_call_with_value (gsi, integer_zero_node);
2649 return true;
2650 }
2651
2652 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
2653 if (operand_equal_p (str1, str2, 0))
2654 {
2655 replace_call_with_value (gsi, integer_zero_node);
2656 return true;
2657 }
2658
d2f8402a
MS
2659 /* Initially set to the number of characters, including the terminating
2660 nul if each array has one. LENx == strnlen (Sx, LENx) implies that
2661 the array Sx is not terminated by a nul.
2662 For nul-terminated strings then adjusted to their length so that
2663 LENx == NULPOSx holds. */
2664 unsigned HOST_WIDE_INT len1 = HOST_WIDE_INT_MAX, len2 = len1;
866626ef
MS
2665 const char *p1 = getbyterep (str1, &len1);
2666 const char *p2 = getbyterep (str2, &len2);
d2f8402a
MS
2667
2668 /* The position of the terminating nul character if one exists, otherwise
2669 a value greater than LENx. */
2670 unsigned HOST_WIDE_INT nulpos1 = HOST_WIDE_INT_MAX, nulpos2 = nulpos1;
2671
2672 if (p1)
2673 {
2674 size_t n = strnlen (p1, len1);
2675 if (n < len1)
2676 len1 = nulpos1 = n;
2677 }
2678
2679 if (p2)
2680 {
2681 size_t n = strnlen (p2, len2);
2682 if (n < len2)
2683 len2 = nulpos2 = n;
2684 }
a918bfbf
ML
2685
2686 /* For known strings, return an immediate value. */
2687 if (p1 && p2)
2688 {
2689 int r = 0;
2690 bool known_result = false;
2691
2692 switch (fcode)
2693 {
2694 case BUILT_IN_STRCMP:
8b0b334a 2695 case BUILT_IN_STRCMP_EQ:
d2f8402a 2696 if (len1 != nulpos1 || len2 != nulpos2)
a918bfbf 2697 break;
d2f8402a
MS
2698
2699 r = strcmp (p1, p2);
2700 known_result = true;
2701 break;
2702
a918bfbf 2703 case BUILT_IN_STRNCMP:
8b0b334a 2704 case BUILT_IN_STRNCMP_EQ:
a918bfbf 2705 {
d86d8b35
MS
2706 if (bound == HOST_WIDE_INT_M1U)
2707 break;
2708
d2f8402a
MS
2709 /* Reduce the bound to be no more than the length
2710 of the shorter of the two strings, or the sizes
2711 of the unterminated arrays. */
2712 unsigned HOST_WIDE_INT n = bound;
2713
2714 if (len1 == nulpos1 && len1 < n)
2715 n = len1 + 1;
2716 if (len2 == nulpos2 && len2 < n)
2717 n = len2 + 1;
2718
2719 if (MIN (nulpos1, nulpos2) + 1 < n)
a918bfbf 2720 break;
d2f8402a
MS
2721
2722 r = strncmp (p1, p2, n);
a918bfbf
ML
2723 known_result = true;
2724 break;
2725 }
2726 /* Only handleable situation is where the string are equal (result 0),
2727 which is already handled by operand_equal_p case. */
2728 case BUILT_IN_STRCASECMP:
2729 break;
2730 case BUILT_IN_STRNCASECMP:
2731 {
d2f8402a 2732 if (bound == HOST_WIDE_INT_M1U)
a918bfbf 2733 break;
d2f8402a 2734 r = strncmp (p1, p2, bound);
a918bfbf
ML
2735 if (r == 0)
2736 known_result = true;
5de73c05 2737 break;
a918bfbf
ML
2738 }
2739 default:
2740 gcc_unreachable ();
2741 }
2742
2743 if (known_result)
2744 {
2745 replace_call_with_value (gsi, build_cmp_result (type, r));
2746 return true;
2747 }
2748 }
2749
d2f8402a 2750 bool nonzero_bound = (bound >= 1 && bound < HOST_WIDE_INT_M1U)
a918bfbf 2751 || fcode == BUILT_IN_STRCMP
8b0b334a 2752 || fcode == BUILT_IN_STRCMP_EQ
a918bfbf
ML
2753 || fcode == BUILT_IN_STRCASECMP;
2754
2755 location_t loc = gimple_location (stmt);
2756
2757 /* If the second arg is "", return *(const unsigned char*)arg1. */
d2f8402a 2758 if (p2 && *p2 == '\0' && nonzero_bound)
a918bfbf
ML
2759 {
2760 gimple_seq stmts = NULL;
2761 tree var = gimple_load_first_char (loc, str1, &stmts);
2762 if (lhs)
2763 {
2764 stmt = gimple_build_assign (lhs, NOP_EXPR, var);
2765 gimple_seq_add_stmt_without_update (&stmts, stmt);
2766 }
2767
2768 gsi_replace_with_seq_vops (gsi, stmts);
2769 return true;
2770 }
2771
2772 /* If the first arg is "", return -*(const unsigned char*)arg2. */
d2f8402a 2773 if (p1 && *p1 == '\0' && nonzero_bound)
a918bfbf
ML
2774 {
2775 gimple_seq stmts = NULL;
2776 tree var = gimple_load_first_char (loc, str2, &stmts);
2777
2778 if (lhs)
2779 {
2780 tree c = create_tmp_reg_or_ssa_name (integer_type_node);
2781 stmt = gimple_build_assign (c, NOP_EXPR, var);
2782 gimple_seq_add_stmt_without_update (&stmts, stmt);
2783
2784 stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
2785 gimple_seq_add_stmt_without_update (&stmts, stmt);
2786 }
2787
2788 gsi_replace_with_seq_vops (gsi, stmts);
2789 return true;
2790 }
2791
d2f8402a 2792 /* If BOUND is one, return an expression corresponding to
a918bfbf 2793 (*(const unsigned char*)arg2 - *(const unsigned char*)arg1). */
d2f8402a 2794 if (fcode == BUILT_IN_STRNCMP && bound == 1)
a918bfbf
ML
2795 {
2796 gimple_seq stmts = NULL;
2797 tree temp1 = gimple_load_first_char (loc, str1, &stmts);
2798 tree temp2 = gimple_load_first_char (loc, str2, &stmts);
2799
2800 if (lhs)
2801 {
2802 tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
2803 gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
2804 gimple_seq_add_stmt_without_update (&stmts, convert1);
2805
2806 tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
2807 gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
2808 gimple_seq_add_stmt_without_update (&stmts, convert2);
2809
2810 stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
2811 gimple_seq_add_stmt_without_update (&stmts, stmt);
2812 }
2813
2814 gsi_replace_with_seq_vops (gsi, stmts);
2815 return true;
2816 }
2817
d2f8402a
MS
2818 /* If BOUND is greater than the length of one constant string,
2819 and the other argument is also a nul-terminated string, replace
2820 strncmp with strcmp. */
2821 if (fcode == BUILT_IN_STRNCMP
2822 && bound > 0 && bound < HOST_WIDE_INT_M1U
2823 && ((p2 && len2 < bound && len2 == nulpos2)
2824 || (p1 && len1 < bound && len1 == nulpos1)))
caed5c92
QZ
2825 {
2826 tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
2827 if (!fn)
2828 return false;
2829 gimple *repl = gimple_build_call (fn, 2, str1, str2);
2830 replace_call_with_call_and_fold (gsi, repl);
2831 return true;
2832 }
2833
a918bfbf
ML
2834 return false;
2835}
2836
488c6247
ML
2837/* Fold a call to the memchr pointed by GSI iterator. */
2838
2839static bool
2840gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
2841{
2842 gimple *stmt = gsi_stmt (*gsi);
2843 tree lhs = gimple_call_lhs (stmt);
2844 tree arg1 = gimple_call_arg (stmt, 0);
2845 tree arg2 = gimple_call_arg (stmt, 1);
2846 tree len = gimple_call_arg (stmt, 2);
2847
2848 /* If the LEN parameter is zero, return zero. */
2849 if (integer_zerop (len))
2850 {
2851 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2852 return true;
2853 }
2854
2855 char c;
2856 if (TREE_CODE (arg2) != INTEGER_CST
2857 || !tree_fits_uhwi_p (len)
2858 || !target_char_cst_p (arg2, &c))
2859 return false;
2860
2861 unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
2862 unsigned HOST_WIDE_INT string_length;
866626ef 2863 const char *p1 = getbyterep (arg1, &string_length);
488c6247
ML
2864
2865 if (p1)
2866 {
2867 const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
2868 if (r == NULL)
2869 {
5fd336bb 2870 tree mem_size, offset_node;
bb04901d 2871 byte_representation (arg1, &offset_node, &mem_size, NULL);
5fd336bb
JM
2872 unsigned HOST_WIDE_INT offset = (offset_node == NULL_TREE)
2873 ? 0 : tree_to_uhwi (offset_node);
2874 /* MEM_SIZE is the size of the array the string literal
2875 is stored in. */
2876 unsigned HOST_WIDE_INT string_size = tree_to_uhwi (mem_size) - offset;
2877 gcc_checking_assert (string_length <= string_size);
2878 if (length <= string_size)
488c6247
ML
2879 {
2880 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2881 return true;
2882 }
2883 }
2884 else
2885 {
2886 unsigned HOST_WIDE_INT offset = r - p1;
2887 gimple_seq stmts = NULL;
2888 if (lhs != NULL_TREE)
2889 {
aec2d684 2890 tree offset_cst = build_int_cst (sizetype, offset);
488c6247
ML
2891 gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
2892 arg1, offset_cst);
2893 gimple_seq_add_stmt_without_update (&stmts, stmt);
2894 }
2895 else
2896 gimple_seq_add_stmt_without_update (&stmts,
2897 gimple_build_nop ());
2898
2899 gsi_replace_with_seq_vops (gsi, stmts);
2900 return true;
2901 }
2902 }
2903
2904 return false;
2905}
a918bfbf 2906
fef5a0d9
RB
2907/* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
2908 to the call. IGNORE is true if the value returned
2909 by the builtin will be ignored. UNLOCKED is true is true if this
2910 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
2911 the known length of the string. Return NULL_TREE if no simplification
2912 was possible. */
2913
2914static bool
2915gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
fef5a0d9 2916 tree arg0, tree arg1,
dcb7fae2 2917 bool unlocked)
fef5a0d9 2918{
355fe088 2919 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2 2920
fef5a0d9
RB
2921 /* If we're using an unlocked function, assume the other unlocked
2922 functions exist explicitly. */
2923 tree const fn_fputc = (unlocked
2924 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
2925 : builtin_decl_implicit (BUILT_IN_FPUTC));
2926 tree const fn_fwrite = (unlocked
2927 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
2928 : builtin_decl_implicit (BUILT_IN_FWRITE));
2929
2930 /* If the return value is used, don't do the transformation. */
dcb7fae2 2931 if (gimple_call_lhs (stmt))
fef5a0d9
RB
2932 return false;
2933
fef5a0d9
RB
2934 /* Get the length of the string passed to fputs. If the length
2935 can't be determined, punt. */
598f7235 2936 tree len = get_maxval_strlen (arg0, SRK_STRLEN);
fef5a0d9
RB
2937 if (!len
2938 || TREE_CODE (len) != INTEGER_CST)
2939 return false;
2940
2941 switch (compare_tree_int (len, 1))
2942 {
2943 case -1: /* length is 0, delete the call entirely . */
2944 replace_call_with_value (gsi, integer_zero_node);
2945 return true;
2946
2947 case 0: /* length is 1, call fputc. */
2948 {
2949 const char *p = c_getstr (arg0);
2950 if (p != NULL)
2951 {
2952 if (!fn_fputc)
2953 return false;
2954
355fe088 2955 gimple *repl = gimple_build_call (fn_fputc, 2,
fef5a0d9
RB
2956 build_int_cst
2957 (integer_type_node, p[0]), arg1);
2958 replace_call_with_call_and_fold (gsi, repl);
2959 return true;
2960 }
2961 }
2962 /* FALLTHROUGH */
2963 case 1: /* length is greater than 1, call fwrite. */
2964 {
2965 /* If optimizing for size keep fputs. */
2966 if (optimize_function_for_size_p (cfun))
2967 return false;
2968 /* New argument list transforming fputs(string, stream) to
2969 fwrite(string, 1, len, stream). */
2970 if (!fn_fwrite)
2971 return false;
2972
355fe088 2973 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
fef5a0d9
RB
2974 size_one_node, len, arg1);
2975 replace_call_with_call_and_fold (gsi, repl);
2976 return true;
2977 }
2978 default:
2979 gcc_unreachable ();
2980 }
2981 return false;
2982}
2983
2984/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
2985 DEST, SRC, LEN, and SIZE are the arguments to the call.
2986 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
2987 code of the builtin. If MAXLEN is not NULL, it is maximum length
2988 passed as third argument. */
2989
2990static bool
2991gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
fef5a0d9 2992 tree dest, tree src, tree len, tree size,
fef5a0d9
RB
2993 enum built_in_function fcode)
2994{
355fe088 2995 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2
RB
2996 location_t loc = gimple_location (stmt);
2997 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
fef5a0d9
RB
2998 tree fn;
2999
3000 /* If SRC and DEST are the same (and not volatile), return DEST
3001 (resp. DEST+LEN for __mempcpy_chk). */
3002 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
3003 {
3004 if (fcode != BUILT_IN_MEMPCPY_CHK)
3005 {
3006 replace_call_with_value (gsi, dest);
3007 return true;
3008 }
3009 else
3010 {
74e3c262
RB
3011 gimple_seq stmts = NULL;
3012 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
22518428
JJ
3013 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
3014 TREE_TYPE (dest), dest, len);
74e3c262 3015 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
fef5a0d9
RB
3016 replace_call_with_value (gsi, temp);
3017 return true;
3018 }
3019 }
3020
3021 if (! tree_fits_uhwi_p (size))
3022 return false;
3023
598f7235 3024 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
fef5a0d9
RB
3025 if (! integer_all_onesp (size))
3026 {
3027 if (! tree_fits_uhwi_p (len))
3028 {
3029 /* If LEN is not constant, try MAXLEN too.
3030 For MAXLEN only allow optimizing into non-_ocs function
3031 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
3032 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
3033 {
3034 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
3035 {
3036 /* (void) __mempcpy_chk () can be optimized into
3037 (void) __memcpy_chk (). */
3038 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
3039 if (!fn)
3040 return false;
3041
355fe088 3042 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
fef5a0d9
RB
3043 replace_call_with_call_and_fold (gsi, repl);
3044 return true;
3045 }
3046 return false;
3047 }
3048 }
3049 else
3050 maxlen = len;
3051
3052 if (tree_int_cst_lt (size, maxlen))
3053 return false;
3054 }
3055
3056 fn = NULL_TREE;
3057 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
3058 mem{cpy,pcpy,move,set} is available. */
3059 switch (fcode)
3060 {
3061 case BUILT_IN_MEMCPY_CHK:
3062 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
3063 break;
3064 case BUILT_IN_MEMPCPY_CHK:
3065 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
3066 break;
3067 case BUILT_IN_MEMMOVE_CHK:
3068 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
3069 break;
3070 case BUILT_IN_MEMSET_CHK:
3071 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
3072 break;
3073 default:
3074 break;
3075 }
3076
3077 if (!fn)
3078 return false;
3079
355fe088 3080 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
fef5a0d9
RB
3081 replace_call_with_call_and_fold (gsi, repl);
3082 return true;
3083}
3084
3085/* Fold a call to the __st[rp]cpy_chk builtin.
3086 DEST, SRC, and SIZE are the arguments to the call.
3087 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
3088 code of the builtin. If MAXLEN is not NULL, it is maximum length of
3089 strings passed as second argument. */
3090
3091static bool
3092gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
dcb7fae2 3093 tree dest,
fef5a0d9 3094 tree src, tree size,
fef5a0d9
RB
3095 enum built_in_function fcode)
3096{
355fe088 3097 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2
RB
3098 location_t loc = gimple_location (stmt);
3099 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
fef5a0d9
RB
3100 tree len, fn;
3101
3102 /* If SRC and DEST are the same (and not volatile), return DEST. */
3103 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
3104 {
8cd95cec
MS
3105 /* Issue -Wrestrict unless the pointers are null (those do
3106 not point to objects and so do not indicate an overlap;
3107 such calls could be the result of sanitization and jump
3108 threading). */
e9e2bad7
MS
3109 if (!integer_zerop (dest)
3110 && !warning_suppressed_p (stmt, OPT_Wrestrict))
e9b9fa4c
MS
3111 {
3112 tree func = gimple_call_fndecl (stmt);
cc8bea0a 3113
e9b9fa4c
MS
3114 warning_at (loc, OPT_Wrestrict,
3115 "%qD source argument is the same as destination",
3116 func);
3117 }
cc8bea0a 3118
fef5a0d9
RB
3119 replace_call_with_value (gsi, dest);
3120 return true;
3121 }
3122
3123 if (! tree_fits_uhwi_p (size))
3124 return false;
3125
598f7235 3126 tree maxlen = get_maxval_strlen (src, SRK_STRLENMAX);
fef5a0d9
RB
3127 if (! integer_all_onesp (size))
3128 {
3129 len = c_strlen (src, 1);
3130 if (! len || ! tree_fits_uhwi_p (len))
3131 {
3132 /* If LEN is not constant, try MAXLEN too.
3133 For MAXLEN only allow optimizing into non-_ocs function
3134 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
3135 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
3136 {
3137 if (fcode == BUILT_IN_STPCPY_CHK)
3138 {
3139 if (! ignore)
3140 return false;
3141
3142 /* If return value of __stpcpy_chk is ignored,
3143 optimize into __strcpy_chk. */
3144 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
3145 if (!fn)
3146 return false;
3147
355fe088 3148 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
fef5a0d9
RB
3149 replace_call_with_call_and_fold (gsi, repl);
3150 return true;
3151 }
3152
3153 if (! len || TREE_SIDE_EFFECTS (len))
3154 return false;
3155
3156 /* If c_strlen returned something, but not a constant,
3157 transform __strcpy_chk into __memcpy_chk. */
3158 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
3159 if (!fn)
3160 return false;
3161
74e3c262 3162 gimple_seq stmts = NULL;
770fe3a3 3163 len = force_gimple_operand (len, &stmts, true, NULL_TREE);
74e3c262
RB
3164 len = gimple_convert (&stmts, loc, size_type_node, len);
3165 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
3166 build_int_cst (size_type_node, 1));
3167 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
355fe088 3168 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
fef5a0d9
RB
3169 replace_call_with_call_and_fold (gsi, repl);
3170 return true;
3171 }
e256dfce 3172 }
fef5a0d9
RB
3173 else
3174 maxlen = len;
3175
3176 if (! tree_int_cst_lt (maxlen, size))
3177 return false;
e256dfce
RG
3178 }
3179
fef5a0d9
RB
3180 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
3181 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
3182 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
3183 if (!fn)
3184 return false;
3185
355fe088 3186 gimple *repl = gimple_build_call (fn, 2, dest, src);
fef5a0d9
RB
3187 replace_call_with_call_and_fold (gsi, repl);
3188 return true;
3189}
3190
3191/* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
3192 are the arguments to the call. If MAXLEN is not NULL, it is maximum
3193 length passed as third argument. IGNORE is true if return value can be
3194 ignored. FCODE is the BUILT_IN_* code of the builtin. */
3195
3196static bool
3197gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
3198 tree dest, tree src,
dcb7fae2 3199 tree len, tree size,
fef5a0d9
RB
3200 enum built_in_function fcode)
3201{
355fe088 3202 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2 3203 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
fef5a0d9
RB
3204 tree fn;
3205
3206 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
cbdd87d4 3207 {
fef5a0d9
RB
3208 /* If return value of __stpncpy_chk is ignored,
3209 optimize into __strncpy_chk. */
3210 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
3211 if (fn)
3212 {
355fe088 3213 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
fef5a0d9
RB
3214 replace_call_with_call_and_fold (gsi, repl);
3215 return true;
3216 }
cbdd87d4
RG
3217 }
3218
fef5a0d9
RB
3219 if (! tree_fits_uhwi_p (size))
3220 return false;
3221
598f7235 3222 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
fef5a0d9 3223 if (! integer_all_onesp (size))
cbdd87d4 3224 {
fef5a0d9 3225 if (! tree_fits_uhwi_p (len))
fe2ef088 3226 {
fef5a0d9
RB
3227 /* If LEN is not constant, try MAXLEN too.
3228 For MAXLEN only allow optimizing into non-_ocs function
3229 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
3230 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
3231 return false;
8a1561bc 3232 }
fef5a0d9
RB
3233 else
3234 maxlen = len;
3235
3236 if (tree_int_cst_lt (size, maxlen))
3237 return false;
cbdd87d4
RG
3238 }
3239
fef5a0d9
RB
3240 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
3241 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
3242 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
3243 if (!fn)
3244 return false;
3245
355fe088 3246 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
fef5a0d9
RB
3247 replace_call_with_call_and_fold (gsi, repl);
3248 return true;
cbdd87d4
RG
3249}
3250
2625bb5d
RB
3251/* Fold function call to builtin stpcpy with arguments DEST and SRC.
3252 Return NULL_TREE if no simplification can be made. */
3253
3254static bool
3255gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
3256{
3257 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3258 location_t loc = gimple_location (stmt);
3259 tree dest = gimple_call_arg (stmt, 0);
3260 tree src = gimple_call_arg (stmt, 1);
01b0acb7 3261 tree fn, lenp1;
2625bb5d
RB
3262
3263 /* If the result is unused, replace stpcpy with strcpy. */
3264 if (gimple_call_lhs (stmt) == NULL_TREE)
3265 {
3266 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3267 if (!fn)
3268 return false;
3269 gimple_call_set_fndecl (stmt, fn);
3270 fold_stmt (gsi);
3271 return true;
3272 }
3273
01b0acb7 3274 /* Set to non-null if ARG refers to an unterminated array. */
3f46ef1f 3275 c_strlen_data data = { };
d14c547a
MS
3276 /* The size of the unterminated array if SRC referes to one. */
3277 tree size;
3278 /* True if the size is exact/constant, false if it's the lower bound
3279 of a range. */
3280 bool exact;
7d583f42 3281 tree len = c_strlen (src, 1, &data, 1);
2625bb5d
RB
3282 if (!len
3283 || TREE_CODE (len) != INTEGER_CST)
01b0acb7 3284 {
d14c547a 3285 data.decl = unterminated_array (src, &size, &exact);
7d583f42 3286 if (!data.decl)
01b0acb7
MS
3287 return false;
3288 }
3289
7d583f42 3290 if (data.decl)
01b0acb7
MS
3291 {
3292 /* Avoid folding calls with unterminated arrays. */
e9e2bad7 3293 if (!warning_suppressed_p (stmt, OPT_Wstringop_overread))
81d6cdd3 3294 warn_string_no_nul (loc, stmt, "stpcpy", src, data.decl, size,
d14c547a 3295 exact);
e9e2bad7 3296 suppress_warning (stmt, OPT_Wstringop_overread);
01b0acb7
MS
3297 return false;
3298 }
2625bb5d
RB
3299
3300 if (optimize_function_for_size_p (cfun)
3301 /* If length is zero it's small enough. */
3302 && !integer_zerop (len))
3303 return false;
3304
3305 /* If the source has a known length replace stpcpy with memcpy. */
3306 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3307 if (!fn)
3308 return false;
3309
3310 gimple_seq stmts = NULL;
3311 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
3312 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
3313 tem, build_int_cst (size_type_node, 1));
3314 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3315 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
779724a5 3316 gimple_move_vops (repl, stmt);
2625bb5d
RB
3317 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
3318 /* Replace the result with dest + len. */
3319 stmts = NULL;
3320 tem = gimple_convert (&stmts, loc, sizetype, len);
3321 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3322 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
3323 POINTER_PLUS_EXPR, dest, tem);
f6b4dc28 3324 gsi_replace (gsi, ret, false);
2625bb5d
RB
3325 /* Finally fold the memcpy call. */
3326 gimple_stmt_iterator gsi2 = *gsi;
3327 gsi_prev (&gsi2);
3328 fold_stmt (&gsi2);
3329 return true;
3330}
3331
fef5a0d9
RB
3332/* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
3333 NULL_TREE if a normal call should be emitted rather than expanding
3334 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
3335 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
3336 passed as second argument. */
cbdd87d4
RG
3337
3338static bool
fef5a0d9 3339gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
dcb7fae2 3340 enum built_in_function fcode)
cbdd87d4 3341{
538dd0b7 3342 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
fef5a0d9
RB
3343 tree dest, size, len, fn, fmt, flag;
3344 const char *fmt_str;
cbdd87d4 3345
fef5a0d9
RB
3346 /* Verify the required arguments in the original call. */
3347 if (gimple_call_num_args (stmt) < 5)
3348 return false;
cbdd87d4 3349
fef5a0d9
RB
3350 dest = gimple_call_arg (stmt, 0);
3351 len = gimple_call_arg (stmt, 1);
3352 flag = gimple_call_arg (stmt, 2);
3353 size = gimple_call_arg (stmt, 3);
3354 fmt = gimple_call_arg (stmt, 4);
3355
3356 if (! tree_fits_uhwi_p (size))
3357 return false;
3358
3359 if (! integer_all_onesp (size))
3360 {
598f7235 3361 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
fef5a0d9 3362 if (! tree_fits_uhwi_p (len))
cbdd87d4 3363 {
fef5a0d9
RB
3364 /* If LEN is not constant, try MAXLEN too.
3365 For MAXLEN only allow optimizing into non-_ocs function
3366 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
3367 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
cbdd87d4
RG
3368 return false;
3369 }
3370 else
fef5a0d9 3371 maxlen = len;
cbdd87d4 3372
fef5a0d9
RB
3373 if (tree_int_cst_lt (size, maxlen))
3374 return false;
3375 }
cbdd87d4 3376
fef5a0d9
RB
3377 if (!init_target_chars ())
3378 return false;
cbdd87d4 3379
fef5a0d9
RB
3380 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
3381 or if format doesn't contain % chars or is "%s". */
3382 if (! integer_zerop (flag))
3383 {
3384 fmt_str = c_getstr (fmt);
3385 if (fmt_str == NULL)
3386 return false;
3387 if (strchr (fmt_str, target_percent) != NULL
3388 && strcmp (fmt_str, target_percent_s))
3389 return false;
cbdd87d4
RG
3390 }
3391
fef5a0d9
RB
3392 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
3393 available. */
3394 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
3395 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
3396 if (!fn)
491e0b9b
RG
3397 return false;
3398
fef5a0d9
RB
3399 /* Replace the called function and the first 5 argument by 3 retaining
3400 trailing varargs. */
3401 gimple_call_set_fndecl (stmt, fn);
3402 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
3403 gimple_call_set_arg (stmt, 0, dest);
3404 gimple_call_set_arg (stmt, 1, len);
3405 gimple_call_set_arg (stmt, 2, fmt);
3406 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
3407 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
3408 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
3409 fold_stmt (gsi);
3410 return true;
3411}
cbdd87d4 3412
fef5a0d9
RB
3413/* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
3414 Return NULL_TREE if a normal call should be emitted rather than
3415 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
3416 or BUILT_IN_VSPRINTF_CHK. */
cbdd87d4 3417
fef5a0d9
RB
3418static bool
3419gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
3420 enum built_in_function fcode)
3421{
538dd0b7 3422 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
fef5a0d9
RB
3423 tree dest, size, len, fn, fmt, flag;
3424 const char *fmt_str;
3425 unsigned nargs = gimple_call_num_args (stmt);
cbdd87d4 3426
fef5a0d9
RB
3427 /* Verify the required arguments in the original call. */
3428 if (nargs < 4)
3429 return false;
3430 dest = gimple_call_arg (stmt, 0);
3431 flag = gimple_call_arg (stmt, 1);
3432 size = gimple_call_arg (stmt, 2);
3433 fmt = gimple_call_arg (stmt, 3);
3434
3435 if (! tree_fits_uhwi_p (size))
3436 return false;
3437
3438 len = NULL_TREE;
3439
3440 if (!init_target_chars ())
3441 return false;
3442
3443 /* Check whether the format is a literal string constant. */
3444 fmt_str = c_getstr (fmt);
3445 if (fmt_str != NULL)
3446 {
3447 /* If the format doesn't contain % args or %%, we know the size. */
3448 if (strchr (fmt_str, target_percent) == 0)
cbdd87d4 3449 {
fef5a0d9
RB
3450 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
3451 len = build_int_cstu (size_type_node, strlen (fmt_str));
3452 }
3453 /* If the format is "%s" and first ... argument is a string literal,
3454 we know the size too. */
3455 else if (fcode == BUILT_IN_SPRINTF_CHK
3456 && strcmp (fmt_str, target_percent_s) == 0)
3457 {
3458 tree arg;
cbdd87d4 3459
fef5a0d9
RB
3460 if (nargs == 5)
3461 {
3462 arg = gimple_call_arg (stmt, 4);
3463 if (POINTER_TYPE_P (TREE_TYPE (arg)))
3464 {
3465 len = c_strlen (arg, 1);
3466 if (! len || ! tree_fits_uhwi_p (len))
3467 len = NULL_TREE;
3468 }
3469 }
3470 }
3471 }
cbdd87d4 3472
fef5a0d9
RB
3473 if (! integer_all_onesp (size))
3474 {
3475 if (! len || ! tree_int_cst_lt (len, size))
3476 return false;
3477 }
cbdd87d4 3478
fef5a0d9
RB
3479 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
3480 or if format doesn't contain % chars or is "%s". */
3481 if (! integer_zerop (flag))
3482 {
3483 if (fmt_str == NULL)
3484 return false;
3485 if (strchr (fmt_str, target_percent) != NULL
3486 && strcmp (fmt_str, target_percent_s))
3487 return false;
3488 }
cbdd87d4 3489
fef5a0d9
RB
3490 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
3491 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
3492 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
3493 if (!fn)
3494 return false;
3495
3496 /* Replace the called function and the first 4 argument by 2 retaining
3497 trailing varargs. */
3498 gimple_call_set_fndecl (stmt, fn);
3499 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
3500 gimple_call_set_arg (stmt, 0, dest);
3501 gimple_call_set_arg (stmt, 1, fmt);
3502 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
3503 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
3504 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
3505 fold_stmt (gsi);
3506 return true;
3507}
3508
35770bb2
RB
3509/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
3510 ORIG may be null if this is a 2-argument call. We don't attempt to
3511 simplify calls with more than 3 arguments.
3512
a104bd88 3513 Return true if simplification was possible, otherwise false. */
35770bb2 3514
a104bd88 3515bool
dcb7fae2 3516gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
35770bb2 3517{
355fe088 3518 gimple *stmt = gsi_stmt (*gsi);
35770bb2
RB
3519
3520 /* Verify the required arguments in the original call. We deal with two
3521 types of sprintf() calls: 'sprintf (str, fmt)' and
3522 'sprintf (dest, "%s", orig)'. */
3523 if (gimple_call_num_args (stmt) > 3)
3524 return false;
3525
9816f509 3526 tree orig = NULL_TREE;
35770bb2
RB
3527 if (gimple_call_num_args (stmt) == 3)
3528 orig = gimple_call_arg (stmt, 2);
3529
3530 /* Check whether the format is a literal string constant. */
9816f509
MS
3531 tree fmt = gimple_call_arg (stmt, 1);
3532 const char *fmt_str = c_getstr (fmt);
35770bb2
RB
3533 if (fmt_str == NULL)
3534 return false;
3535
9816f509
MS
3536 tree dest = gimple_call_arg (stmt, 0);
3537
35770bb2
RB
3538 if (!init_target_chars ())
3539 return false;
3540
9816f509
MS
3541 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3542 if (!fn)
3543 return false;
3544
35770bb2
RB
3545 /* If the format doesn't contain % args or %%, use strcpy. */
3546 if (strchr (fmt_str, target_percent) == NULL)
3547 {
35770bb2
RB
3548 /* Don't optimize sprintf (buf, "abc", ptr++). */
3549 if (orig)
3550 return false;
3551
3552 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
3553 'format' is known to contain no % formats. */
3554 gimple_seq stmts = NULL;
355fe088 3555 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
01b0acb7
MS
3556
3557 /* Propagate the NO_WARNING bit to avoid issuing the same
3558 warning more than once. */
e9e2bad7 3559 copy_warning (repl, stmt);
01b0acb7 3560
35770bb2 3561 gimple_seq_add_stmt_without_update (&stmts, repl);
a73468e8 3562 if (tree lhs = gimple_call_lhs (stmt))
35770bb2 3563 {
a73468e8
JJ
3564 repl = gimple_build_assign (lhs, build_int_cst (TREE_TYPE (lhs),
3565 strlen (fmt_str)));
35770bb2
RB
3566 gimple_seq_add_stmt_without_update (&stmts, repl);
3567 gsi_replace_with_seq_vops (gsi, stmts);
3568 /* gsi now points at the assignment to the lhs, get a
3569 stmt iterator to the memcpy call.
3570 ??? We can't use gsi_for_stmt as that doesn't work when the
3571 CFG isn't built yet. */
3572 gimple_stmt_iterator gsi2 = *gsi;
3573 gsi_prev (&gsi2);
3574 fold_stmt (&gsi2);
3575 }
3576 else
3577 {
3578 gsi_replace_with_seq_vops (gsi, stmts);
3579 fold_stmt (gsi);
3580 }
3581 return true;
3582 }
3583
3584 /* If the format is "%s", use strcpy if the result isn't used. */
3585 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3586 {
35770bb2
RB
3587 /* Don't crash on sprintf (str1, "%s"). */
3588 if (!orig)
3589 return false;
3590
9816f509
MS
3591 /* Don't fold calls with source arguments of invalid (nonpointer)
3592 types. */
3593 if (!POINTER_TYPE_P (TREE_TYPE (orig)))
3594 return false;
3595
dcb7fae2
RB
3596 tree orig_len = NULL_TREE;
3597 if (gimple_call_lhs (stmt))
35770bb2 3598 {
598f7235 3599 orig_len = get_maxval_strlen (orig, SRK_STRLEN);
d7e78447 3600 if (!orig_len)
35770bb2
RB
3601 return false;
3602 }
3603
3604 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
3605 gimple_seq stmts = NULL;
355fe088 3606 gimple *repl = gimple_build_call (fn, 2, dest, orig);
01b0acb7
MS
3607
3608 /* Propagate the NO_WARNING bit to avoid issuing the same
3609 warning more than once. */
e9e2bad7 3610 copy_warning (repl, stmt);
01b0acb7 3611
35770bb2 3612 gimple_seq_add_stmt_without_update (&stmts, repl);
a73468e8 3613 if (tree lhs = gimple_call_lhs (stmt))
35770bb2 3614 {
a73468e8 3615 if (!useless_type_conversion_p (TREE_TYPE (lhs),
d7e78447 3616 TREE_TYPE (orig_len)))
a73468e8
JJ
3617 orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
3618 repl = gimple_build_assign (lhs, orig_len);
35770bb2
RB
3619 gimple_seq_add_stmt_without_update (&stmts, repl);
3620 gsi_replace_with_seq_vops (gsi, stmts);
3621 /* gsi now points at the assignment to the lhs, get a
3622 stmt iterator to the memcpy call.
3623 ??? We can't use gsi_for_stmt as that doesn't work when the
3624 CFG isn't built yet. */
3625 gimple_stmt_iterator gsi2 = *gsi;
3626 gsi_prev (&gsi2);
3627 fold_stmt (&gsi2);
3628 }
3629 else
3630 {
3631 gsi_replace_with_seq_vops (gsi, stmts);
3632 fold_stmt (gsi);
3633 }
3634 return true;
3635 }
3636 return false;
3637}
3638
d7e78447
RB
3639/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
3640 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
3641 attempt to simplify calls with more than 4 arguments.
35770bb2 3642
a104bd88 3643 Return true if simplification was possible, otherwise false. */
d7e78447 3644
a104bd88 3645bool
dcb7fae2 3646gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
d7e78447 3647{
538dd0b7 3648 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
d7e78447
RB
3649 tree dest = gimple_call_arg (stmt, 0);
3650 tree destsize = gimple_call_arg (stmt, 1);
3651 tree fmt = gimple_call_arg (stmt, 2);
3652 tree orig = NULL_TREE;
3653 const char *fmt_str = NULL;
3654
3655 if (gimple_call_num_args (stmt) > 4)
3656 return false;
3657
3658 if (gimple_call_num_args (stmt) == 4)
3659 orig = gimple_call_arg (stmt, 3);
3660
3661 if (!tree_fits_uhwi_p (destsize))
3662 return false;
3663 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
3664
3665 /* Check whether the format is a literal string constant. */
3666 fmt_str = c_getstr (fmt);
3667 if (fmt_str == NULL)
3668 return false;
3669
3670 if (!init_target_chars ())
3671 return false;
3672
3673 /* If the format doesn't contain % args or %%, use strcpy. */
3674 if (strchr (fmt_str, target_percent) == NULL)
3675 {
3676 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3677 if (!fn)
3678 return false;
3679
3680 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
3681 if (orig)
3682 return false;
3683
3684 /* We could expand this as
3685 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
3686 or to
3687 memcpy (str, fmt_with_nul_at_cstm1, cst);
3688 but in the former case that might increase code size
3689 and in the latter case grow .rodata section too much.
3690 So punt for now. */
3691 size_t len = strlen (fmt_str);
3692 if (len >= destlen)
3693 return false;
3694
3695 gimple_seq stmts = NULL;
355fe088 3696 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
d7e78447 3697 gimple_seq_add_stmt_without_update (&stmts, repl);
a73468e8 3698 if (tree lhs = gimple_call_lhs (stmt))
d7e78447 3699 {
a73468e8
JJ
3700 repl = gimple_build_assign (lhs,
3701 build_int_cst (TREE_TYPE (lhs), len));
d7e78447
RB
3702 gimple_seq_add_stmt_without_update (&stmts, repl);
3703 gsi_replace_with_seq_vops (gsi, stmts);
3704 /* gsi now points at the assignment to the lhs, get a
3705 stmt iterator to the memcpy call.
3706 ??? We can't use gsi_for_stmt as that doesn't work when the
3707 CFG isn't built yet. */
3708 gimple_stmt_iterator gsi2 = *gsi;
3709 gsi_prev (&gsi2);
3710 fold_stmt (&gsi2);
3711 }
3712 else
3713 {
3714 gsi_replace_with_seq_vops (gsi, stmts);
3715 fold_stmt (gsi);
3716 }
3717 return true;
3718 }
3719
3720 /* If the format is "%s", use strcpy if the result isn't used. */
3721 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3722 {
3723 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3724 if (!fn)
3725 return false;
3726
3727 /* Don't crash on snprintf (str1, cst, "%s"). */
3728 if (!orig)
3729 return false;
3730
598f7235 3731 tree orig_len = get_maxval_strlen (orig, SRK_STRLEN);
af9db3a7 3732 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
dcb7fae2 3733 return false;
d7e78447
RB
3734
3735 /* We could expand this as
3736 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
3737 or to
3738 memcpy (str1, str2_with_nul_at_cstm1, cst);
3739 but in the former case that might increase code size
3740 and in the latter case grow .rodata section too much.
3741 So punt for now. */
3742 if (compare_tree_int (orig_len, destlen) >= 0)
3743 return false;
3744
3745 /* Convert snprintf (str1, cst, "%s", str2) into
3746 strcpy (str1, str2) if strlen (str2) < cst. */
3747 gimple_seq stmts = NULL;
355fe088 3748 gimple *repl = gimple_build_call (fn, 2, dest, orig);
d7e78447 3749 gimple_seq_add_stmt_without_update (&stmts, repl);
a73468e8 3750 if (tree lhs = gimple_call_lhs (stmt))
d7e78447 3751 {
a73468e8 3752 if (!useless_type_conversion_p (TREE_TYPE (lhs),
d7e78447 3753 TREE_TYPE (orig_len)))
a73468e8
JJ
3754 orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
3755 repl = gimple_build_assign (lhs, orig_len);
d7e78447
RB
3756 gimple_seq_add_stmt_without_update (&stmts, repl);
3757 gsi_replace_with_seq_vops (gsi, stmts);
3758 /* gsi now points at the assignment to the lhs, get a
3759 stmt iterator to the memcpy call.
3760 ??? We can't use gsi_for_stmt as that doesn't work when the
3761 CFG isn't built yet. */
3762 gimple_stmt_iterator gsi2 = *gsi;
3763 gsi_prev (&gsi2);
3764 fold_stmt (&gsi2);
3765 }
3766 else
3767 {
3768 gsi_replace_with_seq_vops (gsi, stmts);
3769 fold_stmt (gsi);
3770 }
3771 return true;
3772 }
3773 return false;
3774}
35770bb2 3775
edd7ae68
RB
3776/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
3777 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
3778 more than 3 arguments, and ARG may be null in the 2-argument case.
3779
3780 Return NULL_TREE if no simplification was possible, otherwise return the
3781 simplified form of the call as a tree. FCODE is the BUILT_IN_*
3782 code of the function to be simplified. */
3783
3784static bool
3785gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
3786 tree fp, tree fmt, tree arg,
3787 enum built_in_function fcode)
3788{
3789 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3790 tree fn_fputc, fn_fputs;
3791 const char *fmt_str = NULL;
3792
3793 /* If the return value is used, don't do the transformation. */
3794 if (gimple_call_lhs (stmt) != NULL_TREE)
3795 return false;
3796
3797 /* Check whether the format is a literal string constant. */
3798 fmt_str = c_getstr (fmt);
3799 if (fmt_str == NULL)
3800 return false;
3801
3802 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
3803 {
3804 /* If we're using an unlocked function, assume the other
3805 unlocked functions exist explicitly. */
3806 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
3807 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
3808 }
3809 else
3810 {
3811 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
3812 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
3813 }
3814
3815 if (!init_target_chars ())
3816 return false;
3817
3818 /* If the format doesn't contain % args or %%, use strcpy. */
3819 if (strchr (fmt_str, target_percent) == NULL)
3820 {
3821 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
3822 && arg)
3823 return false;
3824
3825 /* If the format specifier was "", fprintf does nothing. */
3826 if (fmt_str[0] == '\0')
3827 {
3828 replace_call_with_value (gsi, NULL_TREE);
3829 return true;
3830 }
3831
3832 /* When "string" doesn't contain %, replace all cases of
3833 fprintf (fp, string) with fputs (string, fp). The fputs
3834 builtin will take care of special cases like length == 1. */
3835 if (fn_fputs)
3836 {
3837 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
3838 replace_call_with_call_and_fold (gsi, repl);
3839 return true;
3840 }
3841 }
3842
3843 /* The other optimizations can be done only on the non-va_list variants. */
3844 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
3845 return false;
3846
3847 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3848 else if (strcmp (fmt_str, target_percent_s) == 0)
3849 {
3850 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3851 return false;
3852 if (fn_fputs)
3853 {
3854 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
3855 replace_call_with_call_and_fold (gsi, repl);
3856 return true;
3857 }
3858 }
3859
3860 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3861 else if (strcmp (fmt_str, target_percent_c) == 0)
3862 {
3863 if (!arg
3864 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
3865 return false;
3866 if (fn_fputc)
3867 {
3868 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
3869 replace_call_with_call_and_fold (gsi, repl);
3870 return true;
3871 }
3872 }
3873
3874 return false;
3875}
3876
ad03a744
RB
3877/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3878 FMT and ARG are the arguments to the call; we don't fold cases with
3879 more than 2 arguments, and ARG may be null if this is a 1-argument case.
3880
3881 Return NULL_TREE if no simplification was possible, otherwise return the
3882 simplified form of the call as a tree. FCODE is the BUILT_IN_*
3883 code of the function to be simplified. */
3884
3885static bool
3886gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
3887 tree arg, enum built_in_function fcode)
3888{
3889 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3890 tree fn_putchar, fn_puts, newarg;
3891 const char *fmt_str = NULL;
3892
3893 /* If the return value is used, don't do the transformation. */
3894 if (gimple_call_lhs (stmt) != NULL_TREE)
3895 return false;
3896
3897 /* Check whether the format is a literal string constant. */
3898 fmt_str = c_getstr (fmt);
3899 if (fmt_str == NULL)
3900 return false;
3901
3902 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
3903 {
3904 /* If we're using an unlocked function, assume the other
3905 unlocked functions exist explicitly. */
3906 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
3907 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
3908 }
3909 else
3910 {
3911 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
3912 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
3913 }
3914
3915 if (!init_target_chars ())
3916 return false;
3917
3918 if (strcmp (fmt_str, target_percent_s) == 0
3919 || strchr (fmt_str, target_percent) == NULL)
3920 {
3921 const char *str;
3922
3923 if (strcmp (fmt_str, target_percent_s) == 0)
3924 {
3925 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3926 return false;
3927
3928 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3929 return false;
3930
3931 str = c_getstr (arg);
3932 if (str == NULL)
3933 return false;
3934 }
3935 else
3936 {
3937 /* The format specifier doesn't contain any '%' characters. */
3938 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
3939 && arg)
3940 return false;
3941 str = fmt_str;
3942 }
3943
3944 /* If the string was "", printf does nothing. */
3945 if (str[0] == '\0')
3946 {
3947 replace_call_with_value (gsi, NULL_TREE);
3948 return true;
3949 }
3950
3951 /* If the string has length of 1, call putchar. */
3952 if (str[1] == '\0')
3953 {
3954 /* Given printf("c"), (where c is any one character,)
3955 convert "c"[0] to an int and pass that to the replacement
3956 function. */
3957 newarg = build_int_cst (integer_type_node, str[0]);
3958 if (fn_putchar)
3959 {
3960 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
3961 replace_call_with_call_and_fold (gsi, repl);
3962 return true;
3963 }
3964 }
3965 else
3966 {
3967 /* If the string was "string\n", call puts("string"). */
3968 size_t len = strlen (str);
3969 if ((unsigned char)str[len - 1] == target_newline
3970 && (size_t) (int) len == len
3971 && (int) len > 0)
3972 {
3973 char *newstr;
ad03a744
RB
3974
3975 /* Create a NUL-terminated string that's one char shorter
3976 than the original, stripping off the trailing '\n'. */
a353fec4 3977 newstr = xstrdup (str);
ad03a744 3978 newstr[len - 1] = '\0';
a353fec4
BE
3979 newarg = build_string_literal (len, newstr);
3980 free (newstr);
ad03a744
RB
3981 if (fn_puts)
3982 {
3983 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
3984 replace_call_with_call_and_fold (gsi, repl);
3985 return true;
3986 }
3987 }
3988 else
3989 /* We'd like to arrange to call fputs(string,stdout) here,
3990 but we need stdout and don't have a way to get it yet. */
3991 return false;
3992 }
3993 }
3994
3995 /* The other optimizations can be done only on the non-va_list variants. */
3996 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3997 return false;
3998
3999 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
4000 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
4001 {
4002 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
4003 return false;
4004 if (fn_puts)
4005 {
4006 gcall *repl = gimple_build_call (fn_puts, 1, arg);
4007 replace_call_with_call_and_fold (gsi, repl);
4008 return true;
4009 }
4010 }
4011
4012 /* If the format specifier was "%c", call __builtin_putchar(arg). */
4013 else if (strcmp (fmt_str, target_percent_c) == 0)
4014 {
4015 if (!arg || ! useless_type_conversion_p (integer_type_node,
4016 TREE_TYPE (arg)))
4017 return false;
4018 if (fn_putchar)
4019 {
4020 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
4021 replace_call_with_call_and_fold (gsi, repl);
4022 return true;
4023 }
4024 }
4025
4026 return false;
4027}
4028
edd7ae68 4029
fef5a0d9
RB
4030
4031/* Fold a call to __builtin_strlen with known length LEN. */
4032
4033static bool
dcb7fae2 4034gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
fef5a0d9 4035{
355fe088 4036 gimple *stmt = gsi_stmt (*gsi);
e08341bb 4037 tree arg = gimple_call_arg (stmt, 0);
c42d0aa0
MS
4038
4039 wide_int minlen;
4040 wide_int maxlen;
4041
5d6655eb 4042 c_strlen_data lendata = { };
03c4a945 4043 if (get_range_strlen (arg, &lendata, /* eltsize = */ 1)
5d6655eb
MS
4044 && !lendata.decl
4045 && lendata.minlen && TREE_CODE (lendata.minlen) == INTEGER_CST
4046 && lendata.maxlen && TREE_CODE (lendata.maxlen) == INTEGER_CST)
c42d0aa0
MS
4047 {
4048 /* The range of lengths refers to either a single constant
4049 string or to the longest and shortest constant string
4050 referenced by the argument of the strlen() call, or to
4051 the strings that can possibly be stored in the arrays
4052 the argument refers to. */
5d6655eb
MS
4053 minlen = wi::to_wide (lendata.minlen);
4054 maxlen = wi::to_wide (lendata.maxlen);
c42d0aa0
MS
4055 }
4056 else
4057 {
4058 unsigned prec = TYPE_PRECISION (sizetype);
4059
4060 minlen = wi::shwi (0, prec);
4061 maxlen = wi::to_wide (max_object_size (), prec) - 2;
4062 }
4063
4064 if (minlen == maxlen)
4065 {
5d6655eb
MS
4066 /* Fold the strlen call to a constant. */
4067 tree type = TREE_TYPE (lendata.minlen);
4068 tree len = force_gimple_operand_gsi (gsi,
4069 wide_int_to_tree (type, minlen),
4070 true, NULL, true, GSI_SAME_STMT);
4071 replace_call_with_value (gsi, len);
c42d0aa0
MS
4072 return true;
4073 }
4074
d4bf6975 4075 /* Set the strlen() range to [0, MAXLEN]. */
a7bf6c08 4076 if (tree lhs = gimple_call_lhs (stmt))
34fcf41e 4077 set_strlen_range (lhs, minlen, maxlen);
c42d0aa0
MS
4078
4079 return false;
cbdd87d4
RG
4080}
4081
48126138
NS
4082/* Fold a call to __builtin_acc_on_device. */
4083
4084static bool
4085gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
4086{
4087 /* Defer folding until we know which compiler we're in. */
4088 if (symtab->state != EXPANSION)
4089 return false;
4090
4091 unsigned val_host = GOMP_DEVICE_HOST;
4092 unsigned val_dev = GOMP_DEVICE_NONE;
4093
4094#ifdef ACCEL_COMPILER
4095 val_host = GOMP_DEVICE_NOT_HOST;
4096 val_dev = ACCEL_COMPILER_acc_device;
4097#endif
4098
4099 location_t loc = gimple_location (gsi_stmt (*gsi));
4100
4101 tree host_eq = make_ssa_name (boolean_type_node);
4102 gimple *host_ass = gimple_build_assign
4103 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
4104 gimple_set_location (host_ass, loc);
4105 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
4106
4107 tree dev_eq = make_ssa_name (boolean_type_node);
4108 gimple *dev_ass = gimple_build_assign
4109 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
4110 gimple_set_location (dev_ass, loc);
4111 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
4112
4113 tree result = make_ssa_name (boolean_type_node);
4114 gimple *result_ass = gimple_build_assign
4115 (result, BIT_IOR_EXPR, host_eq, dev_eq);
4116 gimple_set_location (result_ass, loc);
4117 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
4118
4119 replace_call_with_value (gsi, result);
4120
4121 return true;
4122}
cbdd87d4 4123
fe75f732
PK
4124/* Fold realloc (0, n) -> malloc (n). */
4125
4126static bool
4127gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
4128{
4129 gimple *stmt = gsi_stmt (*gsi);
4130 tree arg = gimple_call_arg (stmt, 0);
4131 tree size = gimple_call_arg (stmt, 1);
4132
4133 if (operand_equal_p (arg, null_pointer_node, 0))
4134 {
4135 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
4136 if (fn_malloc)
4137 {
4138 gcall *repl = gimple_build_call (fn_malloc, 1, size);
4139 replace_call_with_call_and_fold (gsi, repl);
4140 return true;
4141 }
4142 }
4143 return false;
4144}
4145
1bea0d0a
JJ
4146/* Number of bytes into which any type but aggregate or vector types
4147 should fit. */
4148static constexpr size_t clear_padding_unit
4149 = MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT;
4150/* Buffer size on which __builtin_clear_padding folding code works. */
4151static const size_t clear_padding_buf_size = 32 * clear_padding_unit;
4152
4153/* Data passed through __builtin_clear_padding folding. */
4154struct clear_padding_struct {
4155 location_t loc;
896048cf
JJ
4156 /* 0 during __builtin_clear_padding folding, nonzero during
4157 clear_type_padding_in_mask. In that case, instead of clearing the
4158 non-padding bits in union_ptr array clear the padding bits in there. */
4159 bool clear_in_mask;
1bea0d0a
JJ
4160 tree base;
4161 tree alias_type;
4162 gimple_stmt_iterator *gsi;
4163 /* Alignment of buf->base + 0. */
4164 unsigned align;
4165 /* Offset from buf->base. Should be always a multiple of UNITS_PER_WORD. */
4166 HOST_WIDE_INT off;
4167 /* Number of padding bytes before buf->off that don't have padding clear
4168 code emitted yet. */
4169 HOST_WIDE_INT padding_bytes;
4170 /* The size of the whole object. Never emit code to touch
4171 buf->base + buf->sz or following bytes. */
4172 HOST_WIDE_INT sz;
4173 /* Number of bytes recorded in buf->buf. */
4174 size_t size;
4175 /* When inside union, instead of emitting code we and bits inside of
4176 the union_ptr array. */
4177 unsigned char *union_ptr;
4178 /* Set bits mean padding bits that need to be cleared by the builtin. */
4179 unsigned char buf[clear_padding_buf_size + clear_padding_unit];
4180};
4181
4182/* Emit code to clear padding requested in BUF->buf - set bits
4183 in there stand for padding that should be cleared. FULL is true
4184 if everything from the buffer should be flushed, otherwise
4185 it can leave up to 2 * clear_padding_unit bytes for further
4186 processing. */
4187
4188static void
4189clear_padding_flush (clear_padding_struct *buf, bool full)
4190{
4191 gcc_assert ((clear_padding_unit % UNITS_PER_WORD) == 0);
4192 if (!full && buf->size < 2 * clear_padding_unit)
4193 return;
4194 gcc_assert ((buf->off % UNITS_PER_WORD) == 0);
4195 size_t end = buf->size;
4196 if (!full)
4197 end = ((end - clear_padding_unit - 1) / clear_padding_unit
4198 * clear_padding_unit);
4199 size_t padding_bytes = buf->padding_bytes;
4200 if (buf->union_ptr)
4201 {
896048cf
JJ
4202 if (buf->clear_in_mask)
4203 {
4204 /* During clear_type_padding_in_mask, clear the padding
4205 bits set in buf->buf in the buf->union_ptr mask. */
4206 for (size_t i = 0; i < end; i++)
4207 {
4208 if (buf->buf[i] == (unsigned char) ~0)
4209 padding_bytes++;
4210 else
4211 {
4212 memset (&buf->union_ptr[buf->off + i - padding_bytes],
4213 0, padding_bytes);
4214 padding_bytes = 0;
4215 buf->union_ptr[buf->off + i] &= ~buf->buf[i];
4216 }
4217 }
4218 if (full)
4219 {
4220 memset (&buf->union_ptr[buf->off + end - padding_bytes],
4221 0, padding_bytes);
4222 buf->off = 0;
4223 buf->size = 0;
4224 buf->padding_bytes = 0;
4225 }
4226 else
4227 {
4228 memmove (buf->buf, buf->buf + end, buf->size - end);
4229 buf->off += end;
4230 buf->size -= end;
4231 buf->padding_bytes = padding_bytes;
4232 }
4233 return;
4234 }
1bea0d0a
JJ
4235 /* Inside of a union, instead of emitting any code, instead
4236 clear all bits in the union_ptr buffer that are clear
4237 in buf. Whole padding bytes don't clear anything. */
4238 for (size_t i = 0; i < end; i++)
4239 {
4240 if (buf->buf[i] == (unsigned char) ~0)
4241 padding_bytes++;
4242 else
4243 {
4244 padding_bytes = 0;
4245 buf->union_ptr[buf->off + i] &= buf->buf[i];
4246 }
4247 }
4248 if (full)
4249 {
4250 buf->off = 0;
4251 buf->size = 0;
4252 buf->padding_bytes = 0;
4253 }
4254 else
4255 {
4256 memmove (buf->buf, buf->buf + end, buf->size - end);
4257 buf->off += end;
4258 buf->size -= end;
4259 buf->padding_bytes = padding_bytes;
4260 }
4261 return;
4262 }
4263 size_t wordsize = UNITS_PER_WORD;
4264 for (size_t i = 0; i < end; i += wordsize)
4265 {
4266 size_t nonzero_first = wordsize;
4267 size_t nonzero_last = 0;
4adfcea0
JJ
4268 size_t zero_first = wordsize;
4269 size_t zero_last = 0;
4270 bool all_ones = true, bytes_only = true;
1bea0d0a
JJ
4271 if ((unsigned HOST_WIDE_INT) (buf->off + i + wordsize)
4272 > (unsigned HOST_WIDE_INT) buf->sz)
4273 {
4274 gcc_assert (wordsize > 1);
4275 wordsize /= 2;
4276 i -= wordsize;
4277 continue;
4278 }
4279 for (size_t j = i; j < i + wordsize && j < end; j++)
4280 {
4281 if (buf->buf[j])
4282 {
4283 if (nonzero_first == wordsize)
4284 {
4285 nonzero_first = j - i;
4286 nonzero_last = j - i;
4287 }
4288 if (nonzero_last != j - i)
4289 all_ones = false;
4290 nonzero_last = j + 1 - i;
4291 }
4adfcea0
JJ
4292 else
4293 {
4294 if (zero_first == wordsize)
4295 zero_first = j - i;
4296 zero_last = j + 1 - i;
4297 }
1bea0d0a 4298 if (buf->buf[j] != 0 && buf->buf[j] != (unsigned char) ~0)
4adfcea0
JJ
4299 {
4300 all_ones = false;
4301 bytes_only = false;
4302 }
1bea0d0a 4303 }
4adfcea0 4304 size_t padding_end = i;
1bea0d0a
JJ
4305 if (padding_bytes)
4306 {
4307 if (nonzero_first == 0
4308 && nonzero_last == wordsize
4309 && all_ones)
4310 {
4311 /* All bits are padding and we had some padding
4312 before too. Just extend it. */
4313 padding_bytes += wordsize;
4314 continue;
4315 }
1bea0d0a
JJ
4316 if (all_ones && nonzero_first == 0)
4317 {
4318 padding_bytes += nonzero_last;
4319 padding_end += nonzero_last;
4320 nonzero_first = wordsize;
4321 nonzero_last = 0;
4322 }
4adfcea0
JJ
4323 else if (bytes_only && nonzero_first == 0)
4324 {
4325 gcc_assert (zero_first && zero_first != wordsize);
4326 padding_bytes += zero_first;
4327 padding_end += zero_first;
4328 }
4329 tree atype, src;
4330 if (padding_bytes == 1)
4331 {
4332 atype = char_type_node;
4333 src = build_zero_cst (char_type_node);
4334 }
4335 else
4336 {
4337 atype = build_array_type_nelts (char_type_node, padding_bytes);
4338 src = build_constructor (atype, NULL);
4339 }
1bea0d0a
JJ
4340 tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
4341 build_int_cst (buf->alias_type,
4342 buf->off + padding_end
4343 - padding_bytes));
1bea0d0a
JJ
4344 gimple *g = gimple_build_assign (dst, src);
4345 gimple_set_location (g, buf->loc);
4346 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4347 padding_bytes = 0;
4348 buf->padding_bytes = 0;
4349 }
4350 if (nonzero_first == wordsize)
4351 /* All bits in a word are 0, there are no padding bits. */
4352 continue;
4353 if (all_ones && nonzero_last == wordsize)
4354 {
4355 /* All bits between nonzero_first and end of word are padding
4356 bits, start counting padding_bytes. */
4357 padding_bytes = nonzero_last - nonzero_first;
4358 continue;
4359 }
4adfcea0
JJ
4360 if (bytes_only)
4361 {
4362 /* If bitfields aren't involved in this word, prefer storing
4363 individual bytes or groups of them over performing a RMW
4364 operation on the whole word. */
4365 gcc_assert (i + zero_last <= end);
4366 for (size_t j = padding_end; j < i + zero_last; j++)
4367 {
4368 if (buf->buf[j])
4369 {
4370 size_t k;
4371 for (k = j; k < i + zero_last; k++)
4372 if (buf->buf[k] == 0)
4373 break;
4374 HOST_WIDE_INT off = buf->off + j;
4375 tree atype, src;
4376 if (k - j == 1)
4377 {
4378 atype = char_type_node;
4379 src = build_zero_cst (char_type_node);
4380 }
4381 else
4382 {
4383 atype = build_array_type_nelts (char_type_node, k - j);
4384 src = build_constructor (atype, NULL);
4385 }
4386 tree dst = build2_loc (buf->loc, MEM_REF, atype,
4387 buf->base,
4388 build_int_cst (buf->alias_type, off));
4389 gimple *g = gimple_build_assign (dst, src);
4390 gimple_set_location (g, buf->loc);
4391 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4392 j = k;
4393 }
4394 }
4395 if (nonzero_last == wordsize)
4396 padding_bytes = nonzero_last - zero_last;
4397 continue;
4398 }
1bea0d0a
JJ
4399 for (size_t eltsz = 1; eltsz <= wordsize; eltsz <<= 1)
4400 {
4401 if (nonzero_last - nonzero_first <= eltsz
4402 && ((nonzero_first & ~(eltsz - 1))
4403 == ((nonzero_last - 1) & ~(eltsz - 1))))
4404 {
4405 tree type;
4406 if (eltsz == 1)
4407 type = char_type_node;
4408 else
4409 type = lang_hooks.types.type_for_size (eltsz * BITS_PER_UNIT,
4410 0);
4411 size_t start = nonzero_first & ~(eltsz - 1);
4412 HOST_WIDE_INT off = buf->off + i + start;
4413 tree atype = type;
4414 if (eltsz > 1 && buf->align < TYPE_ALIGN (type))
4415 atype = build_aligned_type (type, buf->align);
4416 tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
4417 build_int_cst (buf->alias_type, off));
4418 tree src;
4419 gimple *g;
4420 if (all_ones
4421 && nonzero_first == start
4422 && nonzero_last == start + eltsz)
4423 src = build_zero_cst (type);
4424 else
4425 {
4426 src = make_ssa_name (type);
4427 g = gimple_build_assign (src, unshare_expr (dst));
4428 gimple_set_location (g, buf->loc);
4429 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4430 tree mask = native_interpret_expr (type,
4431 buf->buf + i + start,
4432 eltsz);
4433 gcc_assert (mask && TREE_CODE (mask) == INTEGER_CST);
4434 mask = fold_build1 (BIT_NOT_EXPR, type, mask);
4435 tree src_masked = make_ssa_name (type);
4436 g = gimple_build_assign (src_masked, BIT_AND_EXPR,
4437 src, mask);
4438 gimple_set_location (g, buf->loc);
4439 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4440 src = src_masked;
4441 }
4442 g = gimple_build_assign (dst, src);
4443 gimple_set_location (g, buf->loc);
4444 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4445 break;
4446 }
4447 }
4448 }
4449 if (full)
4450 {
4451 if (padding_bytes)
4452 {
4adfcea0
JJ
4453 tree atype, src;
4454 if (padding_bytes == 1)
4455 {
4456 atype = char_type_node;
4457 src = build_zero_cst (char_type_node);
4458 }
4459 else
4460 {
4461 atype = build_array_type_nelts (char_type_node, padding_bytes);
4462 src = build_constructor (atype, NULL);
4463 }
1bea0d0a
JJ
4464 tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
4465 build_int_cst (buf->alias_type,
4466 buf->off + end
4467 - padding_bytes));
1bea0d0a
JJ
4468 gimple *g = gimple_build_assign (dst, src);
4469 gimple_set_location (g, buf->loc);
4470 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4471 }
4472 size_t end_rem = end % UNITS_PER_WORD;
4473 buf->off += end - end_rem;
4474 buf->size = end_rem;
4475 memset (buf->buf, 0, buf->size);
4476 buf->padding_bytes = 0;
4477 }
4478 else
4479 {
4480 memmove (buf->buf, buf->buf + end, buf->size - end);
4481 buf->off += end;
4482 buf->size -= end;
4483 buf->padding_bytes = padding_bytes;
4484 }
4485}
4486
4487/* Append PADDING_BYTES padding bytes. */
4488
4489static void
4490clear_padding_add_padding (clear_padding_struct *buf,
4491 HOST_WIDE_INT padding_bytes)
4492{
4493 if (padding_bytes == 0)
4494 return;
4495 if ((unsigned HOST_WIDE_INT) padding_bytes + buf->size
4496 > (unsigned HOST_WIDE_INT) clear_padding_buf_size)
4497 clear_padding_flush (buf, false);
4498 if ((unsigned HOST_WIDE_INT) padding_bytes + buf->size
4499 > (unsigned HOST_WIDE_INT) clear_padding_buf_size)
4500 {
4501 memset (buf->buf + buf->size, ~0, clear_padding_buf_size - buf->size);
4502 padding_bytes -= clear_padding_buf_size - buf->size;
4503 buf->size = clear_padding_buf_size;
4504 clear_padding_flush (buf, false);
4505 gcc_assert (buf->padding_bytes);
4506 /* At this point buf->buf[0] through buf->buf[buf->size - 1]
4507 is guaranteed to be all ones. */
4508 padding_bytes += buf->size;
4509 buf->size = padding_bytes % UNITS_PER_WORD;
4510 memset (buf->buf, ~0, buf->size);
4511 buf->off += padding_bytes - buf->size;
4512 buf->padding_bytes += padding_bytes - buf->size;
4513 }
4514 else
4515 {
4516 memset (buf->buf + buf->size, ~0, padding_bytes);
4517 buf->size += padding_bytes;
4518 }
4519}
4520
4521static void clear_padding_type (clear_padding_struct *, tree, HOST_WIDE_INT);
4522
4523/* Clear padding bits of union type TYPE. */
4524
4525static void
4526clear_padding_union (clear_padding_struct *buf, tree type, HOST_WIDE_INT sz)
4527{
4528 clear_padding_struct *union_buf;
4529 HOST_WIDE_INT start_off = 0, next_off = 0;
4530 size_t start_size = 0;
4531 if (buf->union_ptr)
4532 {
4533 start_off = buf->off + buf->size;
4534 next_off = start_off + sz;
4535 start_size = start_off % UNITS_PER_WORD;
4536 start_off -= start_size;
4537 clear_padding_flush (buf, true);
4538 union_buf = buf;
4539 }
4540 else
4541 {
4542 if (sz + buf->size > clear_padding_buf_size)
4543 clear_padding_flush (buf, false);
4544 union_buf = XALLOCA (clear_padding_struct);
4545 union_buf->loc = buf->loc;
896048cf 4546 union_buf->clear_in_mask = buf->clear_in_mask;
1bea0d0a
JJ
4547 union_buf->base = NULL_TREE;
4548 union_buf->alias_type = NULL_TREE;
4549 union_buf->gsi = NULL;
4550 union_buf->align = 0;
4551 union_buf->off = 0;
4552 union_buf->padding_bytes = 0;
4553 union_buf->sz = sz;
4554 union_buf->size = 0;
4555 if (sz + buf->size <= clear_padding_buf_size)
4556 union_buf->union_ptr = buf->buf + buf->size;
4557 else
4558 union_buf->union_ptr = XNEWVEC (unsigned char, sz);
4559 memset (union_buf->union_ptr, ~0, sz);
4560 }
4561
4562 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
a3865661 4563 if (TREE_CODE (field) == FIELD_DECL && !DECL_PADDING_P (field))
1bea0d0a 4564 {
a7285c86
JJ
4565 if (DECL_SIZE_UNIT (field) == NULL_TREE)
4566 {
4567 if (TREE_TYPE (field) == error_mark_node)
4568 continue;
4569 gcc_assert (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
4570 && !COMPLETE_TYPE_P (TREE_TYPE (field)));
896048cf
JJ
4571 if (!buf->clear_in_mask)
4572 error_at (buf->loc, "flexible array member %qD does not have "
4573 "well defined padding bits for %qs",
4574 field, "__builtin_clear_padding");
a7285c86
JJ
4575 continue;
4576 }
1bea0d0a
JJ
4577 HOST_WIDE_INT fldsz = tree_to_shwi (DECL_SIZE_UNIT (field));
4578 gcc_assert (union_buf->size == 0);
4579 union_buf->off = start_off;
4580 union_buf->size = start_size;
4581 memset (union_buf->buf, ~0, start_size);
4582 clear_padding_type (union_buf, TREE_TYPE (field), fldsz);
4583 clear_padding_add_padding (union_buf, sz - fldsz);
4584 clear_padding_flush (union_buf, true);
4585 }
4586
4587 if (buf == union_buf)
4588 {
4589 buf->off = next_off;
4590 buf->size = next_off % UNITS_PER_WORD;
4591 buf->off -= buf->size;
4592 memset (buf->buf, ~0, buf->size);
4593 }
4594 else if (sz + buf->size <= clear_padding_buf_size)
4595 buf->size += sz;
4596 else
4597 {
4598 unsigned char *union_ptr = union_buf->union_ptr;
4599 while (sz)
4600 {
4601 clear_padding_flush (buf, false);
4602 HOST_WIDE_INT this_sz
4603 = MIN ((unsigned HOST_WIDE_INT) sz,
4604 clear_padding_buf_size - buf->size);
4605 memcpy (buf->buf + buf->size, union_ptr, this_sz);
4606 buf->size += this_sz;
4607 union_ptr += this_sz;
4608 sz -= this_sz;
4609 }
4610 XDELETE (union_buf->union_ptr);
4611 }
4612}
4613
4614/* The only known floating point formats with padding bits are the
4615 IEEE extended ones. */
4616
4617static bool
4618clear_padding_real_needs_padding_p (tree type)
4619{
4620 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
4621 return (fmt->b == 2
4622 && fmt->signbit_ro == fmt->signbit_rw
4623 && (fmt->signbit_ro == 79 || fmt->signbit_ro == 95));
4624}
4625
4626/* Return true if TYPE might contain any padding bits. */
4627
4628static bool
4629clear_padding_type_may_have_padding_p (tree type)
4630{
4631 switch (TREE_CODE (type))
4632 {
4633 case RECORD_TYPE:
4634 case UNION_TYPE:
4635 return true;
4636 case ARRAY_TYPE:
4637 case COMPLEX_TYPE:
4638 case VECTOR_TYPE:
4639 return clear_padding_type_may_have_padding_p (TREE_TYPE (type));
4640 case REAL_TYPE:
4641 return clear_padding_real_needs_padding_p (type);
4642 default:
4643 return false;
4644 }
4645}
4646
4647/* Emit a runtime loop:
4648 for (; buf.base != end; buf.base += sz)
4649 __builtin_clear_padding (buf.base); */
4650
static void
clear_padding_emit_loop (clear_padding_struct *buf, tree type, tree end)
{
  /* Build the control skeleton
	 goto l2;
       l1:
	 <clear padding of one TYPE element, advance buf->base by buf->sz>
       l2:
	 if (buf->base != end) goto l1; else goto l3;
       l3:
     out of artificial labels; every emitted statement is given BUF->loc.  */
  tree l1 = create_artificial_label (buf->loc);
  tree l2 = create_artificial_label (buf->loc);
  tree l3 = create_artificial_label (buf->loc);
  gimple *g = gimple_build_goto (l2);
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
  g = gimple_build_label (l1);
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
  /* Loop body: clear one element's padding, then flush all pending
     partial stores before the base pointer is advanced.  */
  clear_padding_type (buf, type, buf->sz);
  clear_padding_flush (buf, true);
  g = gimple_build_assign (buf->base, POINTER_PLUS_EXPR, buf->base,
			   size_int (buf->sz));
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
  g = gimple_build_label (l2);
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
  /* Loop condition: keep iterating until the base pointer reaches END.  */
  g = gimple_build_cond (NE_EXPR, buf->base, end, l1, l3);
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
  g = gimple_build_label (l3);
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
}
4679
4680/* Clear padding bits for TYPE. Called recursively from
4681 gimple_fold_builtin_clear_padding. */
4682
static void
clear_padding_type (clear_padding_struct *buf, tree type, HOST_WIDE_INT sz)
{
  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      /* Walk the fields in layout order; bytes between/around fields are
	 accumulated as padding via clear_padding_add_padding, field bytes
	 are handled recursively.  CUR_POS tracks how many bytes of the
	 record have been covered so far.  */
      HOST_WIDE_INT cur_pos;
      cur_pos = 0;
      for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL && !DECL_PADDING_P (field))
	  {
	    tree ftype = TREE_TYPE (field);
	    if (DECL_BIT_FIELD (field))
	      {
		HOST_WIDE_INT fldsz = TYPE_PRECISION (ftype);
		/* Zero-width bit-fields occupy no value bits.  */
		if (fldsz == 0)
		  continue;
		HOST_WIDE_INT pos = int_byte_position (field);
		/* Fields past SZ bytes are outside the region being
		   cleared (e.g. tail of a parent sharing storage).  */
		if (pos >= sz)
		  continue;
		HOST_WIDE_INT bpos
		  = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
		bpos %= BITS_PER_UNIT;
		/* END is the number of bytes (from POS) the bit-field's
		   bits extend into.  */
		HOST_WIDE_INT end
		  = ROUND_UP (bpos + fldsz, BITS_PER_UNIT) / BITS_PER_UNIT;
		if (pos + end > cur_pos)
		  {
		    clear_padding_add_padding (buf, pos + end - cur_pos);
		    cur_pos = pos + end;
		  }
		gcc_assert (cur_pos > pos
			    && ((unsigned HOST_WIDE_INT) buf->size
				>= (unsigned HOST_WIDE_INT) cur_pos - pos));
		/* P points at the byte in the pending buffer where this
		   bit-field starts; clear its value bits from the padding
		   mask so they are not overwritten.  */
		unsigned char *p = buf->buf + buf->size - (cur_pos - pos);
		if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
		  sorry_at (buf->loc, "PDP11 bit-field handling unsupported"
			    " in %qs", "__builtin_clear_padding");
		else if (BYTES_BIG_ENDIAN)
		  {
		    /* Big endian.  */
		    if (bpos + fldsz <= BITS_PER_UNIT)
		      *p &= ~(((1 << fldsz) - 1)
			      << (BITS_PER_UNIT - bpos - fldsz));
		    else
		      {
			if (bpos)
			  {
			    *p &= ~(((1U << BITS_PER_UNIT) - 1) >> bpos);
			    p++;
			    fldsz -= BITS_PER_UNIT - bpos;
			  }
			memset (p, 0, fldsz / BITS_PER_UNIT);
			p += fldsz / BITS_PER_UNIT;
			fldsz %= BITS_PER_UNIT;
			if (fldsz)
			  *p &= ((1U << BITS_PER_UNIT) - 1) >> fldsz;
		      }
		  }
		else
		  {
		    /* Little endian.  */
		    if (bpos + fldsz <= BITS_PER_UNIT)
		      *p &= ~(((1 << fldsz) - 1) << bpos);
		    else
		      {
			if (bpos)
			  {
			    *p &= ~(((1 << BITS_PER_UNIT) - 1) << bpos);
			    p++;
			    fldsz -= BITS_PER_UNIT - bpos;
			  }
			memset (p, 0, fldsz / BITS_PER_UNIT);
			p += fldsz / BITS_PER_UNIT;
			fldsz %= BITS_PER_UNIT;
			if (fldsz)
			  *p &= ~((1 << fldsz) - 1);
		      }
		  }
	      }
	    else if (DECL_SIZE_UNIT (field) == NULL_TREE)
	      {
		/* A field without a size is expected to be a flexible
		   array member (or an erroneous field).  Its padding is
		   ill-defined unless we are merely computing a mask.  */
		if (ftype == error_mark_node)
		  continue;
		gcc_assert (TREE_CODE (ftype) == ARRAY_TYPE
			    && !COMPLETE_TYPE_P (ftype));
		if (!buf->clear_in_mask)
		  error_at (buf->loc, "flexible array member %qD does not "
			    "have well defined padding bits for %qs",
			    field, "__builtin_clear_padding");
	      }
	    else if (is_empty_type (TREE_TYPE (field)))
	      /* Empty types occupy no value bits; treat their bytes as
		 padding (handled by the trailing add_padding below).  */
	      continue;
	    else
	      {
		HOST_WIDE_INT pos = int_byte_position (field);
		if (pos >= sz)
		  continue;
		HOST_WIDE_INT fldsz = tree_to_shwi (DECL_SIZE_UNIT (field));
		gcc_assert (pos >= 0 && fldsz >= 0 && pos >= cur_pos);
		/* Bytes between the previous field and this one are
		   padding; then recurse into the field itself.  */
		clear_padding_add_padding (buf, pos - cur_pos);
		cur_pos = pos;
		clear_padding_type (buf, TREE_TYPE (field), fldsz);
		cur_pos += fldsz;
	      }
	  }
      gcc_assert (sz >= cur_pos);
      /* Whatever trails the last field up to SZ is padding too.  */
      clear_padding_add_padding (buf, sz - cur_pos);
      break;
    case ARRAY_TYPE:
      HOST_WIDE_INT nelts, fldsz;
      fldsz = int_size_in_bytes (TREE_TYPE (type));
      if (fldsz == 0)
	break;
      nelts = sz / fldsz;
      if (nelts > 1
	  && sz > 8 * UNITS_PER_WORD
	  && buf->union_ptr == NULL
	  && clear_padding_type_may_have_padding_p (TREE_TYPE (type)))
	{
	  /* For sufficiently large array of more than one elements,
	     emit a runtime loop to keep code size manageable.  Save the
	     buffer state, flush it, run the loop over a fresh per-element
	     base pointer, then restore the caller's state.  */
	  tree base = buf->base;
	  unsigned int prev_align = buf->align;
	  HOST_WIDE_INT off = buf->off + buf->size;
	  HOST_WIDE_INT prev_sz = buf->sz;
	  clear_padding_flush (buf, true);
	  tree elttype = TREE_TYPE (type);
	  buf->base = create_tmp_var (build_pointer_type (elttype));
	  tree end = make_ssa_name (TREE_TYPE (buf->base));
	  gimple *g = gimple_build_assign (buf->base, POINTER_PLUS_EXPR,
					   base, size_int (off));
	  gimple_set_location (g, buf->loc);
	  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	  g = gimple_build_assign (end, POINTER_PLUS_EXPR, buf->base,
				   size_int (sz));
	  gimple_set_location (g, buf->loc);
	  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	  buf->sz = fldsz;
	  buf->align = TYPE_ALIGN (elttype);
	  buf->off = 0;
	  buf->size = 0;
	  clear_padding_emit_loop (buf, elttype, end);
	  /* Restore the saved buffer state; re-seed the pending buffer
	     with zeros for the bytes below the word boundary at OFF.  */
	  buf->base = base;
	  buf->sz = prev_sz;
	  buf->align = prev_align;
	  buf->size = off % UNITS_PER_WORD;
	  buf->off = off - buf->size;
	  memset (buf->buf, 0, buf->size);
	  break;
	}
      /* Small arrays are simply unrolled element by element.  */
      for (HOST_WIDE_INT i = 0; i < nelts; i++)
	clear_padding_type (buf, TREE_TYPE (type), fldsz);
      break;
    case UNION_TYPE:
      clear_padding_union (buf, type, sz);
      break;
    case REAL_TYPE:
      gcc_assert ((size_t) sz <= clear_padding_unit);
      if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
	clear_padding_flush (buf, false);
      if (clear_padding_real_needs_padding_p (type))
	{
	  /* Use native_interpret_expr + native_encode_expr to figure out
	     which bits are padding: bits that round-trip through the
	     all-ones pattern are value bits, the rest are padding.  */
	  memset (buf->buf + buf->size, ~0, sz);
	  tree cst = native_interpret_expr (type, buf->buf + buf->size, sz);
	  gcc_assert (cst && TREE_CODE (cst) == REAL_CST);
	  int len = native_encode_expr (cst, buf->buf + buf->size, sz);
	  gcc_assert (len > 0 && (size_t) len == (size_t) sz);
	  for (size_t i = 0; i < (size_t) sz; i++)
	    buf->buf[buf->size + i] ^= ~0;
	}
      else
	memset (buf->buf + buf->size, 0, sz);
      buf->size += sz;
      break;
    case COMPLEX_TYPE:
      /* A complex value is laid out as two consecutive elements.  */
      fldsz = int_size_in_bytes (TREE_TYPE (type));
      clear_padding_type (buf, TREE_TYPE (type), fldsz);
      clear_padding_type (buf, TREE_TYPE (type), fldsz);
      break;
    case VECTOR_TYPE:
      nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
      fldsz = int_size_in_bytes (TREE_TYPE (type));
      for (HOST_WIDE_INT i = 0; i < nelts; i++)
	clear_padding_type (buf, TREE_TYPE (type), fldsz);
      break;
    case NULLPTR_TYPE:
      /* All bits of a nullptr_t object are padding.  */
      gcc_assert ((size_t) sz <= clear_padding_unit);
      if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
	clear_padding_flush (buf, false);
      memset (buf->buf + buf->size, ~0, sz);
      buf->size += sz;
      break;
    default:
      /* Remaining scalars have no padding: mark all SZ bytes as value
	 bytes (zero in the padding mask).  */
      gcc_assert ((size_t) sz <= clear_padding_unit);
      if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
	clear_padding_flush (buf, false);
      memset (buf->buf + buf->size, 0, sz);
      buf->size += sz;
      break;
    }
}
4886
896048cf
JJ
4887/* Clear padding bits of TYPE in MASK. */
4888
4889void
4890clear_type_padding_in_mask (tree type, unsigned char *mask)
4891{
4892 clear_padding_struct buf;
4893 buf.loc = UNKNOWN_LOCATION;
4894 buf.clear_in_mask = true;
4895 buf.base = NULL_TREE;
4896 buf.alias_type = NULL_TREE;
4897 buf.gsi = NULL;
4898 buf.align = 0;
4899 buf.off = 0;
4900 buf.padding_bytes = 0;
4901 buf.sz = int_size_in_bytes (type);
4902 buf.size = 0;
4903 buf.union_ptr = mask;
4904 clear_padding_type (&buf, type, buf.sz);
4905 clear_padding_flush (&buf, true);
4906}
4907
1bea0d0a
JJ
4908/* Fold __builtin_clear_padding builtin. */
4909
static bool
gimple_fold_builtin_clear_padding (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  gcc_assert (gimple_call_num_args (stmt) == 2);
  tree ptr = gimple_call_arg (stmt, 0);
  tree typearg = gimple_call_arg (stmt, 1);
  /* The second argument carries the object type as the pointed-to type
     of its (pointer) type.  */
  tree type = TREE_TYPE (TREE_TYPE (typearg));
  location_t loc = gimple_location (stmt);
  clear_padding_struct buf;
  gimple_stmt_iterator gsiprev = *gsi;
  /* This should be folded during the lower pass.  */
  gcc_assert (!gimple_in_ssa_p (cfun) && cfun->cfg == NULL);
  gcc_assert (COMPLETE_TYPE_P (type));
  /* Remember the statement before the call so we can tell afterwards
     whether any statements were emitted in between.  */
  gsi_prev (&gsiprev);

  buf.loc = loc;
  buf.clear_in_mask = false;
  buf.base = ptr;
  buf.alias_type = NULL_TREE;
  buf.gsi = gsi;
  buf.align = get_pointer_alignment (ptr);
  unsigned int talign = min_align_of_type (type) * BITS_PER_UNIT;
  buf.align = MAX (buf.align, talign);
  buf.off = 0;
  buf.padding_bytes = 0;
  buf.size = 0;
  buf.sz = int_size_in_bytes (type);
  buf.union_ptr = NULL;
  if (buf.sz < 0 && int_size_in_bytes (strip_array_types (type)) < 0)
    sorry_at (loc, "%s not supported for variable length aggregates",
	      "__builtin_clear_padding");
  /* The implementation currently assumes 8-bit host and target
     chars which is the case for all currently supported targets
     and hosts and is required e.g. for native_{encode,interpret}* APIs.  */
  else if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
    sorry_at (loc, "%s not supported on this target",
	      "__builtin_clear_padding");
  else if (!clear_padding_type_may_have_padding_p (type))
    /* Nothing to do: the type provably has no padding bits.  */
    ;
  else if (TREE_CODE (type) == ARRAY_TYPE && buf.sz < 0)
    {
      tree sz = TYPE_SIZE_UNIT (type);
      tree elttype = type;
      /* Only supports C/C++ VLAs and flattens all the VLA levels.  */
      while (TREE_CODE (elttype) == ARRAY_TYPE
	     && int_size_in_bytes (elttype) < 0)
	elttype = TREE_TYPE (elttype);
      HOST_WIDE_INT eltsz = int_size_in_bytes (elttype);
      gcc_assert (eltsz >= 0);
      if (eltsz)
	{
	  /* Emit a runtime loop over the elements of the flattened
	     innermost (fixed-size) element type.  */
	  buf.base = create_tmp_var (build_pointer_type (elttype));
	  tree end = make_ssa_name (TREE_TYPE (buf.base));
	  gimple *g = gimple_build_assign (buf.base, ptr);
	  gimple_set_location (g, loc);
	  gsi_insert_before (gsi, g, GSI_SAME_STMT);
	  g = gimple_build_assign (end, POINTER_PLUS_EXPR, buf.base, sz);
	  gimple_set_location (g, loc);
	  gsi_insert_before (gsi, g, GSI_SAME_STMT);
	  buf.sz = eltsz;
	  buf.align = TYPE_ALIGN (elttype);
	  buf.alias_type = build_pointer_type (elttype);
	  clear_padding_emit_loop (&buf, elttype, end);
	}
    }
  else
    {
      /* Fixed-size case: walk the type once, emitting stores.  The base
	 must be a valid MEM_REF address operand.  */
      if (!is_gimple_mem_ref_addr (buf.base))
	{
	  buf.base = make_ssa_name (TREE_TYPE (ptr));
	  gimple *g = gimple_build_assign (buf.base, ptr);
	  gimple_set_location (g, loc);
	  gsi_insert_before (gsi, g, GSI_SAME_STMT);
	}
      buf.alias_type = build_pointer_type (type);
      clear_padding_type (&buf, type, buf.sz);
      clear_padding_flush (&buf, true);
    }

  /* Remove the call.  If nothing was inserted before it, replace it
     with a nop so the iterator stays valid; otherwise remove it and
     reposition the iterator on the last inserted statement.  */
  gimple_stmt_iterator gsiprev2 = *gsi;
  gsi_prev (&gsiprev2);
  if (gsi_stmt (gsiprev) == gsi_stmt (gsiprev2))
    gsi_replace (gsi, gimple_build_nop (), true);
  else
    {
      gsi_remove (gsi, true);
      *gsi = gsiprev2;
    }
  return true;
}
5001
dcb7fae2
RB
5002/* Fold the non-target builtin at *GSI and return whether any simplification
5003 was made. */
cbdd87d4 5004
fef5a0d9 5005static bool
dcb7fae2 5006gimple_fold_builtin (gimple_stmt_iterator *gsi)
cbdd87d4 5007{
538dd0b7 5008 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
fef5a0d9 5009 tree callee = gimple_call_fndecl (stmt);
cbdd87d4 5010
dcb7fae2
RB
5011 /* Give up for always_inline inline builtins until they are
5012 inlined. */
5013 if (avoid_folding_inline_builtin (callee))
5014 return false;
cbdd87d4 5015
edd7ae68
RB
5016 unsigned n = gimple_call_num_args (stmt);
5017 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
5018 switch (fcode)
cbdd87d4 5019 {
b3d8d88e
MS
5020 case BUILT_IN_BCMP:
5021 return gimple_fold_builtin_bcmp (gsi);
5022 case BUILT_IN_BCOPY:
5023 return gimple_fold_builtin_bcopy (gsi);
dcb7fae2 5024 case BUILT_IN_BZERO:
b3d8d88e
MS
5025 return gimple_fold_builtin_bzero (gsi);
5026
dcb7fae2
RB
5027 case BUILT_IN_MEMSET:
5028 return gimple_fold_builtin_memset (gsi,
5029 gimple_call_arg (stmt, 1),
5030 gimple_call_arg (stmt, 2));
dcb7fae2 5031 case BUILT_IN_MEMCPY:
dcb7fae2 5032 case BUILT_IN_MEMPCPY:
dcb7fae2
RB
5033 case BUILT_IN_MEMMOVE:
5034 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
0d67a510 5035 gimple_call_arg (stmt, 1), fcode);
dcb7fae2
RB
5036 case BUILT_IN_SPRINTF_CHK:
5037 case BUILT_IN_VSPRINTF_CHK:
edd7ae68 5038 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
dcb7fae2
RB
5039 case BUILT_IN_STRCAT_CHK:
5040 return gimple_fold_builtin_strcat_chk (gsi);
745583f9
RB
5041 case BUILT_IN_STRNCAT_CHK:
5042 return gimple_fold_builtin_strncat_chk (gsi);
cbdd87d4 5043 case BUILT_IN_STRLEN:
dcb7fae2 5044 return gimple_fold_builtin_strlen (gsi);
cbdd87d4 5045 case BUILT_IN_STRCPY:
dcb7fae2 5046 return gimple_fold_builtin_strcpy (gsi,
fef5a0d9 5047 gimple_call_arg (stmt, 0),
dcb7fae2 5048 gimple_call_arg (stmt, 1));
cbdd87d4 5049 case BUILT_IN_STRNCPY:
dcb7fae2 5050 return gimple_fold_builtin_strncpy (gsi,
fef5a0d9
RB
5051 gimple_call_arg (stmt, 0),
5052 gimple_call_arg (stmt, 1),
dcb7fae2 5053 gimple_call_arg (stmt, 2));
9a7eefec 5054 case BUILT_IN_STRCAT:
dcb7fae2
RB
5055 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
5056 gimple_call_arg (stmt, 1));
ad03a744
RB
5057 case BUILT_IN_STRNCAT:
5058 return gimple_fold_builtin_strncat (gsi);
71dea1dd 5059 case BUILT_IN_INDEX:
912d9ec3 5060 case BUILT_IN_STRCHR:
71dea1dd
WD
5061 return gimple_fold_builtin_strchr (gsi, false);
5062 case BUILT_IN_RINDEX:
5063 case BUILT_IN_STRRCHR:
5064 return gimple_fold_builtin_strchr (gsi, true);
c8952930
JJ
5065 case BUILT_IN_STRSTR:
5066 return gimple_fold_builtin_strstr (gsi);
a918bfbf 5067 case BUILT_IN_STRCMP:
8b0b334a 5068 case BUILT_IN_STRCMP_EQ:
a918bfbf
ML
5069 case BUILT_IN_STRCASECMP:
5070 case BUILT_IN_STRNCMP:
8b0b334a 5071 case BUILT_IN_STRNCMP_EQ:
a918bfbf
ML
5072 case BUILT_IN_STRNCASECMP:
5073 return gimple_fold_builtin_string_compare (gsi);
488c6247
ML
5074 case BUILT_IN_MEMCHR:
5075 return gimple_fold_builtin_memchr (gsi);
cbdd87d4 5076 case BUILT_IN_FPUTS:
dcb7fae2
RB
5077 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
5078 gimple_call_arg (stmt, 1), false);
cbdd87d4 5079 case BUILT_IN_FPUTS_UNLOCKED:
dcb7fae2
RB
5080 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
5081 gimple_call_arg (stmt, 1), true);
cbdd87d4
RG
5082 case BUILT_IN_MEMCPY_CHK:
5083 case BUILT_IN_MEMPCPY_CHK:
5084 case BUILT_IN_MEMMOVE_CHK:
5085 case BUILT_IN_MEMSET_CHK:
dcb7fae2 5086 return gimple_fold_builtin_memory_chk (gsi,
fef5a0d9
RB
5087 gimple_call_arg (stmt, 0),
5088 gimple_call_arg (stmt, 1),
5089 gimple_call_arg (stmt, 2),
5090 gimple_call_arg (stmt, 3),
edd7ae68 5091 fcode);
2625bb5d
RB
5092 case BUILT_IN_STPCPY:
5093 return gimple_fold_builtin_stpcpy (gsi);
cbdd87d4
RG
5094 case BUILT_IN_STRCPY_CHK:
5095 case BUILT_IN_STPCPY_CHK:
dcb7fae2 5096 return gimple_fold_builtin_stxcpy_chk (gsi,
fef5a0d9
RB
5097 gimple_call_arg (stmt, 0),
5098 gimple_call_arg (stmt, 1),
5099 gimple_call_arg (stmt, 2),
edd7ae68 5100 fcode);
cbdd87d4 5101 case BUILT_IN_STRNCPY_CHK:
f3fc9b80 5102 case BUILT_IN_STPNCPY_CHK:
fef5a0d9
RB
5103 return gimple_fold_builtin_stxncpy_chk (gsi,
5104 gimple_call_arg (stmt, 0),
5105 gimple_call_arg (stmt, 1),
5106 gimple_call_arg (stmt, 2),
5107 gimple_call_arg (stmt, 3),
edd7ae68 5108 fcode);
cbdd87d4
RG
5109 case BUILT_IN_SNPRINTF_CHK:
5110 case BUILT_IN_VSNPRINTF_CHK:
edd7ae68 5111 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
a104bd88 5112
edd7ae68
RB
5113 case BUILT_IN_FPRINTF:
5114 case BUILT_IN_FPRINTF_UNLOCKED:
5115 case BUILT_IN_VFPRINTF:
5116 if (n == 2 || n == 3)
5117 return gimple_fold_builtin_fprintf (gsi,
5118 gimple_call_arg (stmt, 0),
5119 gimple_call_arg (stmt, 1),
5120 n == 3
5121 ? gimple_call_arg (stmt, 2)
5122 : NULL_TREE,
5123 fcode);
5124 break;
5125 case BUILT_IN_FPRINTF_CHK:
5126 case BUILT_IN_VFPRINTF_CHK:
5127 if (n == 3 || n == 4)
5128 return gimple_fold_builtin_fprintf (gsi,
5129 gimple_call_arg (stmt, 0),
5130 gimple_call_arg (stmt, 2),
5131 n == 4
5132 ? gimple_call_arg (stmt, 3)
5133 : NULL_TREE,
5134 fcode);
5135 break;
ad03a744
RB
5136 case BUILT_IN_PRINTF:
5137 case BUILT_IN_PRINTF_UNLOCKED:
5138 case BUILT_IN_VPRINTF:
5139 if (n == 1 || n == 2)
5140 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
5141 n == 2
5142 ? gimple_call_arg (stmt, 1)
5143 : NULL_TREE, fcode);
5144 break;
5145 case BUILT_IN_PRINTF_CHK:
5146 case BUILT_IN_VPRINTF_CHK:
5147 if (n == 2 || n == 3)
5148 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
5149 n == 3
5150 ? gimple_call_arg (stmt, 2)
5151 : NULL_TREE, fcode);
242a37f1 5152 break;
48126138
NS
5153 case BUILT_IN_ACC_ON_DEVICE:
5154 return gimple_fold_builtin_acc_on_device (gsi,
5155 gimple_call_arg (stmt, 0));
fe75f732
PK
5156 case BUILT_IN_REALLOC:
5157 return gimple_fold_builtin_realloc (gsi);
5158
1bea0d0a
JJ
5159 case BUILT_IN_CLEAR_PADDING:
5160 return gimple_fold_builtin_clear_padding (gsi);
5161
fef5a0d9
RB
5162 default:;
5163 }
5164
5165 /* Try the generic builtin folder. */
5166 bool ignore = (gimple_call_lhs (stmt) == NULL);
5167 tree result = fold_call_stmt (stmt, ignore);
5168 if (result)
5169 {
5170 if (ignore)
5171 STRIP_NOPS (result);
5172 else
5173 result = fold_convert (gimple_call_return_type (stmt), result);
52a5515e 5174 gimplify_and_update_call_from_tree (gsi, result);
fef5a0d9
RB
5175 return true;
5176 }
5177
5178 return false;
5179}
5180
451e8dae
NS
5181/* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
5182 function calls to constants, where possible. */
5183
5184static tree
5185fold_internal_goacc_dim (const gimple *call)
5186{
629b3d75
MJ
5187 int axis = oacc_get_ifn_dim_arg (call);
5188 int size = oacc_get_fn_dim_size (current_function_decl, axis);
451e8dae 5189 tree result = NULL_TREE;
67d2229e 5190 tree type = TREE_TYPE (gimple_call_lhs (call));
451e8dae 5191
67d2229e 5192 switch (gimple_call_internal_fn (call))
451e8dae 5193 {
67d2229e
TV
5194 case IFN_GOACC_DIM_POS:
5195 /* If the size is 1, we know the answer. */
5196 if (size == 1)
5197 result = build_int_cst (type, 0);
5198 break;
5199 case IFN_GOACC_DIM_SIZE:
5200 /* If the size is not dynamic, we know the answer. */
5201 if (size)
5202 result = build_int_cst (type, size);
5203 break;
5204 default:
5205 break;
451e8dae
NS
5206 }
5207
5208 return result;
5209}
5210
849a76a5
JJ
5211/* Return true if stmt is __atomic_compare_exchange_N call which is suitable
5212 for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
5213 &var where var is only addressable because of such calls. */
5214
bool
optimize_atomic_compare_exchange_p (gimple *stmt)
{
  /* Cheap structural checks first: exact builtin shape, optimization
     enabled, no TSan/ASan (they instrument the builtin form), and the
     call must have virtual operands.  */
  if (gimple_call_num_args (stmt) != 6
      || !flag_inline_atomics
      || !optimize
      || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
      || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
      || !gimple_vdef (stmt)
      || !gimple_vuse (stmt))
    return false;

  tree fndecl = gimple_call_fndecl (stmt);
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
      break;
    default:
      return false;
    }

  /* The second argument must be the address of a local register-type
     variable so it can be rewritten into SSA form.  */
  tree expected = gimple_call_arg (stmt, 1);
  if (TREE_CODE (expected) != ADDR_EXPR
      || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
    return false;

  tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
  if (!is_gimple_reg_type (etype)
      || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
      || TREE_THIS_VOLATILE (etype)
      || VECTOR_TYPE_P (etype)
      || TREE_CODE (etype) == COMPLEX_TYPE
      /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
	 might not preserve all the bits.  See PR71716.  */
      || SCALAR_FLOAT_TYPE_P (etype)
      || maybe_ne (TYPE_PRECISION (etype),
		   GET_MODE_BITSIZE (TYPE_MODE (etype))))
    return false;

  /* The weak argument must be a literal 0 or 1.  */
  tree weak = gimple_call_arg (stmt, 3);
  if (!integer_zerop (weak) && !integer_onep (weak))
    return false;

  /* ITYPE is the integer type of the exchanged value, taken from the
     builtin's third parameter type.  */
  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  machine_mode mode = TYPE_MODE (itype);

  /* The target must provide a compare-and-swap pattern for this mode.  */
  if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
      == CODE_FOR_nothing
      && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
    return false;

  if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
    return false;

  return true;
}
5276
5277/* Fold
5278 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
5279 into
5280 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
5281 i = IMAGPART_EXPR <t>;
5282 r = (_Bool) i;
5283 e = REALPART_EXPR <t>; */
5284
void
fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree fndecl = gimple_call_fndecl (stmt);
  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  /* ITYPE is the integer type of the exchanged value (third parameter
     of the builtin); the internal fn returns a complex of that type.  */
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  tree ctype = build_complex_type (itype);
  /* The variable whose address was passed as &e.  */
  tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
  bool throws = false;
  edge e = NULL;
  /* Load the current value of the expected variable into an SSA name;
     GSIRET remembers this statement so *GSI can be reset to it at the
     end.  */
  gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
				   expected);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);
  gimple_stmt_iterator gsiret = gsi_for_stmt (g);
  if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
    {
      /* Reinterpret the expected value as ITYPE without changing bits.  */
      g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, itype,
				       gimple_assign_lhs (g)));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }
  /* Encode w * 256 + N in the fourth argument (w = weak flag, N = size
     in bytes), matching the IFN_ATOMIC_COMPARE_EXCHANGE convention
     documented in the function comment above.  */
  int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
	     + int_size_in_bytes (itype);
  g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
				  gimple_call_arg (stmt, 0),
				  gimple_assign_lhs (g),
				  gimple_call_arg (stmt, 2),
				  build_int_cst (integer_type_node, flag),
				  gimple_call_arg (stmt, 4),
				  gimple_call_arg (stmt, 5));
  tree lhs = make_ssa_name (ctype);
  gimple_call_set_lhs (g, lhs);
  gimple_move_vops (g, stmt);
  tree oldlhs = gimple_call_lhs (stmt);
  /* If the original call could throw, follow-up statements must go on
     the fallthru edge rather than after the (BB-ending) call.  */
  if (stmt_can_throw_internal (cfun, stmt))
    {
      throws = true;
      e = find_fallthru_edge (gsi_bb (*gsi)->succs);
    }
  gimple_call_set_nothrow (as_a <gcall *> (g),
			   gimple_call_nothrow_p (as_a <gcall *> (stmt)));
  gimple_call_set_lhs (stmt, NULL_TREE);
  gsi_replace (gsi, g, true);
  if (oldlhs)
    {
      /* r = (_Bool) IMAGPART_EXPR <t>;  */
      g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
			       build1 (IMAGPART_EXPR, itype, lhs));
      if (throws)
	{
	  gsi_insert_on_edge_immediate (e, g);
	  *gsi = gsi_for_stmt (g);
	}
      else
	gsi_insert_after (gsi, g, GSI_NEW_STMT);
      g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  /* e = REALPART_EXPR <t>;  */
  g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
			   build1 (REALPART_EXPR, itype, lhs));
  if (throws && oldlhs == NULL_TREE)
    {
      gsi_insert_on_edge_immediate (e, g);
      *gsi = gsi_for_stmt (g);
    }
  else
    gsi_insert_after (gsi, g, GSI_NEW_STMT);
  if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
    {
      /* Convert the real part back to the expected variable's type.  */
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
			       VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
				       gimple_assign_lhs (g)));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
  gsi_insert_after (gsi, g, GSI_NEW_STMT);
  /* Leave *GSI at the initial load of the expected value.  */
  *gsi = gsiret;
}
5364
1304953e
JJ
5365/* Return true if ARG0 CODE ARG1 in infinite signed precision operation
5366 doesn't fit into TYPE. The test for overflow should be regardless of
5367 -fwrapv, and even for unsigned types. */
5368
5369bool
5370arith_overflowed_p (enum tree_code code, const_tree type,
5371 const_tree arg0, const_tree arg1)
5372{
1304953e
JJ
5373 widest2_int warg0 = widest2_int_cst (arg0);
5374 widest2_int warg1 = widest2_int_cst (arg1);
5375 widest2_int wres;
5376 switch (code)
5377 {
5378 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
5379 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
5380 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
5381 default: gcc_unreachable ();
5382 }
5383 signop sign = TYPE_SIGN (type);
5384 if (sign == UNSIGNED && wi::neg_p (wres))
5385 return true;
5386 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
5387}
5388
868363d4
RS
5389/* If IFN_MASK_LOAD/STORE call CALL is unconditional, return a MEM_REF
5390 for the memory it references, otherwise return null. VECTYPE is the
5391 type of the memory vector. */
5392
5393static tree
5394gimple_fold_mask_load_store_mem_ref (gcall *call, tree vectype)
5395{
5396 tree ptr = gimple_call_arg (call, 0);
5397 tree alias_align = gimple_call_arg (call, 1);
5398 tree mask = gimple_call_arg (call, 2);
5399 if (!tree_fits_uhwi_p (alias_align) || !integer_all_onesp (mask))
5400 return NULL_TREE;
5401
aa204d51 5402 unsigned HOST_WIDE_INT align = tree_to_uhwi (alias_align);
868363d4
RS
5403 if (TYPE_ALIGN (vectype) != align)
5404 vectype = build_aligned_type (vectype, align);
5405 tree offset = build_zero_cst (TREE_TYPE (alias_align));
5406 return fold_build2 (MEM_REF, vectype, ptr, offset);
5407}
5408
5409/* Try to fold IFN_MASK_LOAD call CALL. Return true on success. */
5410
5411static bool
5412gimple_fold_mask_load (gimple_stmt_iterator *gsi, gcall *call)
5413{
5414 tree lhs = gimple_call_lhs (call);
5415 if (!lhs)
5416 return false;
5417
5418 if (tree rhs = gimple_fold_mask_load_store_mem_ref (call, TREE_TYPE (lhs)))
5419 {
5420 gassign *new_stmt = gimple_build_assign (lhs, rhs);
5421 gimple_set_location (new_stmt, gimple_location (call));
5422 gimple_move_vops (new_stmt, call);
5423 gsi_replace (gsi, new_stmt, false);
5424 return true;
5425 }
5426 return false;
5427}
5428
5429/* Try to fold IFN_MASK_STORE call CALL. Return true on success. */
5430
5431static bool
5432gimple_fold_mask_store (gimple_stmt_iterator *gsi, gcall *call)
5433{
5434 tree rhs = gimple_call_arg (call, 3);
5435 if (tree lhs = gimple_fold_mask_load_store_mem_ref (call, TREE_TYPE (rhs)))
5436 {
5437 gassign *new_stmt = gimple_build_assign (lhs, rhs);
5438 gimple_set_location (new_stmt, gimple_location (call));
5439 gimple_move_vops (new_stmt, call);
5440 gsi_replace (gsi, new_stmt, false);
5441 return true;
5442 }
5443 return false;
5444}
5445
cbdd87d4
RG
5446/* Attempt to fold a call statement referenced by the statement iterator GSI.
5447 The statement may be replaced by another statement, e.g., if the call
5448 simplifies to a constant value. Return true if any changes were made.
5449 It is assumed that the operands have been previously folded. */
5450
e021c122 5451static bool
ceeffab0 5452gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
cbdd87d4 5453{
538dd0b7 5454 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3b45a007 5455 tree callee;
e021c122 5456 bool changed = false;
3b45a007
RG
5457
5458 /* Check for virtual calls that became direct calls. */
5459 callee = gimple_call_fn (stmt);
25583c4f 5460 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
3b45a007 5461 {
49c471e3
MJ
5462 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
5463 {
450ad0cd
JH
5464 if (dump_file && virtual_method_call_p (callee)
5465 && !possible_polymorphic_call_target_p
6f8091fc
JH
5466 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
5467 (OBJ_TYPE_REF_EXPR (callee)))))
450ad0cd
JH
5468 {
5469 fprintf (dump_file,
a70e9985 5470 "Type inheritance inconsistent devirtualization of ");
450ad0cd
JH
5471 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5472 fprintf (dump_file, " to ");
5473 print_generic_expr (dump_file, callee, TDF_SLIM);
5474 fprintf (dump_file, "\n");
5475 }
5476
49c471e3 5477 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
e021c122
RG
5478 changed = true;
5479 }
a70e9985 5480 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
e021c122 5481 {
61dd6a2e
JH
5482 bool final;
5483 vec <cgraph_node *>targets
058d0a90 5484 = possible_polymorphic_call_targets (callee, stmt, &final);
2b5f0895 5485 if (final && targets.length () <= 1 && dbg_cnt (devirt))
e021c122 5486 {
a70e9985 5487 tree lhs = gimple_call_lhs (stmt);
2b5f0895
XDL
5488 if (dump_enabled_p ())
5489 {
4f5b9c80 5490 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
2b5f0895
XDL
5491 "folding virtual function call to %s\n",
5492 targets.length () == 1
5493 ? targets[0]->name ()
5494 : "__builtin_unreachable");
5495 }
61dd6a2e 5496 if (targets.length () == 1)
cf3e5a89 5497 {
18954840
JJ
5498 tree fndecl = targets[0]->decl;
5499 gimple_call_set_fndecl (stmt, fndecl);
cf3e5a89 5500 changed = true;
18954840
JJ
5501 /* If changing the call to __cxa_pure_virtual
5502 or similar noreturn function, adjust gimple_call_fntype
5503 too. */
865f7046 5504 if (gimple_call_noreturn_p (stmt)
18954840
JJ
5505 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
5506 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
5507 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
5508 == void_type_node))
5509 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
a70e9985 5510 /* If the call becomes noreturn, remove the lhs. */
3cee7e4e
MP
5511 if (lhs
5512 && gimple_call_noreturn_p (stmt)
18954840 5513 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
abd3a68c 5514 || should_remove_lhs_p (lhs)))
a70e9985
JJ
5515 {
5516 if (TREE_CODE (lhs) == SSA_NAME)
5517 {
b731b390 5518 tree var = create_tmp_var (TREE_TYPE (lhs));
a70e9985 5519 tree def = get_or_create_ssa_default_def (cfun, var);
355fe088 5520 gimple *new_stmt = gimple_build_assign (lhs, def);
a70e9985
JJ
5521 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
5522 }
5523 gimple_call_set_lhs (stmt, NULL_TREE);
5524 }
0b986c6a 5525 maybe_remove_unused_call_args (cfun, stmt);
cf3e5a89 5526 }
a70e9985 5527 else
cf3e5a89
JJ
5528 {
5529 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
355fe088 5530 gimple *new_stmt = gimple_build_call (fndecl, 0);
cf3e5a89 5531 gimple_set_location (new_stmt, gimple_location (stmt));
2da6996c
RB
5532 /* If the call had a SSA name as lhs morph that into
5533 an uninitialized value. */
a70e9985
JJ
5534 if (lhs && TREE_CODE (lhs) == SSA_NAME)
5535 {
b731b390 5536 tree var = create_tmp_var (TREE_TYPE (lhs));
2da6996c
RB
5537 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
5538 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
5539 set_ssa_default_def (cfun, var, lhs);
42e52a51 5540 }
779724a5 5541 gimple_move_vops (new_stmt, stmt);
2da6996c 5542 gsi_replace (gsi, new_stmt, false);
cf3e5a89
JJ
5543 return true;
5544 }
e021c122 5545 }
49c471e3 5546 }
e021c122 5547 }
49c471e3 5548
f2d3d07e
RH
5549 /* Check for indirect calls that became direct calls, and then
5550 no longer require a static chain. */
5551 if (gimple_call_chain (stmt))
5552 {
5553 tree fn = gimple_call_fndecl (stmt);
5554 if (fn && !DECL_STATIC_CHAIN (fn))
5555 {
5556 gimple_call_set_chain (stmt, NULL);
5557 changed = true;
5558 }
f2d3d07e
RH
5559 }
5560
e021c122
RG
5561 if (inplace)
5562 return changed;
5563
5564 /* Check for builtins that CCP can handle using information not
5565 available in the generic fold routines. */
fef5a0d9
RB
5566 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
5567 {
5568 if (gimple_fold_builtin (gsi))
5569 changed = true;
5570 }
5571 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
e021c122 5572 {
ea679d55 5573 changed |= targetm.gimple_fold_builtin (gsi);
3b45a007 5574 }
368b454d 5575 else if (gimple_call_internal_p (stmt))
ed9c79e1 5576 {
368b454d
JJ
5577 enum tree_code subcode = ERROR_MARK;
5578 tree result = NULL_TREE;
1304953e
JJ
5579 bool cplx_result = false;
5580 tree overflow = NULL_TREE;
368b454d
JJ
5581 switch (gimple_call_internal_fn (stmt))
5582 {
5583 case IFN_BUILTIN_EXPECT:
5584 result = fold_builtin_expect (gimple_location (stmt),
5585 gimple_call_arg (stmt, 0),
5586 gimple_call_arg (stmt, 1),
1e9168b2
ML
5587 gimple_call_arg (stmt, 2),
5588 NULL_TREE);
368b454d 5589 break;
0e82f089 5590 case IFN_UBSAN_OBJECT_SIZE:
ca1150f0
JJ
5591 {
5592 tree offset = gimple_call_arg (stmt, 1);
5593 tree objsize = gimple_call_arg (stmt, 2);
5594 if (integer_all_onesp (objsize)
5595 || (TREE_CODE (offset) == INTEGER_CST
5596 && TREE_CODE (objsize) == INTEGER_CST
5597 && tree_int_cst_le (offset, objsize)))
5598 {
5599 replace_call_with_value (gsi, NULL_TREE);
5600 return true;
5601 }
5602 }
5603 break;
5604 case IFN_UBSAN_PTR:
5605 if (integer_zerop (gimple_call_arg (stmt, 1)))
0e82f089 5606 {
ca1150f0 5607 replace_call_with_value (gsi, NULL_TREE);
0e82f089
MP
5608 return true;
5609 }
5610 break;
ca1150f0
JJ
5611 case IFN_UBSAN_BOUNDS:
5612 {
5613 tree index = gimple_call_arg (stmt, 1);
5614 tree bound = gimple_call_arg (stmt, 2);
5615 if (TREE_CODE (index) == INTEGER_CST
5616 && TREE_CODE (bound) == INTEGER_CST)
5617 {
5618 index = fold_convert (TREE_TYPE (bound), index);
5619 if (TREE_CODE (index) == INTEGER_CST
5620 && tree_int_cst_le (index, bound))
5621 {
5622 replace_call_with_value (gsi, NULL_TREE);
5623 return true;
5624 }
5625 }
5626 }
5627 break;
451e8dae
NS
5628 case IFN_GOACC_DIM_SIZE:
5629 case IFN_GOACC_DIM_POS:
5630 result = fold_internal_goacc_dim (stmt);
5631 break;
368b454d
JJ
5632 case IFN_UBSAN_CHECK_ADD:
5633 subcode = PLUS_EXPR;
5634 break;
5635 case IFN_UBSAN_CHECK_SUB:
5636 subcode = MINUS_EXPR;
5637 break;
5638 case IFN_UBSAN_CHECK_MUL:
5639 subcode = MULT_EXPR;
5640 break;
1304953e
JJ
5641 case IFN_ADD_OVERFLOW:
5642 subcode = PLUS_EXPR;
5643 cplx_result = true;
5644 break;
5645 case IFN_SUB_OVERFLOW:
5646 subcode = MINUS_EXPR;
5647 cplx_result = true;
5648 break;
5649 case IFN_MUL_OVERFLOW:
5650 subcode = MULT_EXPR;
5651 cplx_result = true;
5652 break;
868363d4
RS
5653 case IFN_MASK_LOAD:
5654 changed |= gimple_fold_mask_load (gsi, stmt);
5655 break;
5656 case IFN_MASK_STORE:
5657 changed |= gimple_fold_mask_store (gsi, stmt);
5658 break;
368b454d
JJ
5659 default:
5660 break;
5661 }
5662 if (subcode != ERROR_MARK)
5663 {
5664 tree arg0 = gimple_call_arg (stmt, 0);
5665 tree arg1 = gimple_call_arg (stmt, 1);
1304953e
JJ
5666 tree type = TREE_TYPE (arg0);
5667 if (cplx_result)
5668 {
5669 tree lhs = gimple_call_lhs (stmt);
5670 if (lhs == NULL_TREE)
5671 type = NULL_TREE;
5672 else
5673 type = TREE_TYPE (TREE_TYPE (lhs));
5674 }
5675 if (type == NULL_TREE)
5676 ;
368b454d 5677 /* x = y + 0; x = y - 0; x = y * 0; */
1304953e
JJ
5678 else if (integer_zerop (arg1))
5679 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
368b454d
JJ
5680 /* x = 0 + y; x = 0 * y; */
5681 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
1304953e 5682 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
368b454d
JJ
5683 /* x = y - y; */
5684 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
1304953e 5685 result = integer_zero_node;
368b454d 5686 /* x = y * 1; x = 1 * y; */
1304953e
JJ
5687 else if (subcode == MULT_EXPR && integer_onep (arg1))
5688 result = arg0;
5689 else if (subcode == MULT_EXPR && integer_onep (arg0))
5690 result = arg1;
5691 else if (TREE_CODE (arg0) == INTEGER_CST
5692 && TREE_CODE (arg1) == INTEGER_CST)
368b454d 5693 {
1304953e
JJ
5694 if (cplx_result)
5695 result = int_const_binop (subcode, fold_convert (type, arg0),
5696 fold_convert (type, arg1));
5697 else
5698 result = int_const_binop (subcode, arg0, arg1);
5699 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
5700 {
5701 if (cplx_result)
5702 overflow = build_one_cst (type);
5703 else
5704 result = NULL_TREE;
5705 }
5706 }
5707 if (result)
5708 {
5709 if (result == integer_zero_node)
5710 result = build_zero_cst (type);
5711 else if (cplx_result && TREE_TYPE (result) != type)
5712 {
5713 if (TREE_CODE (result) == INTEGER_CST)
5714 {
5715 if (arith_overflowed_p (PLUS_EXPR, type, result,
5716 integer_zero_node))
5717 overflow = build_one_cst (type);
5718 }
5719 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
5720 && TYPE_UNSIGNED (type))
5721 || (TYPE_PRECISION (type)
5722 < (TYPE_PRECISION (TREE_TYPE (result))
5723 + (TYPE_UNSIGNED (TREE_TYPE (result))
5724 && !TYPE_UNSIGNED (type)))))
5725 result = NULL_TREE;
5726 if (result)
5727 result = fold_convert (type, result);
5728 }
368b454d
JJ
5729 }
5730 }
1304953e 5731
ed9c79e1
JJ
5732 if (result)
5733 {
1304953e
JJ
5734 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
5735 result = drop_tree_overflow (result);
5736 if (cplx_result)
5737 {
5738 if (overflow == NULL_TREE)
5739 overflow = build_zero_cst (TREE_TYPE (result));
5740 tree ctype = build_complex_type (TREE_TYPE (result));
5741 if (TREE_CODE (result) == INTEGER_CST
5742 && TREE_CODE (overflow) == INTEGER_CST)
5743 result = build_complex (ctype, result, overflow);
5744 else
5745 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
5746 ctype, result, overflow);
5747 }
52a5515e 5748 gimplify_and_update_call_from_tree (gsi, result);
ed9c79e1
JJ
5749 changed = true;
5750 }
5751 }
3b45a007 5752
e021c122 5753 return changed;
cbdd87d4
RG
5754}
5755
e0ee10ed 5756
89a79e96
RB
5757/* Return true whether NAME has a use on STMT. */
5758
5759static bool
355fe088 5760has_use_on_stmt (tree name, gimple *stmt)
89a79e96
RB
5761{
5762 imm_use_iterator iter;
5763 use_operand_p use_p;
5764 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
5765 if (USE_STMT (use_p) == stmt)
5766 return true;
5767 return false;
5768}
5769
e0ee10ed
RB
5770/* Worker for fold_stmt_1 dispatch to pattern based folding with
5771 gimple_simplify.
5772
5773 Replaces *GSI with the simplification result in RCODE and OPS
5774 and the associated statements in *SEQ. Does the replacement
5775 according to INPLACE and returns true if the operation succeeded. */
5776
5777static bool
5778replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
5d75ad95 5779 gimple_match_op *res_op,
e0ee10ed
RB
5780 gimple_seq *seq, bool inplace)
5781{
355fe088 5782 gimple *stmt = gsi_stmt (*gsi);
5d75ad95
RS
5783 tree *ops = res_op->ops;
5784 unsigned int num_ops = res_op->num_ops;
e0ee10ed
RB
5785
5786 /* Play safe and do not allow abnormals to be mentioned in
89a79e96
RB
5787 newly created statements. See also maybe_push_res_to_seq.
5788 As an exception allow such uses if there was a use of the
5789 same SSA name on the old stmt. */
5d75ad95
RS
5790 for (unsigned int i = 0; i < num_ops; ++i)
5791 if (TREE_CODE (ops[i]) == SSA_NAME
5792 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])
5793 && !has_use_on_stmt (ops[i], stmt))
5794 return false;
5795
5796 if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
5797 for (unsigned int i = 0; i < 2; ++i)
5798 if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
5799 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))
5800 && !has_use_on_stmt (TREE_OPERAND (ops[0], i), stmt))
5801 return false;
e0ee10ed 5802
fec40d06
RS
5803 /* Don't insert new statements when INPLACE is true, even if we could
5804 reuse STMT for the final statement. */
5805 if (inplace && !gimple_seq_empty_p (*seq))
5806 return false;
5807
538dd0b7 5808 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
e0ee10ed 5809 {
5d75ad95
RS
5810 gcc_assert (res_op->code.is_tree_code ());
5811 if (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison
e0ee10ed
RB
5812 /* GIMPLE_CONDs condition may not throw. */
5813 && (!flag_exceptions
5814 || !cfun->can_throw_non_call_exceptions
5d75ad95 5815 || !operation_could_trap_p (res_op->code,
e0ee10ed
RB
5816 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
5817 false, NULL_TREE)))
5d75ad95
RS
5818 gimple_cond_set_condition (cond_stmt, res_op->code, ops[0], ops[1]);
5819 else if (res_op->code == SSA_NAME)
538dd0b7 5820 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
e0ee10ed 5821 build_zero_cst (TREE_TYPE (ops[0])));
5d75ad95 5822 else if (res_op->code == INTEGER_CST)
e0ee10ed
RB
5823 {
5824 if (integer_zerop (ops[0]))
538dd0b7 5825 gimple_cond_make_false (cond_stmt);
e0ee10ed 5826 else
538dd0b7 5827 gimple_cond_make_true (cond_stmt);
e0ee10ed
RB
5828 }
5829 else if (!inplace)
5830 {
5d75ad95 5831 tree res = maybe_push_res_to_seq (res_op, seq);
e0ee10ed
RB
5832 if (!res)
5833 return false;
538dd0b7 5834 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
e0ee10ed
RB
5835 build_zero_cst (TREE_TYPE (res)));
5836 }
5837 else
5838 return false;
5839 if (dump_file && (dump_flags & TDF_DETAILS))
5840 {
5841 fprintf (dump_file, "gimple_simplified to ");
5842 if (!gimple_seq_empty_p (*seq))
5843 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
5844 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
5845 0, TDF_SLIM);
5846 }
5847 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
5848 return true;
5849 }
5850 else if (is_gimple_assign (stmt)
5d75ad95 5851 && res_op->code.is_tree_code ())
e0ee10ed
RB
5852 {
5853 if (!inplace
5d75ad95 5854 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (res_op->code))
e0ee10ed 5855 {
5d75ad95
RS
5856 maybe_build_generic_op (res_op);
5857 gimple_assign_set_rhs_with_ops (gsi, res_op->code,
5858 res_op->op_or_null (0),
5859 res_op->op_or_null (1),
5860 res_op->op_or_null (2));
e0ee10ed
RB
5861 if (dump_file && (dump_flags & TDF_DETAILS))
5862 {
5863 fprintf (dump_file, "gimple_simplified to ");
5864 if (!gimple_seq_empty_p (*seq))
5865 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
5866 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
5867 0, TDF_SLIM);
5868 }
5869 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
5870 return true;
5871 }
5872 }
5d75ad95
RS
5873 else if (res_op->code.is_fn_code ()
5874 && gimple_call_combined_fn (stmt) == res_op->code)
37d486ab 5875 {
5d75ad95
RS
5876 gcc_assert (num_ops == gimple_call_num_args (stmt));
5877 for (unsigned int i = 0; i < num_ops; ++i)
5878 gimple_call_set_arg (stmt, i, ops[i]);
fec40d06
RS
5879 if (dump_file && (dump_flags & TDF_DETAILS))
5880 {
5881 fprintf (dump_file, "gimple_simplified to ");
5882 if (!gimple_seq_empty_p (*seq))
5883 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
5884 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
5885 }
5886 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
37d486ab
RB
5887 return true;
5888 }
e0ee10ed
RB
5889 else if (!inplace)
5890 {
5891 if (gimple_has_lhs (stmt))
5892 {
5893 tree lhs = gimple_get_lhs (stmt);
5d75ad95 5894 if (!maybe_push_res_to_seq (res_op, seq, lhs))
de665bbd 5895 return false;
e0ee10ed
RB
5896 if (dump_file && (dump_flags & TDF_DETAILS))
5897 {
5898 fprintf (dump_file, "gimple_simplified to ");
5899 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
5900 }
5901 gsi_replace_with_seq_vops (gsi, *seq);
5902 return true;
5903 }
5904 else
5905 gcc_unreachable ();
5906 }
5907
5908 return false;
5909}
5910
040292e7
RB
5911/* Canonicalize MEM_REFs invariant address operand after propagation. */
5912
5913static bool
fabe0ede 5914maybe_canonicalize_mem_ref_addr (tree *t, bool is_debug = false)
040292e7
RB
5915{
5916 bool res = false;
fe8c8f1e 5917 tree *orig_t = t;
040292e7
RB
5918
5919 if (TREE_CODE (*t) == ADDR_EXPR)
5920 t = &TREE_OPERAND (*t, 0);
5921
f17a223d
RB
5922 /* The C and C++ frontends use an ARRAY_REF for indexing with their
5923 generic vector extension. The actual vector referenced is
5924 view-converted to an array type for this purpose. If the index
5925 is constant the canonical representation in the middle-end is a
5926 BIT_FIELD_REF so re-write the former to the latter here. */
5927 if (TREE_CODE (*t) == ARRAY_REF
5928 && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
5929 && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
5930 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
5931 {
5932 tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
5933 if (VECTOR_TYPE_P (vtype))
5934 {
5935 tree low = array_ref_low_bound (*t);
5936 if (TREE_CODE (low) == INTEGER_CST)
5937 {
5938 if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
5939 {
5940 widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
5941 wi::to_widest (low));
5942 idx = wi::mul (idx, wi::to_widest
5943 (TYPE_SIZE (TREE_TYPE (*t))));
5944 widest_int ext
5945 = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
5946 if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
5947 {
5948 *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
5949 TREE_TYPE (*t),
5950 TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
5951 TYPE_SIZE (TREE_TYPE (*t)),
92e29a5e 5952 wide_int_to_tree (bitsizetype, idx));
f17a223d
RB
5953 res = true;
5954 }
5955 }
5956 }
5957 }
5958 }
5959
040292e7
RB
5960 while (handled_component_p (*t))
5961 t = &TREE_OPERAND (*t, 0);
5962
5963 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
5964 of invariant addresses into a SSA name MEM_REF address. */
5965 if (TREE_CODE (*t) == MEM_REF
5966 || TREE_CODE (*t) == TARGET_MEM_REF)
5967 {
5968 tree addr = TREE_OPERAND (*t, 0);
5969 if (TREE_CODE (addr) == ADDR_EXPR
5970 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
5971 || handled_component_p (TREE_OPERAND (addr, 0))))
5972 {
5973 tree base;
a90c8804 5974 poly_int64 coffset;
040292e7
RB
5975 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
5976 &coffset);
5977 if (!base)
fabe0ede
JJ
5978 {
5979 if (is_debug)
5980 return false;
5981 gcc_unreachable ();
5982 }
040292e7
RB
5983
5984 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
5985 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
5986 TREE_OPERAND (*t, 1),
5987 size_int (coffset));
5988 res = true;
5989 }
5990 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
5991 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
5992 }
5993
5994 /* Canonicalize back MEM_REFs to plain reference trees if the object
5995 accessed is a decl that has the same access semantics as the MEM_REF. */
5996 if (TREE_CODE (*t) == MEM_REF
5997 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
f3dccf50
RB
5998 && integer_zerop (TREE_OPERAND (*t, 1))
5999 && MR_DEPENDENCE_CLIQUE (*t) == 0)
040292e7
RB
6000 {
6001 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
6002 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
6003 if (/* Same volatile qualification. */
6004 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
6005 /* Same TBAA behavior with -fstrict-aliasing. */
6006 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
6007 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
6008 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
6009 /* Same alignment. */
6010 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
6011 /* We have to look out here to not drop a required conversion
6012 from the rhs to the lhs if *t appears on the lhs or vice-versa
6013 if it appears on the rhs. Thus require strict type
6014 compatibility. */
6015 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
6016 {
6017 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
6018 res = true;
6019 }
6020 }
6021
fe8c8f1e
RB
6022 else if (TREE_CODE (*orig_t) == ADDR_EXPR
6023 && TREE_CODE (*t) == MEM_REF
6024 && TREE_CODE (TREE_OPERAND (*t, 0)) == INTEGER_CST)
6025 {
6026 tree base;
6027 poly_int64 coffset;
6028 base = get_addr_base_and_unit_offset (TREE_OPERAND (*orig_t, 0),
6029 &coffset);
6030 if (base)
6031 {
6032 gcc_assert (TREE_CODE (base) == MEM_REF);
6033 poly_int64 moffset;
6034 if (mem_ref_offset (base).to_shwi (&moffset))
6035 {
6036 coffset += moffset;
6037 if (wi::to_poly_wide (TREE_OPERAND (base, 0)).to_shwi (&moffset))
6038 {
6039 coffset += moffset;
6040 *orig_t = build_int_cst (TREE_TYPE (*orig_t), coffset);
6041 return true;
6042 }
6043 }
6044 }
6045 }
6046
040292e7
RB
6047 /* Canonicalize TARGET_MEM_REF in particular with respect to
6048 the indexes becoming constant. */
6049 else if (TREE_CODE (*t) == TARGET_MEM_REF)
6050 {
6051 tree tem = maybe_fold_tmr (*t);
6052 if (tem)
6053 {
6054 *t = tem;
c7789683
RS
6055 if (TREE_CODE (*orig_t) == ADDR_EXPR)
6056 recompute_tree_invariant_for_addr_expr (*orig_t);
040292e7
RB
6057 res = true;
6058 }
6059 }
6060
6061 return res;
6062}
6063
cbdd87d4
RG
6064/* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
6065 distinguishes both cases. */
6066
6067static bool
e0ee10ed 6068fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
cbdd87d4
RG
6069{
6070 bool changed = false;
355fe088 6071 gimple *stmt = gsi_stmt (*gsi);
e9e2bad7 6072 bool nowarning = warning_suppressed_p (stmt, OPT_Wstrict_overflow);
cbdd87d4 6073 unsigned i;
a8b85ce9 6074 fold_defer_overflow_warnings ();
cbdd87d4 6075
040292e7
RB
6076 /* First do required canonicalization of [TARGET_]MEM_REF addresses
6077 after propagation.
6078 ??? This shouldn't be done in generic folding but in the
6079 propagation helpers which also know whether an address was
89a79e96
RB
6080 propagated.
6081 Also canonicalize operand order. */
040292e7
RB
6082 switch (gimple_code (stmt))
6083 {
6084 case GIMPLE_ASSIGN:
6085 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
6086 {
6087 tree *rhs = gimple_assign_rhs1_ptr (stmt);
6088 if ((REFERENCE_CLASS_P (*rhs)
6089 || TREE_CODE (*rhs) == ADDR_EXPR)
6090 && maybe_canonicalize_mem_ref_addr (rhs))
6091 changed = true;
6092 tree *lhs = gimple_assign_lhs_ptr (stmt);
6093 if (REFERENCE_CLASS_P (*lhs)
6094 && maybe_canonicalize_mem_ref_addr (lhs))
6095 changed = true;
6096 }
89a79e96
RB
6097 else
6098 {
6099 /* Canonicalize operand order. */
6100 enum tree_code code = gimple_assign_rhs_code (stmt);
6101 if (TREE_CODE_CLASS (code) == tcc_comparison
6102 || commutative_tree_code (code)
6103 || commutative_ternary_tree_code (code))
6104 {
6105 tree rhs1 = gimple_assign_rhs1 (stmt);
6106 tree rhs2 = gimple_assign_rhs2 (stmt);
14e72812 6107 if (tree_swap_operands_p (rhs1, rhs2))
89a79e96
RB
6108 {
6109 gimple_assign_set_rhs1 (stmt, rhs2);
6110 gimple_assign_set_rhs2 (stmt, rhs1);
6111 if (TREE_CODE_CLASS (code) == tcc_comparison)
6112 gimple_assign_set_rhs_code (stmt,
6113 swap_tree_comparison (code));
6114 changed = true;
6115 }
6116 }
6117 }
040292e7
RB
6118 break;
6119 case GIMPLE_CALL:
6120 {
6121 for (i = 0; i < gimple_call_num_args (stmt); ++i)
6122 {
6123 tree *arg = gimple_call_arg_ptr (stmt, i);
6124 if (REFERENCE_CLASS_P (*arg)
6125 && maybe_canonicalize_mem_ref_addr (arg))
6126 changed = true;
6127 }
6128 tree *lhs = gimple_call_lhs_ptr (stmt);
6129 if (*lhs
6130 && REFERENCE_CLASS_P (*lhs)
6131 && maybe_canonicalize_mem_ref_addr (lhs))
6132 changed = true;
6133 break;
6134 }
6135 case GIMPLE_ASM:
6136 {
538dd0b7
DM
6137 gasm *asm_stmt = as_a <gasm *> (stmt);
6138 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
040292e7 6139 {
538dd0b7 6140 tree link = gimple_asm_output_op (asm_stmt, i);
040292e7
RB
6141 tree op = TREE_VALUE (link);
6142 if (REFERENCE_CLASS_P (op)
6143 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
6144 changed = true;
6145 }
538dd0b7 6146 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
040292e7 6147 {
538dd0b7 6148 tree link = gimple_asm_input_op (asm_stmt, i);
040292e7
RB
6149 tree op = TREE_VALUE (link);
6150 if ((REFERENCE_CLASS_P (op)
6151 || TREE_CODE (op) == ADDR_EXPR)
6152 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
6153 changed = true;
6154 }
6155 }
6156 break;
6157 case GIMPLE_DEBUG:
6158 if (gimple_debug_bind_p (stmt))
6159 {
6160 tree *val = gimple_debug_bind_get_value_ptr (stmt);
6161 if (*val
6162 && (REFERENCE_CLASS_P (*val)
6163 || TREE_CODE (*val) == ADDR_EXPR)
fabe0ede 6164 && maybe_canonicalize_mem_ref_addr (val, true))
040292e7
RB
6165 changed = true;
6166 }
6167 break;
89a79e96
RB
6168 case GIMPLE_COND:
6169 {
6170 /* Canonicalize operand order. */
6171 tree lhs = gimple_cond_lhs (stmt);
6172 tree rhs = gimple_cond_rhs (stmt);
14e72812 6173 if (tree_swap_operands_p (lhs, rhs))
89a79e96
RB
6174 {
6175 gcond *gc = as_a <gcond *> (stmt);
6176 gimple_cond_set_lhs (gc, rhs);
6177 gimple_cond_set_rhs (gc, lhs);
6178 gimple_cond_set_code (gc,
6179 swap_tree_comparison (gimple_cond_code (gc)));
6180 changed = true;
6181 }
6182 }
040292e7
RB
6183 default:;
6184 }
6185
e0ee10ed
RB
6186 /* Dispatch to pattern-based folding. */
6187 if (!inplace
6188 || is_gimple_assign (stmt)
6189 || gimple_code (stmt) == GIMPLE_COND)
6190 {
6191 gimple_seq seq = NULL;
5d75ad95
RS
6192 gimple_match_op res_op;
6193 if (gimple_simplify (stmt, &res_op, inplace ? NULL : &seq,
0ff093d8 6194 valueize, valueize))
e0ee10ed 6195 {
5d75ad95 6196 if (replace_stmt_with_simplification (gsi, &res_op, &seq, inplace))
e0ee10ed
RB
6197 changed = true;
6198 else
6199 gimple_seq_discard (seq);
6200 }
6201 }
6202
6203 stmt = gsi_stmt (*gsi);
6204
cbdd87d4
RG
6205 /* Fold the main computation performed by the statement. */
6206 switch (gimple_code (stmt))
6207 {
6208 case GIMPLE_ASSIGN:
6209 {
819ec64c
RB
6210 /* Try to canonicalize for boolean-typed X the comparisons
6211 X == 0, X == 1, X != 0, and X != 1. */
6212 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
6213 || gimple_assign_rhs_code (stmt) == NE_EXPR)
5fbcc0ed 6214 {
819ec64c
RB
6215 tree lhs = gimple_assign_lhs (stmt);
6216 tree op1 = gimple_assign_rhs1 (stmt);
6217 tree op2 = gimple_assign_rhs2 (stmt);
6218 tree type = TREE_TYPE (op1);
6219
6220 /* Check whether the comparison operands are of the same boolean
6221 type as the result type is.
6222 Check that second operand is an integer-constant with value
6223 one or zero. */
6224 if (TREE_CODE (op2) == INTEGER_CST
6225 && (integer_zerop (op2) || integer_onep (op2))
6226 && useless_type_conversion_p (TREE_TYPE (lhs), type))
6227 {
6228 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
6229 bool is_logical_not = false;
6230
6231 /* X == 0 and X != 1 is a logical-not.of X
6232 X == 1 and X != 0 is X */
6233 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
6234 || (cmp_code == NE_EXPR && integer_onep (op2)))
6235 is_logical_not = true;
6236
6237 if (is_logical_not == false)
6238 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
6239 /* Only for one-bit precision typed X the transformation
6240 !X -> ~X is valied. */
6241 else if (TYPE_PRECISION (type) == 1)
6242 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
6243 /* Otherwise we use !X -> X ^ 1. */
6244 else
6245 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
6246 build_int_cst (type, 1));
6247 changed = true;
6248 break;
6249 }
5fbcc0ed 6250 }
819ec64c
RB
6251
6252 unsigned old_num_ops = gimple_num_ops (stmt);
6253 tree lhs = gimple_assign_lhs (stmt);
6254 tree new_rhs = fold_gimple_assign (gsi);
cbdd87d4
RG
6255 if (new_rhs
6256 && !useless_type_conversion_p (TREE_TYPE (lhs),
6257 TREE_TYPE (new_rhs)))
6258 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
6259 if (new_rhs
6260 && (!inplace
6261 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
6262 {
6263 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
6264 changed = true;
6265 }
6266 break;
6267 }
6268
cbdd87d4 6269 case GIMPLE_CALL:
ceeffab0 6270 changed |= gimple_fold_call (gsi, inplace);
cbdd87d4
RG
6271 break;
6272
bd422c4a
RG
6273 case GIMPLE_DEBUG:
6274 if (gimple_debug_bind_p (stmt))
6275 {
6276 tree val = gimple_debug_bind_get_value (stmt);
6277 if (val
6278 && REFERENCE_CLASS_P (val))
6279 {
0bf8cd9d 6280 tree tem = maybe_fold_reference (val);
bd422c4a
RG
6281 if (tem)
6282 {
6283 gimple_debug_bind_set_value (stmt, tem);
6284 changed = true;
6285 }
6286 }
3e888a5e
RG
6287 else if (val
6288 && TREE_CODE (val) == ADDR_EXPR)
6289 {
6290 tree ref = TREE_OPERAND (val, 0);
0bf8cd9d 6291 tree tem = maybe_fold_reference (ref);
3e888a5e
RG
6292 if (tem)
6293 {
6294 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
6295 gimple_debug_bind_set_value (stmt, tem);
6296 changed = true;
6297 }
6298 }
bd422c4a
RG
6299 }
6300 break;
6301
cfe3d653
PK
6302 case GIMPLE_RETURN:
6303 {
6304 greturn *ret_stmt = as_a<greturn *> (stmt);
6305 tree ret = gimple_return_retval(ret_stmt);
6306
6307 if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
6308 {
6309 tree val = valueize (ret);
1af928db
RB
6310 if (val && val != ret
6311 && may_propagate_copy (ret, val))
cfe3d653
PK
6312 {
6313 gimple_return_set_retval (ret_stmt, val);
6314 changed = true;
6315 }
6316 }
6317 }
6318 break;
6319
cbdd87d4
RG
6320 default:;
6321 }
6322
6323 stmt = gsi_stmt (*gsi);
6324
a8b85ce9 6325 fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
cbdd87d4
RG
6326 return changed;
6327}
6328
e0ee10ed
RB
6329/* Valueziation callback that ends up not following SSA edges. */
6330
6331tree
6332no_follow_ssa_edges (tree)
6333{
6334 return NULL_TREE;
6335}
6336
45cc9f96
RB
6337/* Valueization callback that ends up following single-use SSA edges only. */
6338
6339tree
6340follow_single_use_edges (tree val)
6341{
6342 if (TREE_CODE (val) == SSA_NAME
6343 && !has_single_use (val))
6344 return NULL_TREE;
6345 return val;
6346}
6347
c566cc9f
RS
6348/* Valueization callback that follows all SSA edges. */
6349
6350tree
6351follow_all_ssa_edges (tree val)
6352{
6353 return val;
6354}
6355
cbdd87d4
RG
6356/* Fold the statement pointed to by GSI. In some cases, this function may
6357 replace the whole statement with a new one. Returns true iff folding
6358 makes any changes.
6359 The statement pointed to by GSI should be in valid gimple form but may
6360 be in unfolded state as resulting from for example constant propagation
6361 which can produce *&x = 0. */
6362
6363bool
6364fold_stmt (gimple_stmt_iterator *gsi)
6365{
e0ee10ed
RB
6366 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
6367}
6368
6369bool
6370fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
6371{
6372 return fold_stmt_1 (gsi, false, valueize);
cbdd87d4
RG
6373}
6374
59401b92 6375/* Perform the minimal folding on statement *GSI. Only operations like
cbdd87d4
RG
6376 *&x created by constant propagation are handled. The statement cannot
6377 be replaced with a new one. Return true if the statement was
6378 changed, false otherwise.
59401b92 6379 The statement *GSI should be in valid gimple form but may
cbdd87d4
RG
6380 be in unfolded state as resulting from for example constant propagation
6381 which can produce *&x = 0. */
6382
6383bool
59401b92 6384fold_stmt_inplace (gimple_stmt_iterator *gsi)
cbdd87d4 6385{
355fe088 6386 gimple *stmt = gsi_stmt (*gsi);
e0ee10ed 6387 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
59401b92 6388 gcc_assert (gsi_stmt (*gsi) == stmt);
cbdd87d4
RG
6389 return changed;
6390}
6391
e89065a1
SL
6392/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
6393 if EXPR is null or we don't know how.
6394 If non-null, the result always has boolean type. */
6395
6396static tree
6397canonicalize_bool (tree expr, bool invert)
6398{
6399 if (!expr)
6400 return NULL_TREE;
6401 else if (invert)
6402 {
6403 if (integer_nonzerop (expr))
6404 return boolean_false_node;
6405 else if (integer_zerop (expr))
6406 return boolean_true_node;
6407 else if (TREE_CODE (expr) == SSA_NAME)
6408 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
6409 build_int_cst (TREE_TYPE (expr), 0));
98209db3 6410 else if (COMPARISON_CLASS_P (expr))
e89065a1
SL
6411 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
6412 boolean_type_node,
6413 TREE_OPERAND (expr, 0),
6414 TREE_OPERAND (expr, 1));
6415 else
6416 return NULL_TREE;
6417 }
6418 else
6419 {
6420 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
6421 return expr;
6422 if (integer_nonzerop (expr))
6423 return boolean_true_node;
6424 else if (integer_zerop (expr))
6425 return boolean_false_node;
6426 else if (TREE_CODE (expr) == SSA_NAME)
6427 return fold_build2 (NE_EXPR, boolean_type_node, expr,
6428 build_int_cst (TREE_TYPE (expr), 0));
98209db3 6429 else if (COMPARISON_CLASS_P (expr))
e89065a1
SL
6430 return fold_build2 (TREE_CODE (expr),
6431 boolean_type_node,
6432 TREE_OPERAND (expr, 0),
6433 TREE_OPERAND (expr, 1));
6434 else
6435 return NULL_TREE;
6436 }
6437}
6438
6439/* Check to see if a boolean expression EXPR is logically equivalent to the
6440 comparison (OP1 CODE OP2). Check for various identities involving
6441 SSA_NAMEs. */
6442
6443static bool
6444same_bool_comparison_p (const_tree expr, enum tree_code code,
6445 const_tree op1, const_tree op2)
6446{
355fe088 6447 gimple *s;
e89065a1
SL
6448
6449 /* The obvious case. */
6450 if (TREE_CODE (expr) == code
6451 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
6452 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
6453 return true;
6454
6455 /* Check for comparing (name, name != 0) and the case where expr
6456 is an SSA_NAME with a definition matching the comparison. */
6457 if (TREE_CODE (expr) == SSA_NAME
6458 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
6459 {
6460 if (operand_equal_p (expr, op1, 0))
6461 return ((code == NE_EXPR && integer_zerop (op2))
6462 || (code == EQ_EXPR && integer_nonzerop (op2)));
6463 s = SSA_NAME_DEF_STMT (expr);
6464 if (is_gimple_assign (s)
6465 && gimple_assign_rhs_code (s) == code
6466 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
6467 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
6468 return true;
6469 }
6470
6471 /* If op1 is of the form (name != 0) or (name == 0), and the definition
6472 of name is a comparison, recurse. */
6473 if (TREE_CODE (op1) == SSA_NAME
6474 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
6475 {
6476 s = SSA_NAME_DEF_STMT (op1);
6477 if (is_gimple_assign (s)
6478 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
6479 {
6480 enum tree_code c = gimple_assign_rhs_code (s);
6481 if ((c == NE_EXPR && integer_zerop (op2))
6482 || (c == EQ_EXPR && integer_nonzerop (op2)))
6483 return same_bool_comparison_p (expr, c,
6484 gimple_assign_rhs1 (s),
6485 gimple_assign_rhs2 (s));
6486 if ((c == EQ_EXPR && integer_zerop (op2))
6487 || (c == NE_EXPR && integer_nonzerop (op2)))
6488 return same_bool_comparison_p (expr,
6489 invert_tree_comparison (c, false),
6490 gimple_assign_rhs1 (s),
6491 gimple_assign_rhs2 (s));
6492 }
6493 }
6494 return false;
6495}
6496
6497/* Check to see if two boolean expressions OP1 and OP2 are logically
6498 equivalent. */
6499
6500static bool
6501same_bool_result_p (const_tree op1, const_tree op2)
6502{
6503 /* Simple cases first. */
6504 if (operand_equal_p (op1, op2, 0))
6505 return true;
6506
6507 /* Check the cases where at least one of the operands is a comparison.
6508 These are a bit smarter than operand_equal_p in that they apply some
6509 identifies on SSA_NAMEs. */
98209db3 6510 if (COMPARISON_CLASS_P (op2)
e89065a1
SL
6511 && same_bool_comparison_p (op1, TREE_CODE (op2),
6512 TREE_OPERAND (op2, 0),
6513 TREE_OPERAND (op2, 1)))
6514 return true;
98209db3 6515 if (COMPARISON_CLASS_P (op1)
e89065a1
SL
6516 && same_bool_comparison_p (op2, TREE_CODE (op1),
6517 TREE_OPERAND (op1, 0),
6518 TREE_OPERAND (op1, 1)))
6519 return true;
6520
6521 /* Default case. */
6522 return false;
6523}
6524
6525/* Forward declarations for some mutually recursive functions. */
6526
6527static tree
5f487a34 6528and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
e89065a1
SL
6529 enum tree_code code2, tree op2a, tree op2b);
6530static tree
5f487a34 6531and_var_with_comparison (tree type, tree var, bool invert,
e89065a1
SL
6532 enum tree_code code2, tree op2a, tree op2b);
6533static tree
5f487a34 6534and_var_with_comparison_1 (tree type, gimple *stmt,
e89065a1
SL
6535 enum tree_code code2, tree op2a, tree op2b);
6536static tree
5f487a34 6537or_comparisons_1 (tree, enum tree_code code1, tree op1a, tree op1b,
e89065a1
SL
6538 enum tree_code code2, tree op2a, tree op2b);
6539static tree
5f487a34 6540or_var_with_comparison (tree, tree var, bool invert,
e89065a1
SL
6541 enum tree_code code2, tree op2a, tree op2b);
6542static tree
5f487a34 6543or_var_with_comparison_1 (tree, gimple *stmt,
e89065a1
SL
6544 enum tree_code code2, tree op2a, tree op2b);
6545
6546/* Helper function for and_comparisons_1: try to simplify the AND of the
6547 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
6548 If INVERT is true, invert the value of the VAR before doing the AND.
6549 Return NULL_EXPR if we can't simplify this to a single expression. */
6550
6551static tree
5f487a34 6552and_var_with_comparison (tree type, tree var, bool invert,
e89065a1
SL
6553 enum tree_code code2, tree op2a, tree op2b)
6554{
6555 tree t;
355fe088 6556 gimple *stmt = SSA_NAME_DEF_STMT (var);
e89065a1
SL
6557
6558 /* We can only deal with variables whose definitions are assignments. */
6559 if (!is_gimple_assign (stmt))
6560 return NULL_TREE;
6561
6562 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
6563 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
6564 Then we only have to consider the simpler non-inverted cases. */
6565 if (invert)
5f487a34 6566 t = or_var_with_comparison_1 (type, stmt,
e89065a1
SL
6567 invert_tree_comparison (code2, false),
6568 op2a, op2b);
6569 else
5f487a34 6570 t = and_var_with_comparison_1 (type, stmt, code2, op2a, op2b);
e89065a1
SL
6571 return canonicalize_bool (t, invert);
6572}
6573
6574/* Try to simplify the AND of the ssa variable defined by the assignment
6575 STMT with the comparison specified by (OP2A CODE2 OP2B).
6576 Return NULL_EXPR if we can't simplify this to a single expression. */
6577
6578static tree
5f487a34 6579and_var_with_comparison_1 (tree type, gimple *stmt,
e89065a1
SL
6580 enum tree_code code2, tree op2a, tree op2b)
6581{
6582 tree var = gimple_assign_lhs (stmt);
6583 tree true_test_var = NULL_TREE;
6584 tree false_test_var = NULL_TREE;
6585 enum tree_code innercode = gimple_assign_rhs_code (stmt);
6586
6587 /* Check for identities like (var AND (var == 0)) => false. */
6588 if (TREE_CODE (op2a) == SSA_NAME
6589 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
6590 {
6591 if ((code2 == NE_EXPR && integer_zerop (op2b))
6592 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
6593 {
6594 true_test_var = op2a;
6595 if (var == true_test_var)
6596 return var;
6597 }
6598 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
6599 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
6600 {
6601 false_test_var = op2a;
6602 if (var == false_test_var)
6603 return boolean_false_node;
6604 }
6605 }
6606
6607 /* If the definition is a comparison, recurse on it. */
6608 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
6609 {
5f487a34 6610 tree t = and_comparisons_1 (type, innercode,
e89065a1
SL
6611 gimple_assign_rhs1 (stmt),
6612 gimple_assign_rhs2 (stmt),
6613 code2,
6614 op2a,
6615 op2b);
6616 if (t)
6617 return t;
6618 }
6619
6620 /* If the definition is an AND or OR expression, we may be able to
6621 simplify by reassociating. */
eb9820c0
KT
6622 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
6623 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
e89065a1
SL
6624 {
6625 tree inner1 = gimple_assign_rhs1 (stmt);
6626 tree inner2 = gimple_assign_rhs2 (stmt);
355fe088 6627 gimple *s;
e89065a1
SL
6628 tree t;
6629 tree partial = NULL_TREE;
eb9820c0 6630 bool is_and = (innercode == BIT_AND_EXPR);
e89065a1
SL
6631
6632 /* Check for boolean identities that don't require recursive examination
6633 of inner1/inner2:
6634 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
6635 inner1 AND (inner1 OR inner2) => inner1
6636 !inner1 AND (inner1 AND inner2) => false
6637 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
6638 Likewise for similar cases involving inner2. */
6639 if (inner1 == true_test_var)
6640 return (is_and ? var : inner1);
6641 else if (inner2 == true_test_var)
6642 return (is_and ? var : inner2);
6643 else if (inner1 == false_test_var)
6644 return (is_and
6645 ? boolean_false_node
5f487a34
LJH
6646 : and_var_with_comparison (type, inner2, false, code2, op2a,
6647 op2b));
e89065a1
SL
6648 else if (inner2 == false_test_var)
6649 return (is_and
6650 ? boolean_false_node
5f487a34
LJH
6651 : and_var_with_comparison (type, inner1, false, code2, op2a,
6652 op2b));
e89065a1
SL
6653
6654 /* Next, redistribute/reassociate the AND across the inner tests.
6655 Compute the first partial result, (inner1 AND (op2a code op2b)) */
6656 if (TREE_CODE (inner1) == SSA_NAME
6657 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
6658 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5f487a34 6659 && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
e89065a1
SL
6660 gimple_assign_rhs1 (s),
6661 gimple_assign_rhs2 (s),
6662 code2, op2a, op2b)))
6663 {
6664 /* Handle the AND case, where we are reassociating:
6665 (inner1 AND inner2) AND (op2a code2 op2b)
6666 => (t AND inner2)
6667 If the partial result t is a constant, we win. Otherwise
6668 continue on to try reassociating with the other inner test. */
6669 if (is_and)
6670 {
6671 if (integer_onep (t))
6672 return inner2;
6673 else if (integer_zerop (t))
6674 return boolean_false_node;
6675 }
6676
6677 /* Handle the OR case, where we are redistributing:
6678 (inner1 OR inner2) AND (op2a code2 op2b)
6679 => (t OR (inner2 AND (op2a code2 op2b))) */
8236c8eb
JJ
6680 else if (integer_onep (t))
6681 return boolean_true_node;
6682
6683 /* Save partial result for later. */
6684 partial = t;
e89065a1
SL
6685 }
6686
6687 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
6688 if (TREE_CODE (inner2) == SSA_NAME
6689 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
6690 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5f487a34 6691 && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
e89065a1
SL
6692 gimple_assign_rhs1 (s),
6693 gimple_assign_rhs2 (s),
6694 code2, op2a, op2b)))
6695 {
6696 /* Handle the AND case, where we are reassociating:
6697 (inner1 AND inner2) AND (op2a code2 op2b)
6698 => (inner1 AND t) */
6699 if (is_and)
6700 {
6701 if (integer_onep (t))
6702 return inner1;
6703 else if (integer_zerop (t))
6704 return boolean_false_node;
8236c8eb
JJ
6705 /* If both are the same, we can apply the identity
6706 (x AND x) == x. */
6707 else if (partial && same_bool_result_p (t, partial))
6708 return t;
e89065a1
SL
6709 }
6710
6711 /* Handle the OR case. where we are redistributing:
6712 (inner1 OR inner2) AND (op2a code2 op2b)
6713 => (t OR (inner1 AND (op2a code2 op2b)))
6714 => (t OR partial) */
6715 else
6716 {
6717 if (integer_onep (t))
6718 return boolean_true_node;
6719 else if (partial)
6720 {
6721 /* We already got a simplification for the other
6722 operand to the redistributed OR expression. The
6723 interesting case is when at least one is false.
6724 Or, if both are the same, we can apply the identity
6725 (x OR x) == x. */
6726 if (integer_zerop (partial))
6727 return t;
6728 else if (integer_zerop (t))
6729 return partial;
6730 else if (same_bool_result_p (t, partial))
6731 return t;
6732 }
6733 }
6734 }
6735 }
6736 return NULL_TREE;
6737}
6738
6739/* Try to simplify the AND of two comparisons defined by
6740 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
6741 If this can be done without constructing an intermediate value,
6742 return the resulting tree; otherwise NULL_TREE is returned.
6743 This function is deliberately asymmetric as it recurses on SSA_DEFs
6744 in the first comparison but not the second. */
6745
6746static tree
5f487a34 6747and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
e89065a1
SL
6748 enum tree_code code2, tree op2a, tree op2b)
6749{
ae22ac3c 6750 tree truth_type = truth_type_for (TREE_TYPE (op1a));
31ed6226 6751
e89065a1
SL
6752 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
6753 if (operand_equal_p (op1a, op2a, 0)
6754 && operand_equal_p (op1b, op2b, 0))
6755 {
eb9820c0 6756 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
6757 tree t = combine_comparisons (UNKNOWN_LOCATION,
6758 TRUTH_ANDIF_EXPR, code1, code2,
31ed6226 6759 truth_type, op1a, op1b);
e89065a1
SL
6760 if (t)
6761 return t;
6762 }
6763
6764 /* Likewise the swapped case of the above. */
6765 if (operand_equal_p (op1a, op2b, 0)
6766 && operand_equal_p (op1b, op2a, 0))
6767 {
eb9820c0 6768 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
6769 tree t = combine_comparisons (UNKNOWN_LOCATION,
6770 TRUTH_ANDIF_EXPR, code1,
6771 swap_tree_comparison (code2),
31ed6226 6772 truth_type, op1a, op1b);
e89065a1
SL
6773 if (t)
6774 return t;
6775 }
6776
e89065a1
SL
6777 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
6778 NAME's definition is a truth value. See if there are any simplifications
6779 that can be done against the NAME's definition. */
6780 if (TREE_CODE (op1a) == SSA_NAME
6781 && (code1 == NE_EXPR || code1 == EQ_EXPR)
6782 && (integer_zerop (op1b) || integer_onep (op1b)))
6783 {
6784 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
6785 || (code1 == NE_EXPR && integer_onep (op1b)));
355fe088 6786 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
e89065a1
SL
6787 switch (gimple_code (stmt))
6788 {
6789 case GIMPLE_ASSIGN:
6790 /* Try to simplify by copy-propagating the definition. */
5f487a34
LJH
6791 return and_var_with_comparison (type, op1a, invert, code2, op2a,
6792 op2b);
e89065a1
SL
6793
6794 case GIMPLE_PHI:
6795 /* If every argument to the PHI produces the same result when
6796 ANDed with the second comparison, we win.
6797 Do not do this unless the type is bool since we need a bool
6798 result here anyway. */
6799 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
6800 {
6801 tree result = NULL_TREE;
6802 unsigned i;
6803 for (i = 0; i < gimple_phi_num_args (stmt); i++)
6804 {
6805 tree arg = gimple_phi_arg_def (stmt, i);
6806
6807 /* If this PHI has itself as an argument, ignore it.
6808 If all the other args produce the same result,
6809 we're still OK. */
6810 if (arg == gimple_phi_result (stmt))
6811 continue;
6812 else if (TREE_CODE (arg) == INTEGER_CST)
6813 {
6814 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
6815 {
6816 if (!result)
6817 result = boolean_false_node;
6818 else if (!integer_zerop (result))
6819 return NULL_TREE;
6820 }
6821 else if (!result)
6822 result = fold_build2 (code2, boolean_type_node,
6823 op2a, op2b);
6824 else if (!same_bool_comparison_p (result,
6825 code2, op2a, op2b))
6826 return NULL_TREE;
6827 }
0e8b84ec
JJ
6828 else if (TREE_CODE (arg) == SSA_NAME
6829 && !SSA_NAME_IS_DEFAULT_DEF (arg))
e89065a1 6830 {
6c66f733 6831 tree temp;
355fe088 6832 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6c66f733
JJ
6833 /* In simple cases we can look through PHI nodes,
6834 but we have to be careful with loops.
6835 See PR49073. */
6836 if (! dom_info_available_p (CDI_DOMINATORS)
6837 || gimple_bb (def_stmt) == gimple_bb (stmt)
6838 || dominated_by_p (CDI_DOMINATORS,
6839 gimple_bb (def_stmt),
6840 gimple_bb (stmt)))
6841 return NULL_TREE;
5f487a34 6842 temp = and_var_with_comparison (type, arg, invert, code2,
6c66f733 6843 op2a, op2b);
e89065a1
SL
6844 if (!temp)
6845 return NULL_TREE;
6846 else if (!result)
6847 result = temp;
6848 else if (!same_bool_result_p (result, temp))
6849 return NULL_TREE;
6850 }
6851 else
6852 return NULL_TREE;
6853 }
6854 return result;
6855 }
6856
6857 default:
6858 break;
6859 }
6860 }
6861 return NULL_TREE;
6862}
6863
5f487a34
LJH
6864/* Helper function for maybe_fold_and_comparisons and maybe_fold_or_comparisons
6865 : try to simplify the AND/OR of the ssa variable VAR with the comparison
6866 specified by (OP2A CODE2 OP2B) from match.pd. Return NULL_EXPR if we can't
6867 simplify this to a single expression. As we are going to lower the cost
6868 of building SSA names / gimple stmts significantly, we need to allocate
6869 them ont the stack. This will cause the code to be a bit ugly. */
6870
6871static tree
6872maybe_fold_comparisons_from_match_pd (tree type, enum tree_code code,
6873 enum tree_code code1,
6874 tree op1a, tree op1b,
6875 enum tree_code code2, tree op2a,
6876 tree op2b)
6877{
6878 /* Allocate gimple stmt1 on the stack. */
6879 gassign *stmt1
6880 = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
6881 gimple_init (stmt1, GIMPLE_ASSIGN, 3);
6882 gimple_assign_set_rhs_code (stmt1, code1);
6883 gimple_assign_set_rhs1 (stmt1, op1a);
6884 gimple_assign_set_rhs2 (stmt1, op1b);
6885
6886 /* Allocate gimple stmt2 on the stack. */
6887 gassign *stmt2
6888 = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
6889 gimple_init (stmt2, GIMPLE_ASSIGN, 3);
6890 gimple_assign_set_rhs_code (stmt2, code2);
6891 gimple_assign_set_rhs1 (stmt2, op2a);
6892 gimple_assign_set_rhs2 (stmt2, op2b);
6893
6894 /* Allocate SSA names(lhs1) on the stack. */
6895 tree lhs1 = (tree)XALLOCA (tree_ssa_name);
6896 memset (lhs1, 0, sizeof (tree_ssa_name));
6897 TREE_SET_CODE (lhs1, SSA_NAME);
6898 TREE_TYPE (lhs1) = type;
6899 init_ssa_name_imm_use (lhs1);
6900
6901 /* Allocate SSA names(lhs2) on the stack. */
6902 tree lhs2 = (tree)XALLOCA (tree_ssa_name);
6903 memset (lhs2, 0, sizeof (tree_ssa_name));
6904 TREE_SET_CODE (lhs2, SSA_NAME);
6905 TREE_TYPE (lhs2) = type;
6906 init_ssa_name_imm_use (lhs2);
6907
6908 gimple_assign_set_lhs (stmt1, lhs1);
6909 gimple_assign_set_lhs (stmt2, lhs2);
6910
6911 gimple_match_op op (gimple_match_cond::UNCOND, code,
6912 type, gimple_assign_lhs (stmt1),
6913 gimple_assign_lhs (stmt2));
6914 if (op.resimplify (NULL, follow_all_ssa_edges))
6915 {
6916 if (gimple_simplified_result_is_gimple_val (&op))
6917 {
6918 tree res = op.ops[0];
6919 if (res == lhs1)
6920 return build2 (code1, type, op1a, op1b);
6921 else if (res == lhs2)
6922 return build2 (code2, type, op2a, op2b);
6923 else
6924 return res;
6925 }
ae9c3507
ML
6926 else if (op.code.is_tree_code ()
6927 && TREE_CODE_CLASS ((tree_code)op.code) == tcc_comparison)
6928 {
6929 tree op0 = op.ops[0];
6930 tree op1 = op.ops[1];
6931 if (op0 == lhs1 || op0 == lhs2 || op1 == lhs1 || op1 == lhs2)
6932 return NULL_TREE; /* not simple */
6933
6934 return build2 ((enum tree_code)op.code, op.type, op0, op1);
6935 }
5f487a34
LJH
6936 }
6937
6938 return NULL_TREE;
6939}
6940
e89065a1
SL
6941/* Try to simplify the AND of two comparisons, specified by
6942 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
6943 If this can be simplified to a single expression (without requiring
6944 introducing more SSA variables to hold intermediate values),
6945 return the resulting tree. Otherwise return NULL_TREE.
6946 If the result expression is non-null, it has boolean type. */
6947
6948tree
5f487a34
LJH
6949maybe_fold_and_comparisons (tree type,
6950 enum tree_code code1, tree op1a, tree op1b,
e89065a1
SL
6951 enum tree_code code2, tree op2a, tree op2b)
6952{
5f487a34 6953 if (tree t = and_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b))
e89065a1 6954 return t;
5f487a34
LJH
6955
6956 if (tree t = and_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b))
6957 return t;
6958
6959 if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_AND_EXPR, code1,
6960 op1a, op1b, code2, op2a,
6961 op2b))
6962 return t;
6963
6964 return NULL_TREE;
e89065a1
SL
6965}
6966
6967/* Helper function for or_comparisons_1: try to simplify the OR of the
6968 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
6969 If INVERT is true, invert the value of VAR before doing the OR.
6970 Return NULL_EXPR if we can't simplify this to a single expression. */
6971
6972static tree
5f487a34 6973or_var_with_comparison (tree type, tree var, bool invert,
e89065a1
SL
6974 enum tree_code code2, tree op2a, tree op2b)
6975{
6976 tree t;
355fe088 6977 gimple *stmt = SSA_NAME_DEF_STMT (var);
e89065a1
SL
6978
6979 /* We can only deal with variables whose definitions are assignments. */
6980 if (!is_gimple_assign (stmt))
6981 return NULL_TREE;
6982
6983 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
6984 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
6985 Then we only have to consider the simpler non-inverted cases. */
6986 if (invert)
5f487a34 6987 t = and_var_with_comparison_1 (type, stmt,
e89065a1
SL
6988 invert_tree_comparison (code2, false),
6989 op2a, op2b);
6990 else
5f487a34 6991 t = or_var_with_comparison_1 (type, stmt, code2, op2a, op2b);
e89065a1
SL
6992 return canonicalize_bool (t, invert);
6993}
6994
6995/* Try to simplify the OR of the ssa variable defined by the assignment
6996 STMT with the comparison specified by (OP2A CODE2 OP2B).
6997 Return NULL_EXPR if we can't simplify this to a single expression. */
6998
6999static tree
5f487a34 7000or_var_with_comparison_1 (tree type, gimple *stmt,
e89065a1
SL
7001 enum tree_code code2, tree op2a, tree op2b)
7002{
7003 tree var = gimple_assign_lhs (stmt);
7004 tree true_test_var = NULL_TREE;
7005 tree false_test_var = NULL_TREE;
7006 enum tree_code innercode = gimple_assign_rhs_code (stmt);
7007
7008 /* Check for identities like (var OR (var != 0)) => true . */
7009 if (TREE_CODE (op2a) == SSA_NAME
7010 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
7011 {
7012 if ((code2 == NE_EXPR && integer_zerop (op2b))
7013 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
7014 {
7015 true_test_var = op2a;
7016 if (var == true_test_var)
7017 return var;
7018 }
7019 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
7020 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
7021 {
7022 false_test_var = op2a;
7023 if (var == false_test_var)
7024 return boolean_true_node;
7025 }
7026 }
7027
7028 /* If the definition is a comparison, recurse on it. */
7029 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
7030 {
5f487a34 7031 tree t = or_comparisons_1 (type, innercode,
e89065a1
SL
7032 gimple_assign_rhs1 (stmt),
7033 gimple_assign_rhs2 (stmt),
7034 code2,
7035 op2a,
7036 op2b);
7037 if (t)
7038 return t;
7039 }
7040
7041 /* If the definition is an AND or OR expression, we may be able to
7042 simplify by reassociating. */
eb9820c0
KT
7043 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
7044 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
e89065a1
SL
7045 {
7046 tree inner1 = gimple_assign_rhs1 (stmt);
7047 tree inner2 = gimple_assign_rhs2 (stmt);
355fe088 7048 gimple *s;
e89065a1
SL
7049 tree t;
7050 tree partial = NULL_TREE;
eb9820c0 7051 bool is_or = (innercode == BIT_IOR_EXPR);
e89065a1
SL
7052
7053 /* Check for boolean identities that don't require recursive examination
7054 of inner1/inner2:
7055 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
7056 inner1 OR (inner1 AND inner2) => inner1
7057 !inner1 OR (inner1 OR inner2) => true
7058 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
7059 */
7060 if (inner1 == true_test_var)
7061 return (is_or ? var : inner1);
7062 else if (inner2 == true_test_var)
7063 return (is_or ? var : inner2);
7064 else if (inner1 == false_test_var)
7065 return (is_or
7066 ? boolean_true_node
5f487a34
LJH
7067 : or_var_with_comparison (type, inner2, false, code2, op2a,
7068 op2b));
e89065a1
SL
7069 else if (inner2 == false_test_var)
7070 return (is_or
7071 ? boolean_true_node
5f487a34
LJH
7072 : or_var_with_comparison (type, inner1, false, code2, op2a,
7073 op2b));
e89065a1
SL
7074
7075 /* Next, redistribute/reassociate the OR across the inner tests.
7076 Compute the first partial result, (inner1 OR (op2a code op2b)) */
7077 if (TREE_CODE (inner1) == SSA_NAME
7078 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
7079 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5f487a34 7080 && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
e89065a1
SL
7081 gimple_assign_rhs1 (s),
7082 gimple_assign_rhs2 (s),
7083 code2, op2a, op2b)))
7084 {
7085 /* Handle the OR case, where we are reassociating:
7086 (inner1 OR inner2) OR (op2a code2 op2b)
7087 => (t OR inner2)
7088 If the partial result t is a constant, we win. Otherwise
7089 continue on to try reassociating with the other inner test. */
8236c8eb 7090 if (is_or)
e89065a1
SL
7091 {
7092 if (integer_onep (t))
7093 return boolean_true_node;
7094 else if (integer_zerop (t))
7095 return inner2;
7096 }
7097
7098 /* Handle the AND case, where we are redistributing:
7099 (inner1 AND inner2) OR (op2a code2 op2b)
7100 => (t AND (inner2 OR (op2a code op2b))) */
8236c8eb
JJ
7101 else if (integer_zerop (t))
7102 return boolean_false_node;
7103
7104 /* Save partial result for later. */
7105 partial = t;
e89065a1
SL
7106 }
7107
7108 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
7109 if (TREE_CODE (inner2) == SSA_NAME
7110 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
7111 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5f487a34 7112 && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
e89065a1
SL
7113 gimple_assign_rhs1 (s),
7114 gimple_assign_rhs2 (s),
7115 code2, op2a, op2b)))
7116 {
7117 /* Handle the OR case, where we are reassociating:
7118 (inner1 OR inner2) OR (op2a code2 op2b)
8236c8eb
JJ
7119 => (inner1 OR t)
7120 => (t OR partial) */
7121 if (is_or)
e89065a1
SL
7122 {
7123 if (integer_zerop (t))
7124 return inner1;
7125 else if (integer_onep (t))
7126 return boolean_true_node;
8236c8eb
JJ
7127 /* If both are the same, we can apply the identity
7128 (x OR x) == x. */
7129 else if (partial && same_bool_result_p (t, partial))
7130 return t;
e89065a1
SL
7131 }
7132
7133 /* Handle the AND case, where we are redistributing:
7134 (inner1 AND inner2) OR (op2a code2 op2b)
7135 => (t AND (inner1 OR (op2a code2 op2b)))
7136 => (t AND partial) */
7137 else
7138 {
7139 if (integer_zerop (t))
7140 return boolean_false_node;
7141 else if (partial)
7142 {
7143 /* We already got a simplification for the other
7144 operand to the redistributed AND expression. The
7145 interesting case is when at least one is true.
7146 Or, if both are the same, we can apply the identity
8236c8eb 7147 (x AND x) == x. */
e89065a1
SL
7148 if (integer_onep (partial))
7149 return t;
7150 else if (integer_onep (t))
7151 return partial;
7152 else if (same_bool_result_p (t, partial))
8236c8eb 7153 return t;
e89065a1
SL
7154 }
7155 }
7156 }
7157 }
7158 return NULL_TREE;
7159}
7160
7161/* Try to simplify the OR of two comparisons defined by
7162 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
7163 If this can be done without constructing an intermediate value,
7164 return the resulting tree; otherwise NULL_TREE is returned.
7165 This function is deliberately asymmetric as it recurses on SSA_DEFs
7166 in the first comparison but not the second. */
7167
7168static tree
5f487a34 7169or_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
e89065a1
SL
7170 enum tree_code code2, tree op2a, tree op2b)
7171{
ae22ac3c 7172 tree truth_type = truth_type_for (TREE_TYPE (op1a));
31ed6226 7173
e89065a1
SL
7174 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
7175 if (operand_equal_p (op1a, op2a, 0)
7176 && operand_equal_p (op1b, op2b, 0))
7177 {
eb9820c0 7178 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
7179 tree t = combine_comparisons (UNKNOWN_LOCATION,
7180 TRUTH_ORIF_EXPR, code1, code2,
31ed6226 7181 truth_type, op1a, op1b);
e89065a1
SL
7182 if (t)
7183 return t;
7184 }
7185
7186 /* Likewise the swapped case of the above. */
7187 if (operand_equal_p (op1a, op2b, 0)
7188 && operand_equal_p (op1b, op2a, 0))
7189 {
eb9820c0 7190 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
7191 tree t = combine_comparisons (UNKNOWN_LOCATION,
7192 TRUTH_ORIF_EXPR, code1,
7193 swap_tree_comparison (code2),
31ed6226 7194 truth_type, op1a, op1b);
e89065a1
SL
7195 if (t)
7196 return t;
7197 }
7198
e89065a1
SL
7199 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
7200 NAME's definition is a truth value. See if there are any simplifications
7201 that can be done against the NAME's definition. */
7202 if (TREE_CODE (op1a) == SSA_NAME
7203 && (code1 == NE_EXPR || code1 == EQ_EXPR)
7204 && (integer_zerop (op1b) || integer_onep (op1b)))
7205 {
7206 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
7207 || (code1 == NE_EXPR && integer_onep (op1b)));
355fe088 7208 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
e89065a1
SL
7209 switch (gimple_code (stmt))
7210 {
7211 case GIMPLE_ASSIGN:
7212 /* Try to simplify by copy-propagating the definition. */
5f487a34
LJH
7213 return or_var_with_comparison (type, op1a, invert, code2, op2a,
7214 op2b);
e89065a1
SL
7215
7216 case GIMPLE_PHI:
7217 /* If every argument to the PHI produces the same result when
7218 ORed with the second comparison, we win.
7219 Do not do this unless the type is bool since we need a bool
7220 result here anyway. */
7221 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
7222 {
7223 tree result = NULL_TREE;
7224 unsigned i;
7225 for (i = 0; i < gimple_phi_num_args (stmt); i++)
7226 {
7227 tree arg = gimple_phi_arg_def (stmt, i);
7228
7229 /* If this PHI has itself as an argument, ignore it.
7230 If all the other args produce the same result,
7231 we're still OK. */
7232 if (arg == gimple_phi_result (stmt))
7233 continue;
7234 else if (TREE_CODE (arg) == INTEGER_CST)
7235 {
7236 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
7237 {
7238 if (!result)
7239 result = boolean_true_node;
7240 else if (!integer_onep (result))
7241 return NULL_TREE;
7242 }
7243 else if (!result)
7244 result = fold_build2 (code2, boolean_type_node,
7245 op2a, op2b);
7246 else if (!same_bool_comparison_p (result,
7247 code2, op2a, op2b))
7248 return NULL_TREE;
7249 }
0e8b84ec
JJ
7250 else if (TREE_CODE (arg) == SSA_NAME
7251 && !SSA_NAME_IS_DEFAULT_DEF (arg))
e89065a1 7252 {
6c66f733 7253 tree temp;
355fe088 7254 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6c66f733
JJ
7255 /* In simple cases we can look through PHI nodes,
7256 but we have to be careful with loops.
7257 See PR49073. */
7258 if (! dom_info_available_p (CDI_DOMINATORS)
7259 || gimple_bb (def_stmt) == gimple_bb (stmt)
7260 || dominated_by_p (CDI_DOMINATORS,
7261 gimple_bb (def_stmt),
7262 gimple_bb (stmt)))
7263 return NULL_TREE;
5f487a34 7264 temp = or_var_with_comparison (type, arg, invert, code2,
6c66f733 7265 op2a, op2b);
e89065a1
SL
7266 if (!temp)
7267 return NULL_TREE;
7268 else if (!result)
7269 result = temp;
7270 else if (!same_bool_result_p (result, temp))
7271 return NULL_TREE;
7272 }
7273 else
7274 return NULL_TREE;
7275 }
7276 return result;
7277 }
7278
7279 default:
7280 break;
7281 }
7282 }
7283 return NULL_TREE;
7284}
7285
7286/* Try to simplify the OR of two comparisons, specified by
7287 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
7288 If this can be simplified to a single expression (without requiring
7289 introducing more SSA variables to hold intermediate values),
7290 return the resulting tree. Otherwise return NULL_TREE.
7291 If the result expression is non-null, it has boolean type. */
7292
7293tree
5f487a34
LJH
7294maybe_fold_or_comparisons (tree type,
7295 enum tree_code code1, tree op1a, tree op1b,
e89065a1
SL
7296 enum tree_code code2, tree op2a, tree op2b)
7297{
5f487a34 7298 if (tree t = or_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b))
e89065a1 7299 return t;
cfef45c8 7300
5f487a34
LJH
7301 if (tree t = or_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b))
7302 return t;
7303
7304 if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_IOR_EXPR, code1,
7305 op1a, op1b, code2, op2a,
7306 op2b))
7307 return t;
7308
7309 return NULL_TREE;
7310}
cfef45c8
RG
7311
7312/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
7313
7314 Either NULL_TREE, a simplified but non-constant or a constant
7315 is returned.
7316
7317 ??? This should go into a gimple-fold-inline.h file to be eventually
7318 privatized with the single valueize function used in the various TUs
7319 to avoid the indirect function call overhead. */
7320
7321tree
355fe088 7322gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
d2a85801 7323 tree (*gvalueize) (tree))
cfef45c8 7324{
5d75ad95 7325 gimple_match_op res_op;
45cc9f96
RB
7326 /* ??? The SSA propagators do not correctly deal with following SSA use-def
7327 edges if there are intermediate VARYING defs. For this reason
7328 do not follow SSA edges here even though SCCVN can technically
7329 just deal fine with that. */
5d75ad95 7330 if (gimple_simplify (stmt, &res_op, NULL, gvalueize, valueize))
45cc9f96 7331 {
34050b6b 7332 tree res = NULL_TREE;
5d75ad95
RS
7333 if (gimple_simplified_result_is_gimple_val (&res_op))
7334 res = res_op.ops[0];
34050b6b 7335 else if (mprts_hook)
5d75ad95 7336 res = mprts_hook (&res_op);
34050b6b 7337 if (res)
45cc9f96 7338 {
34050b6b
RB
7339 if (dump_file && dump_flags & TDF_DETAILS)
7340 {
7341 fprintf (dump_file, "Match-and-simplified ");
7342 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
7343 fprintf (dump_file, " to ");
ef6cb4c7 7344 print_generic_expr (dump_file, res);
34050b6b
RB
7345 fprintf (dump_file, "\n");
7346 }
7347 return res;
45cc9f96 7348 }
45cc9f96
RB
7349 }
7350
cfef45c8
RG
7351 location_t loc = gimple_location (stmt);
7352 switch (gimple_code (stmt))
7353 {
7354 case GIMPLE_ASSIGN:
7355 {
7356 enum tree_code subcode = gimple_assign_rhs_code (stmt);
7357
7358 switch (get_gimple_rhs_class (subcode))
7359 {
7360 case GIMPLE_SINGLE_RHS:
7361 {
7362 tree rhs = gimple_assign_rhs1 (stmt);
7363 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
7364
7365 if (TREE_CODE (rhs) == SSA_NAME)
7366 {
7367 /* If the RHS is an SSA_NAME, return its known constant value,
7368 if any. */
7369 return (*valueize) (rhs);
7370 }
7371 /* Handle propagating invariant addresses into address
7372 operations. */
7373 else if (TREE_CODE (rhs) == ADDR_EXPR
7374 && !is_gimple_min_invariant (rhs))
7375 {
a90c8804 7376 poly_int64 offset = 0;
cfef45c8
RG
7377 tree base;
7378 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
7379 &offset,
7380 valueize);
7381 if (base
7382 && (CONSTANT_CLASS_P (base)
7383 || decl_address_invariant_p (base)))
7384 return build_invariant_address (TREE_TYPE (rhs),
7385 base, offset);
7386 }
7387 else if (TREE_CODE (rhs) == CONSTRUCTOR
7388 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
928686b1
RS
7389 && known_eq (CONSTRUCTOR_NELTS (rhs),
7390 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
cfef45c8 7391 {
794e3180
RS
7392 unsigned i, nelts;
7393 tree val;
cfef45c8 7394
928686b1 7395 nelts = CONSTRUCTOR_NELTS (rhs);
5ebaa477 7396 tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
cfef45c8
RG
7397 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
7398 {
7399 val = (*valueize) (val);
7400 if (TREE_CODE (val) == INTEGER_CST
7401 || TREE_CODE (val) == REAL_CST
7402 || TREE_CODE (val) == FIXED_CST)
794e3180 7403 vec.quick_push (val);
cfef45c8
RG
7404 else
7405 return NULL_TREE;
7406 }
7407
5ebaa477 7408 return vec.build ();
cfef45c8 7409 }
bdf37f7a
JH
7410 if (subcode == OBJ_TYPE_REF)
7411 {
7412 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
7413 /* If callee is constant, we can fold away the wrapper. */
7414 if (is_gimple_min_invariant (val))
7415 return val;
7416 }
cfef45c8
RG
7417
7418 if (kind == tcc_reference)
7419 {
7420 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
7421 || TREE_CODE (rhs) == REALPART_EXPR
7422 || TREE_CODE (rhs) == IMAGPART_EXPR)
7423 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
7424 {
7425 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
7426 return fold_unary_loc (EXPR_LOCATION (rhs),
7427 TREE_CODE (rhs),
7428 TREE_TYPE (rhs), val);
7429 }
7430 else if (TREE_CODE (rhs) == BIT_FIELD_REF
7431 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
7432 {
7433 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
7434 return fold_ternary_loc (EXPR_LOCATION (rhs),
7435 TREE_CODE (rhs),
7436 TREE_TYPE (rhs), val,
7437 TREE_OPERAND (rhs, 1),
7438 TREE_OPERAND (rhs, 2));
7439 }
7440 else if (TREE_CODE (rhs) == MEM_REF
7441 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
7442 {
7443 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
7444 if (TREE_CODE (val) == ADDR_EXPR
7445 && is_gimple_min_invariant (val))
7446 {
7447 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
7448 unshare_expr (val),
7449 TREE_OPERAND (rhs, 1));
7450 if (tem)
7451 rhs = tem;
7452 }
7453 }
7454 return fold_const_aggregate_ref_1 (rhs, valueize);
7455 }
7456 else if (kind == tcc_declaration)
7457 return get_symbol_constant_value (rhs);
7458 return rhs;
7459 }
7460
7461 case GIMPLE_UNARY_RHS:
f3582e54 7462 return NULL_TREE;
cfef45c8
RG
7463
7464 case GIMPLE_BINARY_RHS:
4b1b9e64
RB
7465 /* Translate &x + CST into an invariant form suitable for
7466 further propagation. */
7467 if (subcode == POINTER_PLUS_EXPR)
7468 {
4b1b9e64
RB
7469 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
7470 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
4b1b9e64
RB
7471 if (TREE_CODE (op0) == ADDR_EXPR
7472 && TREE_CODE (op1) == INTEGER_CST)
7473 {
7474 tree off = fold_convert (ptr_type_node, op1);
4e3d3e40
RB
7475 return build1_loc
7476 (loc, ADDR_EXPR, TREE_TYPE (op0),
4b1b9e64
RB
7477 fold_build2 (MEM_REF,
7478 TREE_TYPE (TREE_TYPE (op0)),
7479 unshare_expr (op0), off));
7480 }
7481 }
59c20dc7
RB
7482 /* Canonicalize bool != 0 and bool == 0 appearing after
7483 valueization. While gimple_simplify handles this
7484 it can get confused by the ~X == 1 -> X == 0 transform
7485 which we cant reduce to a SSA name or a constant
7486 (and we have no way to tell gimple_simplify to not
7487 consider those transforms in the first place). */
7488 else if (subcode == EQ_EXPR
7489 || subcode == NE_EXPR)
7490 {
7491 tree lhs = gimple_assign_lhs (stmt);
7492 tree op0 = gimple_assign_rhs1 (stmt);
7493 if (useless_type_conversion_p (TREE_TYPE (lhs),
7494 TREE_TYPE (op0)))
7495 {
7496 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
7497 op0 = (*valueize) (op0);
8861704d
RB
7498 if (TREE_CODE (op0) == INTEGER_CST)
7499 std::swap (op0, op1);
7500 if (TREE_CODE (op1) == INTEGER_CST
7501 && ((subcode == NE_EXPR && integer_zerop (op1))
7502 || (subcode == EQ_EXPR && integer_onep (op1))))
7503 return op0;
59c20dc7
RB
7504 }
7505 }
4b1b9e64 7506 return NULL_TREE;
cfef45c8
RG
7507
7508 case GIMPLE_TERNARY_RHS:
7509 {
7510 /* Handle ternary operators that can appear in GIMPLE form. */
7511 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
7512 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
7513 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
cfef45c8 7514 return fold_ternary_loc (loc, subcode,
ce777eae
RB
7515 TREE_TYPE (gimple_assign_lhs (stmt)),
7516 op0, op1, op2);
cfef45c8
RG
7517 }
7518
7519 default:
7520 gcc_unreachable ();
7521 }
7522 }
7523
7524 case GIMPLE_CALL:
7525 {
25583c4f 7526 tree fn;
538dd0b7 7527 gcall *call_stmt = as_a <gcall *> (stmt);
25583c4f
RS
7528
7529 if (gimple_call_internal_p (stmt))
31e071ae
MP
7530 {
7531 enum tree_code subcode = ERROR_MARK;
7532 switch (gimple_call_internal_fn (stmt))
7533 {
7534 case IFN_UBSAN_CHECK_ADD:
7535 subcode = PLUS_EXPR;
7536 break;
7537 case IFN_UBSAN_CHECK_SUB:
7538 subcode = MINUS_EXPR;
7539 break;
7540 case IFN_UBSAN_CHECK_MUL:
7541 subcode = MULT_EXPR;
7542 break;
68fa96d6
ML
7543 case IFN_BUILTIN_EXPECT:
7544 {
7545 tree arg0 = gimple_call_arg (stmt, 0);
7546 tree op0 = (*valueize) (arg0);
7547 if (TREE_CODE (op0) == INTEGER_CST)
7548 return op0;
7549 return NULL_TREE;
7550 }
31e071ae
MP
7551 default:
7552 return NULL_TREE;
7553 }
368b454d
JJ
7554 tree arg0 = gimple_call_arg (stmt, 0);
7555 tree arg1 = gimple_call_arg (stmt, 1);
7556 tree op0 = (*valueize) (arg0);
7557 tree op1 = (*valueize) (arg1);
31e071ae
MP
7558
7559 if (TREE_CODE (op0) != INTEGER_CST
7560 || TREE_CODE (op1) != INTEGER_CST)
368b454d
JJ
7561 {
7562 switch (subcode)
7563 {
7564 case MULT_EXPR:
7565 /* x * 0 = 0 * x = 0 without overflow. */
7566 if (integer_zerop (op0) || integer_zerop (op1))
7567 return build_zero_cst (TREE_TYPE (arg0));
7568 break;
7569 case MINUS_EXPR:
7570 /* y - y = 0 without overflow. */
7571 if (operand_equal_p (op0, op1, 0))
7572 return build_zero_cst (TREE_TYPE (arg0));
7573 break;
7574 default:
7575 break;
7576 }
7577 }
7578 tree res
7579 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
31e071ae
MP
7580 if (res
7581 && TREE_CODE (res) == INTEGER_CST
7582 && !TREE_OVERFLOW (res))
7583 return res;
7584 return NULL_TREE;
7585 }
25583c4f
RS
7586
7587 fn = (*valueize) (gimple_call_fn (stmt));
cfef45c8 7588 if (TREE_CODE (fn) == ADDR_EXPR
6773658a 7589 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
3d78e008 7590 && fndecl_built_in_p (TREE_OPERAND (fn, 0))
5c944c6c
RB
7591 && gimple_builtin_call_types_compatible_p (stmt,
7592 TREE_OPERAND (fn, 0)))
cfef45c8
RG
7593 {
7594 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
a6a0570f 7595 tree retval;
cfef45c8
RG
7596 unsigned i;
7597 for (i = 0; i < gimple_call_num_args (stmt); ++i)
7598 args[i] = (*valueize) (gimple_call_arg (stmt, i));
a6a0570f 7599 retval = fold_builtin_call_array (loc,
538dd0b7 7600 gimple_call_return_type (call_stmt),
cfef45c8 7601 fn, gimple_call_num_args (stmt), args);
cfef45c8 7602 if (retval)
5c944c6c
RB
7603 {
7604 /* fold_call_expr wraps the result inside a NOP_EXPR. */
7605 STRIP_NOPS (retval);
538dd0b7
DM
7606 retval = fold_convert (gimple_call_return_type (call_stmt),
7607 retval);
5c944c6c 7608 }
cfef45c8
RG
7609 return retval;
7610 }
7611 return NULL_TREE;
7612 }
7613
7614 default:
7615 return NULL_TREE;
7616 }
7617}
7618
7619/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
7620 Returns NULL_TREE if folding to a constant is not possible, otherwise
7621 returns a constant according to is_gimple_min_invariant. */
7622
7623tree
355fe088 7624gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
cfef45c8
RG
7625{
7626 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
7627 if (res && is_gimple_min_invariant (res))
7628 return res;
7629 return NULL_TREE;
7630}
7631
7632
7633/* The following set of functions are supposed to fold references using
7634 their constant initializers. */
7635
cfef45c8
RG
7636/* See if we can find constructor defining value of BASE.
7637 When we know the consructor with constant offset (such as
7638 base is array[40] and we do know constructor of array), then
7639 BIT_OFFSET is adjusted accordingly.
7640
7641 As a special case, return error_mark_node when constructor
7642 is not explicitly available, but it is known to be zero
7643 such as 'static const int a;'. */
7644static tree
588db50c 7645get_base_constructor (tree base, poly_int64_pod *bit_offset,
cfef45c8
RG
7646 tree (*valueize)(tree))
7647{
588db50c 7648 poly_int64 bit_offset2, size, max_size;
ee45a32d
EB
7649 bool reverse;
7650
cfef45c8
RG
7651 if (TREE_CODE (base) == MEM_REF)
7652 {
6a5aca53
ML
7653 poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
7654 if (!boff.to_shwi (bit_offset))
7655 return NULL_TREE;
cfef45c8
RG
7656
7657 if (valueize
7658 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
7659 base = valueize (TREE_OPERAND (base, 0));
7660 if (!base || TREE_CODE (base) != ADDR_EXPR)
7661 return NULL_TREE;
7662 base = TREE_OPERAND (base, 0);
7663 }
13e88953
RB
7664 else if (valueize
7665 && TREE_CODE (base) == SSA_NAME)
7666 base = valueize (base);
cfef45c8
RG
7667
7668 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
7669 DECL_INITIAL. If BASE is a nested reference into another
7670 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
7671 the inner reference. */
7672 switch (TREE_CODE (base))
7673 {
7674 case VAR_DECL:
cfef45c8 7675 case CONST_DECL:
6a6dac52
JH
7676 {
7677 tree init = ctor_for_folding (base);
7678
688010ba 7679 /* Our semantic is exact opposite of ctor_for_folding;
6a6dac52
JH
7680 NULL means unknown, while error_mark_node is 0. */
7681 if (init == error_mark_node)
7682 return NULL_TREE;
7683 if (!init)
7684 return error_mark_node;
7685 return init;
7686 }
cfef45c8 7687
13e88953
RB
7688 case VIEW_CONVERT_EXPR:
7689 return get_base_constructor (TREE_OPERAND (base, 0),
7690 bit_offset, valueize);
7691
cfef45c8
RG
7692 case ARRAY_REF:
7693 case COMPONENT_REF:
ee45a32d
EB
7694 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
7695 &reverse);
588db50c 7696 if (!known_size_p (max_size) || maybe_ne (size, max_size))
cfef45c8
RG
7697 return NULL_TREE;
7698 *bit_offset += bit_offset2;
7699 return get_base_constructor (base, bit_offset, valueize);
7700
cfef45c8
RG
7701 case CONSTRUCTOR:
7702 return base;
7703
7704 default:
13e88953
RB
7705 if (CONSTANT_CLASS_P (base))
7706 return base;
7707
cfef45c8
RG
7708 return NULL_TREE;
7709 }
7710}
7711
35b4d3a6
MS
7712/* CTOR is CONSTRUCTOR of an array type. Fold a reference of SIZE bits
7713 to the memory at bit OFFSET. When non-null, TYPE is the expected
7714 type of the reference; otherwise the type of the referenced element
7715 is used instead. When SIZE is zero, attempt to fold a reference to
7716 the entire element which OFFSET refers to. Increment *SUBOFF by
7717 the bit offset of the accessed element. */
cfef45c8
RG
7718
7719static tree
7720fold_array_ctor_reference (tree type, tree ctor,
7721 unsigned HOST_WIDE_INT offset,
c44c2088 7722 unsigned HOST_WIDE_INT size,
35b4d3a6
MS
7723 tree from_decl,
7724 unsigned HOST_WIDE_INT *suboff)
cfef45c8 7725{
807e902e
KZ
7726 offset_int low_bound;
7727 offset_int elt_size;
807e902e 7728 offset_int access_index;
6a636014 7729 tree domain_type = NULL_TREE;
cfef45c8
RG
7730 HOST_WIDE_INT inner_offset;
7731
7732 /* Compute low bound and elt size. */
eb8f1123
RG
7733 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
7734 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
cfef45c8
RG
7735 if (domain_type && TYPE_MIN_VALUE (domain_type))
7736 {
6aa238a1 7737 /* Static constructors for variably sized objects make no sense. */
9ef2eff0
RB
7738 if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
7739 return NULL_TREE;
807e902e 7740 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
cfef45c8
RG
7741 }
7742 else
807e902e 7743 low_bound = 0;
6aa238a1 7744 /* Static constructors for variably sized objects make no sense. */
9ef2eff0
RB
7745 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
7746 return NULL_TREE;
807e902e 7747 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
cfef45c8 7748
35b4d3a6 7749 /* When TYPE is non-null, verify that it specifies a constant-sized
831e688a 7750 access of a multiple of the array element size. Avoid division
6aa238a1
MS
7751 by zero below when ELT_SIZE is zero, such as with the result of
7752 an initializer for a zero-length array or an empty struct. */
7753 if (elt_size == 0
7754 || (type
7755 && (!TYPE_SIZE_UNIT (type)
831e688a 7756 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST)))
cfef45c8
RG
7757 return NULL_TREE;
7758
7759 /* Compute the array index we look for. */
807e902e
KZ
7760 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
7761 elt_size);
27bcd47c 7762 access_index += low_bound;
cfef45c8
RG
7763
7764 /* And offset within the access. */
27bcd47c 7765 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
cfef45c8 7766
3c076c96
JJ
7767 unsigned HOST_WIDE_INT elt_sz = elt_size.to_uhwi ();
7768 if (size > elt_sz * BITS_PER_UNIT)
831e688a
RB
7769 {
7770 /* native_encode_expr constraints. */
7771 if (size > MAX_BITSIZE_MODE_ANY_MODE
7772 || size % BITS_PER_UNIT != 0
3c076c96
JJ
7773 || inner_offset % BITS_PER_UNIT != 0
7774 || elt_sz > MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT)
831e688a
RB
7775 return NULL_TREE;
7776
7777 unsigned ctor_idx;
7778 tree val = get_array_ctor_element_at_index (ctor, access_index,
7779 &ctor_idx);
7780 if (!val && ctor_idx >= CONSTRUCTOR_NELTS (ctor))
7781 return build_zero_cst (type);
7782
7783 /* native-encode adjacent ctor elements. */
7784 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
7785 unsigned bufoff = 0;
7786 offset_int index = 0;
7787 offset_int max_index = access_index;
7788 constructor_elt *elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
7789 if (!val)
7790 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
7791 else if (!CONSTANT_CLASS_P (val))
7792 return NULL_TREE;
7793 if (!elt->index)
7794 ;
7795 else if (TREE_CODE (elt->index) == RANGE_EXPR)
7796 {
7797 index = wi::to_offset (TREE_OPERAND (elt->index, 0));
7798 max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
7799 }
7800 else
7801 index = max_index = wi::to_offset (elt->index);
7802 index = wi::umax (index, access_index);
7803 do
7804 {
3c076c96
JJ
7805 if (bufoff + elt_sz > sizeof (buf))
7806 elt_sz = sizeof (buf) - bufoff;
7807 int len = native_encode_expr (val, buf + bufoff, elt_sz,
831e688a 7808 inner_offset / BITS_PER_UNIT);
3c076c96 7809 if (len != (int) elt_sz - inner_offset / BITS_PER_UNIT)
831e688a
RB
7810 return NULL_TREE;
7811 inner_offset = 0;
7812 bufoff += len;
7813
7814 access_index += 1;
7815 if (wi::cmpu (access_index, index) == 0)
7816 val = elt->value;
7817 else if (wi::cmpu (access_index, max_index) > 0)
7818 {
7819 ctor_idx++;
7820 if (ctor_idx >= CONSTRUCTOR_NELTS (ctor))
7821 {
7822 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
7823 ++max_index;
7824 }
7825 else
7826 {
7827 elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
7828 index = 0;
7829 max_index = access_index;
7830 if (!elt->index)
7831 ;
7832 else if (TREE_CODE (elt->index) == RANGE_EXPR)
7833 {
7834 index = wi::to_offset (TREE_OPERAND (elt->index, 0));
7835 max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
7836 }
7837 else
7838 index = max_index = wi::to_offset (elt->index);
7839 index = wi::umax (index, access_index);
7840 if (wi::cmpu (access_index, index) == 0)
7841 val = elt->value;
7842 else
7843 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
7844 }
7845 }
7846 }
7847 while (bufoff < size / BITS_PER_UNIT);
7848 *suboff += size;
7849 return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
7850 }
7851
6a636014 7852 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
35b4d3a6
MS
7853 {
7854 if (!size && TREE_CODE (val) != CONSTRUCTOR)
7855 {
7856 /* For the final reference to the entire accessed element
7857 (SIZE is zero), reset INNER_OFFSET, disegard TYPE (which
7858 may be null) in favor of the type of the element, and set
7859 SIZE to the size of the accessed element. */
7860 inner_offset = 0;
7861 type = TREE_TYPE (val);
6e41c27b 7862 size = elt_sz * BITS_PER_UNIT;
35b4d3a6 7863 }
6e41c27b
RB
7864 else if (size && access_index < CONSTRUCTOR_NELTS (ctor) - 1
7865 && TREE_CODE (val) == CONSTRUCTOR
7866 && (elt_sz * BITS_PER_UNIT - inner_offset) < size)
7867 /* If this isn't the last element in the CTOR and a CTOR itself
7868 and it does not cover the whole object we are requesting give up
7869 since we're not set up for combining from multiple CTORs. */
7870 return NULL_TREE;
35b4d3a6 7871
6e41c27b 7872 *suboff += access_index.to_uhwi () * elt_sz * BITS_PER_UNIT;
35b4d3a6
MS
7873 return fold_ctor_reference (type, val, inner_offset, size, from_decl,
7874 suboff);
7875 }
cfef45c8 7876
35b4d3a6
MS
7877 /* Memory not explicitly mentioned in constructor is 0 (or
7878 the reference is out of range). */
7879 return type ? build_zero_cst (type) : NULL_TREE;
cfef45c8
RG
7880}
7881
35b4d3a6
MS
7882/* CTOR is CONSTRUCTOR of an aggregate or vector. Fold a reference
7883 of SIZE bits to the memory at bit OFFSET. When non-null, TYPE
7884 is the expected type of the reference; otherwise the type of
7885 the referenced member is used instead. When SIZE is zero,
7886 attempt to fold a reference to the entire member which OFFSET
7887 refers to; in this case. Increment *SUBOFF by the bit offset
7888 of the accessed member. */
cfef45c8
RG
7889
7890static tree
7891fold_nonarray_ctor_reference (tree type, tree ctor,
7892 unsigned HOST_WIDE_INT offset,
c44c2088 7893 unsigned HOST_WIDE_INT size,
35b4d3a6
MS
7894 tree from_decl,
7895 unsigned HOST_WIDE_INT *suboff)
cfef45c8
RG
7896{
7897 unsigned HOST_WIDE_INT cnt;
7898 tree cfield, cval;
7899
7900 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
7901 cval)
7902 {
7903 tree byte_offset = DECL_FIELD_OFFSET (cfield);
7904 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
7905 tree field_size = DECL_SIZE (cfield);
35b4d3a6
MS
7906
7907 if (!field_size)
7908 {
7909 /* Determine the size of the flexible array member from
7910 the size of the initializer provided for it. */
7911 field_size = TYPE_SIZE (TREE_TYPE (cval));
7912 }
cfef45c8
RG
7913
7914 /* Variable sized objects in static constructors makes no sense,
7915 but field_size can be NULL for flexible array members. */
7916 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
7917 && TREE_CODE (byte_offset) == INTEGER_CST
7918 && (field_size != NULL_TREE
7919 ? TREE_CODE (field_size) == INTEGER_CST
7920 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
7921
7922 /* Compute bit offset of the field. */
35b4d3a6
MS
7923 offset_int bitoffset
7924 = (wi::to_offset (field_offset)
7925 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
cfef45c8 7926 /* Compute bit offset where the field ends. */
35b4d3a6 7927 offset_int bitoffset_end;
cfef45c8 7928 if (field_size != NULL_TREE)
807e902e 7929 bitoffset_end = bitoffset + wi::to_offset (field_size);
cfef45c8 7930 else
807e902e 7931 bitoffset_end = 0;
cfef45c8 7932
35b4d3a6
MS
7933 /* Compute the bit offset of the end of the desired access.
7934 As a special case, if the size of the desired access is
7935 zero, assume the access is to the entire field (and let
7936 the caller make any necessary adjustments by storing
7937 the actual bounds of the field in FIELDBOUNDS). */
7938 offset_int access_end = offset_int (offset);
7939 if (size)
7940 access_end += size;
7941 else
7942 access_end = bitoffset_end;
b8b2b009 7943
35b4d3a6
MS
7944 /* Is there any overlap between the desired access at
7945 [OFFSET, OFFSET+SIZE) and the offset of the field within
7946 the object at [BITOFFSET, BITOFFSET_END)? */
807e902e 7947 if (wi::cmps (access_end, bitoffset) > 0
cfef45c8 7948 && (field_size == NULL_TREE
807e902e 7949 || wi::lts_p (offset, bitoffset_end)))
cfef45c8 7950 {
35b4d3a6
MS
7951 *suboff += bitoffset.to_uhwi ();
7952
7953 if (!size && TREE_CODE (cval) != CONSTRUCTOR)
7954 {
7955 /* For the final reference to the entire accessed member
7956 (SIZE is zero), reset OFFSET, disegard TYPE (which may
7957 be null) in favor of the type of the member, and set
7958 SIZE to the size of the accessed member. */
7959 offset = bitoffset.to_uhwi ();
7960 type = TREE_TYPE (cval);
7961 size = (bitoffset_end - bitoffset).to_uhwi ();
7962 }
7963
7964 /* We do have overlap. Now see if the field is large enough
7965 to cover the access. Give up for accesses that extend
7966 beyond the end of the object or that span multiple fields. */
807e902e 7967 if (wi::cmps (access_end, bitoffset_end) > 0)
cfef45c8 7968 return NULL_TREE;
032c80e9 7969 if (offset < bitoffset)
b8b2b009 7970 return NULL_TREE;
35b4d3a6
MS
7971
7972 offset_int inner_offset = offset_int (offset) - bitoffset;
cfef45c8 7973 return fold_ctor_reference (type, cval,
27bcd47c 7974 inner_offset.to_uhwi (), size,
35b4d3a6 7975 from_decl, suboff);
cfef45c8
RG
7976 }
7977 }
14b7950f
MS
7978
7979 if (!type)
7980 return NULL_TREE;
7981
7982 return build_zero_cst (type);
cfef45c8
RG
7983}
7984
35b4d3a6 7985/* CTOR is value initializing memory. Fold a reference of TYPE and
14b7950f 7986 bit size POLY_SIZE to the memory at bit POLY_OFFSET. When POLY_SIZE
35b4d3a6
MS
7987 is zero, attempt to fold a reference to the entire subobject
7988 which OFFSET refers to. This is used when folding accesses to
7989 string members of aggregates. When non-null, set *SUBOFF to
7990 the bit offset of the accessed subobject. */
cfef45c8 7991
8403c2cf 7992tree
35b4d3a6
MS
7993fold_ctor_reference (tree type, tree ctor, const poly_uint64 &poly_offset,
7994 const poly_uint64 &poly_size, tree from_decl,
7995 unsigned HOST_WIDE_INT *suboff /* = NULL */)
cfef45c8
RG
7996{
7997 tree ret;
7998
7999 /* We found the field with exact match. */
35b4d3a6
MS
8000 if (type
8001 && useless_type_conversion_p (type, TREE_TYPE (ctor))
30acf282 8002 && known_eq (poly_offset, 0U))
9d60be38 8003 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
cfef45c8 8004
30acf282
RS
8005 /* The remaining optimizations need a constant size and offset. */
8006 unsigned HOST_WIDE_INT size, offset;
8007 if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
8008 return NULL_TREE;
8009
cfef45c8
RG
8010 /* We are at the end of walk, see if we can view convert the
8011 result. */
8012 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
8013 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
3d8208ce
TP
8014 && !compare_tree_int (TYPE_SIZE (type), size)
8015 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
cfef45c8 8016 {
9d60be38 8017 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
cfef45c8 8018 if (ret)
672d9f8e
RB
8019 {
8020 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
8021 if (ret)
8022 STRIP_USELESS_TYPE_CONVERSION (ret);
8023 }
cfef45c8
RG
8024 return ret;
8025 }
b2505143
RB
8026 /* For constants and byte-aligned/sized reads try to go through
8027 native_encode/interpret. */
8028 if (CONSTANT_CLASS_P (ctor)
8029 && BITS_PER_UNIT == 8
8030 && offset % BITS_PER_UNIT == 0
ea69031c 8031 && offset / BITS_PER_UNIT <= INT_MAX
b2505143 8032 && size % BITS_PER_UNIT == 0
ea69031c
JJ
8033 && size <= MAX_BITSIZE_MODE_ANY_MODE
8034 && can_native_interpret_type_p (type))
b2505143
RB
8035 {
8036 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1ff0a84c
JJ
8037 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
8038 offset / BITS_PER_UNIT);
8039 if (len > 0)
8040 return native_interpret_expr (type, buf, len);
b2505143 8041 }
cfef45c8
RG
8042 if (TREE_CODE (ctor) == CONSTRUCTOR)
8043 {
35b4d3a6
MS
8044 unsigned HOST_WIDE_INT dummy = 0;
8045 if (!suboff)
8046 suboff = &dummy;
cfef45c8 8047
ea69031c 8048 tree ret;
eb8f1123
RG
8049 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
8050 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
ea69031c
JJ
8051 ret = fold_array_ctor_reference (type, ctor, offset, size,
8052 from_decl, suboff);
8053 else
8054 ret = fold_nonarray_ctor_reference (type, ctor, offset, size,
8055 from_decl, suboff);
8056
8057 /* Fall back to native_encode_initializer. Needs to be done
8058 only in the outermost fold_ctor_reference call (because it itself
8059 recurses into CONSTRUCTORs) and doesn't update suboff. */
8060 if (ret == NULL_TREE
8061 && suboff == &dummy
8062 && BITS_PER_UNIT == 8
8063 && offset % BITS_PER_UNIT == 0
8064 && offset / BITS_PER_UNIT <= INT_MAX
8065 && size % BITS_PER_UNIT == 0
8066 && size <= MAX_BITSIZE_MODE_ANY_MODE
8067 && can_native_interpret_type_p (type))
8068 {
8069 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
8070 int len = native_encode_initializer (ctor, buf, size / BITS_PER_UNIT,
8071 offset / BITS_PER_UNIT);
8072 if (len > 0)
8073 return native_interpret_expr (type, buf, len);
8074 }
35b4d3a6 8075
ea69031c 8076 return ret;
cfef45c8
RG
8077 }
8078
8079 return NULL_TREE;
8080}
8081
8082/* Return the tree representing the element referenced by T if T is an
8083 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
8084 names using VALUEIZE. Return NULL_TREE otherwise. */
8085
8086tree
8087fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
8088{
8089 tree ctor, idx, base;
588db50c 8090 poly_int64 offset, size, max_size;
cfef45c8 8091 tree tem;
ee45a32d 8092 bool reverse;
cfef45c8 8093
f8a7df45
RG
8094 if (TREE_THIS_VOLATILE (t))
8095 return NULL_TREE;
8096
3a65ee74 8097 if (DECL_P (t))
cfef45c8
RG
8098 return get_symbol_constant_value (t);
8099
8100 tem = fold_read_from_constant_string (t);
8101 if (tem)
8102 return tem;
8103
8104 switch (TREE_CODE (t))
8105 {
8106 case ARRAY_REF:
8107 case ARRAY_RANGE_REF:
8108 /* Constant indexes are handled well by get_base_constructor.
8109 Only special case variable offsets.
8110 FIXME: This code can't handle nested references with variable indexes
8111 (they will be handled only by iteration of ccp). Perhaps we can bring
8112 get_ref_base_and_extent here and make it use a valueize callback. */
8113 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
8114 && valueize
8115 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
588db50c 8116 && poly_int_tree_p (idx))
cfef45c8
RG
8117 {
8118 tree low_bound, unit_size;
8119
8120 /* If the resulting bit-offset is constant, track it. */
8121 if ((low_bound = array_ref_low_bound (t),
588db50c 8122 poly_int_tree_p (low_bound))
cfef45c8 8123 && (unit_size = array_ref_element_size (t),
807e902e 8124 tree_fits_uhwi_p (unit_size)))
cfef45c8 8125 {
588db50c
RS
8126 poly_offset_int woffset
8127 = wi::sext (wi::to_poly_offset (idx)
8128 - wi::to_poly_offset (low_bound),
e287a2a1 8129 TYPE_PRECISION (sizetype));
a9e6359a
RB
8130 woffset *= tree_to_uhwi (unit_size);
8131 woffset *= BITS_PER_UNIT;
588db50c 8132 if (woffset.to_shwi (&offset))
807e902e 8133 {
807e902e
KZ
8134 base = TREE_OPERAND (t, 0);
8135 ctor = get_base_constructor (base, &offset, valueize);
8136 /* Empty constructor. Always fold to 0. */
8137 if (ctor == error_mark_node)
8138 return build_zero_cst (TREE_TYPE (t));
8139 /* Out of bound array access. Value is undefined,
8140 but don't fold. */
588db50c 8141 if (maybe_lt (offset, 0))
807e902e 8142 return NULL_TREE;
67914693 8143 /* We cannot determine ctor. */
807e902e
KZ
8144 if (!ctor)
8145 return NULL_TREE;
8146 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
8147 tree_to_uhwi (unit_size)
8148 * BITS_PER_UNIT,
8149 base);
8150 }
cfef45c8
RG
8151 }
8152 }
8153 /* Fallthru. */
8154
8155 case COMPONENT_REF:
8156 case BIT_FIELD_REF:
8157 case TARGET_MEM_REF:
8158 case MEM_REF:
ee45a32d 8159 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
cfef45c8
RG
8160 ctor = get_base_constructor (base, &offset, valueize);
8161
8162 /* Empty constructor. Always fold to 0. */
8163 if (ctor == error_mark_node)
8164 return build_zero_cst (TREE_TYPE (t));
8165 /* We do not know precise address. */
588db50c 8166 if (!known_size_p (max_size) || maybe_ne (max_size, size))
cfef45c8 8167 return NULL_TREE;
67914693 8168 /* We cannot determine ctor. */
cfef45c8
RG
8169 if (!ctor)
8170 return NULL_TREE;
8171
8172 /* Out of bound array access. Value is undefined, but don't fold. */
588db50c 8173 if (maybe_lt (offset, 0))
cfef45c8
RG
8174 return NULL_TREE;
8175
e4f1cbc3
JJ
8176 tem = fold_ctor_reference (TREE_TYPE (t), ctor, offset, size, base);
8177 if (tem)
8178 return tem;
8179
8180 /* For bit field reads try to read the representative and
8181 adjust. */
8182 if (TREE_CODE (t) == COMPONENT_REF
8183 && DECL_BIT_FIELD (TREE_OPERAND (t, 1))
8184 && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)))
8185 {
8186 HOST_WIDE_INT csize, coffset;
8187 tree field = TREE_OPERAND (t, 1);
8188 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
8189 if (INTEGRAL_TYPE_P (TREE_TYPE (repr))
8190 && size.is_constant (&csize)
8191 && offset.is_constant (&coffset)
8192 && (coffset % BITS_PER_UNIT != 0
8193 || csize % BITS_PER_UNIT != 0)
8194 && !reverse
8195 && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN)
8196 {
8197 poly_int64 bitoffset;
8198 poly_uint64 field_offset, repr_offset;
8199 if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
8200 && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
8201 bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
8202 else
8203 bitoffset = 0;
8204 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
8205 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
8206 HOST_WIDE_INT bitoff;
8207 int diff = (TYPE_PRECISION (TREE_TYPE (repr))
8208 - TYPE_PRECISION (TREE_TYPE (field)));
8209 if (bitoffset.is_constant (&bitoff)
8210 && bitoff >= 0
8211 && bitoff <= diff)
8212 {
8213 offset -= bitoff;
8214 size = tree_to_uhwi (DECL_SIZE (repr));
8215
8216 tem = fold_ctor_reference (TREE_TYPE (repr), ctor, offset,
8217 size, base);
8218 if (tem && TREE_CODE (tem) == INTEGER_CST)
8219 {
8220 if (!BYTES_BIG_ENDIAN)
8221 tem = wide_int_to_tree (TREE_TYPE (field),
8222 wi::lrshift (wi::to_wide (tem),
8223 bitoff));
8224 else
8225 tem = wide_int_to_tree (TREE_TYPE (field),
8226 wi::lrshift (wi::to_wide (tem),
8227 diff - bitoff));
8228 return tem;
8229 }
8230 }
8231 }
8232 }
8233 break;
cfef45c8
RG
8234
8235 case REALPART_EXPR:
8236 case IMAGPART_EXPR:
8237 {
8238 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
8239 if (c && TREE_CODE (c) == COMPLEX_CST)
8240 return fold_build1_loc (EXPR_LOCATION (t),
ea69031c 8241 TREE_CODE (t), TREE_TYPE (t), c);
cfef45c8
RG
8242 break;
8243 }
8244
8245 default:
8246 break;
8247 }
8248
8249 return NULL_TREE;
8250}
8251
8252tree
8253fold_const_aggregate_ref (tree t)
8254{
8255 return fold_const_aggregate_ref_1 (t, NULL);
8256}
06bc3ec7 8257
85942f45 8258/* Lookup virtual method with index TOKEN in a virtual table V
ec77d61f
JH
8259 at OFFSET.
8260 Set CAN_REFER if non-NULL to false if method
8261 is not referable or if the virtual table is ill-formed (such as rewriten
8262 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
81fa35bd
MJ
8263
8264tree
85942f45
JH
8265gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
8266 tree v,
ec77d61f
JH
8267 unsigned HOST_WIDE_INT offset,
8268 bool *can_refer)
81fa35bd 8269{
85942f45
JH
8270 tree vtable = v, init, fn;
8271 unsigned HOST_WIDE_INT size;
8c311b50
JH
8272 unsigned HOST_WIDE_INT elt_size, access_index;
8273 tree domain_type;
81fa35bd 8274
ec77d61f
JH
8275 if (can_refer)
8276 *can_refer = true;
8277
9de2f554 8278 /* First of all double check we have virtual table. */
8813a647 8279 if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
ec77d61f 8280 {
ec77d61f
JH
8281 /* Pass down that we lost track of the target. */
8282 if (can_refer)
8283 *can_refer = false;
8284 return NULL_TREE;
8285 }
9de2f554 8286
2aa3da06
JH
8287 init = ctor_for_folding (v);
8288
9de2f554 8289 /* The virtual tables should always be born with constructors
2aa3da06
JH
8290 and we always should assume that they are avaialble for
8291 folding. At the moment we do not stream them in all cases,
8292 but it should never happen that ctor seem unreachable. */
8293 gcc_assert (init);
8294 if (init == error_mark_node)
8295 {
ec77d61f
JH
8296 /* Pass down that we lost track of the target. */
8297 if (can_refer)
8298 *can_refer = false;
2aa3da06
JH
8299 return NULL_TREE;
8300 }
81fa35bd 8301 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
ae7e9ddd 8302 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
85942f45 8303 offset *= BITS_PER_UNIT;
81fa35bd 8304 offset += token * size;
9de2f554 8305
8c311b50
JH
8306 /* Lookup the value in the constructor that is assumed to be array.
8307 This is equivalent to
8308 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
8309 offset, size, NULL);
8310 but in a constant time. We expect that frontend produced a simple
8311 array without indexed initializers. */
8312
8313 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
8314 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
8315 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
8316 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
8317
8318 access_index = offset / BITS_PER_UNIT / elt_size;
8319 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
8320
bf8d8309
MP
8321 /* The C++ FE can now produce indexed fields, and we check if the indexes
8322 match. */
8c311b50
JH
8323 if (access_index < CONSTRUCTOR_NELTS (init))
8324 {
8325 fn = CONSTRUCTOR_ELT (init, access_index)->value;
bf8d8309
MP
8326 tree idx = CONSTRUCTOR_ELT (init, access_index)->index;
8327 gcc_checking_assert (!idx || tree_to_uhwi (idx) == access_index);
8c311b50
JH
8328 STRIP_NOPS (fn);
8329 }
8330 else
8331 fn = NULL;
9de2f554
JH
8332
8333 /* For type inconsistent program we may end up looking up virtual method
8334 in virtual table that does not contain TOKEN entries. We may overrun
8335 the virtual table and pick up a constant or RTTI info pointer.
8336 In any case the call is undefined. */
8337 if (!fn
8338 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
8339 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
8340 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
8341 else
8342 {
8343 fn = TREE_OPERAND (fn, 0);
8344
8345 /* When cgraph node is missing and function is not public, we cannot
8346 devirtualize. This can happen in WHOPR when the actual method
8347 ends up in other partition, because we found devirtualization
8348 possibility too late. */
8349 if (!can_refer_decl_in_current_unit_p (fn, vtable))
ec77d61f
JH
8350 {
8351 if (can_refer)
8352 {
8353 *can_refer = false;
8354 return fn;
8355 }
8356 return NULL_TREE;
8357 }
9de2f554 8358 }
81fa35bd 8359
7501ca28
RG
8360 /* Make sure we create a cgraph node for functions we'll reference.
8361 They can be non-existent if the reference comes from an entry
8362 of an external vtable for example. */
d52f5295 8363 cgraph_node::get_create (fn);
7501ca28 8364
81fa35bd
MJ
8365 return fn;
8366}
8367
85942f45
JH
8368/* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
8369 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
8370 KNOWN_BINFO carries the binfo describing the true type of
ec77d61f
JH
8371 OBJ_TYPE_REF_OBJECT(REF).
8372 Set CAN_REFER if non-NULL to false if method
8373 is not referable or if the virtual table is ill-formed (such as rewriten
8374 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
85942f45
JH
8375
8376tree
ec77d61f
JH
8377gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
8378 bool *can_refer)
85942f45
JH
8379{
8380 unsigned HOST_WIDE_INT offset;
8381 tree v;
8382
8383 v = BINFO_VTABLE (known_binfo);
8384 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
8385 if (!v)
8386 return NULL_TREE;
8387
8388 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
ec77d61f
JH
8389 {
8390 if (can_refer)
8391 *can_refer = false;
8392 return NULL_TREE;
8393 }
8394 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
85942f45
JH
8395}
8396
737f500a
RB
/* Given a pointer value T, return a simplified version of an
   indirection through T, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view.  */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  /* Only fold through plain pointers; ref-all pointers may alias
     anything and must be kept as-is.  */
  if (!POINTER_TYPE_P (subtype)
      || TYPE_REF_CAN_ALIAS_ALL (ptype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
	return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  /* Use the array's lower bound as the index; it need not
	     be zero for e.g. Fortran or Ada arrays.  */
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (TREE_CODE (min_val) == INTEGER_CST)
	    return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
	  unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
	  tree part_width = TYPE_SIZE (type);
	  unsigned HOST_WIDE_INT part_widthi
	    = tree_to_shwi (part_width) / BITS_PER_UNIT;
	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	  tree index = bitsize_int (indexi);
	  /* Only fold when the element actually lies within the
	     vector (the subparts count may be non-constant).  */
	  if (known_lt (offset / part_widthi,
			TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
	    return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
				part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
	{
	  /* The offset must name exactly the imaginary part.  */
	  tree size = TYPE_SIZE_UNIT (type);
	  if (tree_int_cst_equal (size, off))
	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
	}

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    wide_int_to_tree (ptype, wi::to_wide (off)));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      /* Recursively simplify the inner indirection first.  */
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
19e51b40
JJ
8517
8518/* Return true if CODE is an operation that when operating on signed
8519 integer types involves undefined behavior on overflow and the
8520 operation can be expressed with unsigned arithmetic. */
8521
8522bool
8523arith_code_with_undefined_signed_overflow (tree_code code)
8524{
8525 switch (code)
8526 {
8e2c037d 8527 case ABS_EXPR:
19e51b40
JJ
8528 case PLUS_EXPR:
8529 case MINUS_EXPR:
8530 case MULT_EXPR:
8531 case NEGATE_EXPR:
8532 case POINTER_PLUS_EXPR:
8533 return true;
8534 default:
8535 return false;
8536 }
8537}
8538
8539/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
8540 operation that can be transformed to unsigned arithmetic by converting
8541 its operand, carrying out the operation in the corresponding unsigned
8542 type and converting the result back to the original type.
8543
8544 Returns a sequence of statements that replace STMT and also contain
8545 a modified form of STMT itself. */
8546
8547gimple_seq
355fe088 8548rewrite_to_defined_overflow (gimple *stmt)
19e51b40
JJ
8549{
8550 if (dump_file && (dump_flags & TDF_DETAILS))
8551 {
8552 fprintf (dump_file, "rewriting stmt with undefined signed "
8553 "overflow ");
8554 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
8555 }
8556
8557 tree lhs = gimple_assign_lhs (stmt);
8558 tree type = unsigned_type_for (TREE_TYPE (lhs));
8559 gimple_seq stmts = NULL;
8e2c037d
RB
8560 if (gimple_assign_rhs_code (stmt) == ABS_EXPR)
8561 gimple_assign_set_rhs_code (stmt, ABSU_EXPR);
8562 else
8563 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
8564 {
8565 tree op = gimple_op (stmt, i);
8566 op = gimple_convert (&stmts, type, op);
8567 gimple_set_op (stmt, i, op);
8568 }
19e51b40
JJ
8569 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
8570 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
8571 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
f74c4b2c 8572 gimple_set_modified (stmt, true);
19e51b40 8573 gimple_seq_add_stmt (&stmts, stmt);
355fe088 8574 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
19e51b40
JJ
8575 gimple_seq_add_stmt (&stmts, cvt);
8576
8577 return stmts;
8578}
d4f5cd5e 8579
3d2cf79f 8580
c26de36d
RB
8581/* The valueization hook we use for the gimple_build API simplification.
8582 This makes us match fold_buildN behavior by only combining with
8583 statements in the sequence(s) we are currently building. */
8584
8585static tree
8586gimple_build_valueize (tree op)
8587{
8588 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
8589 return op;
8590 return NULL_TREE;
8591}
8592
3d2cf79f 8593/* Build the expression CODE OP0 of type TYPE with location LOC,
c26de36d 8594 simplifying it first if possible. Returns the built
3d2cf79f
RB
8595 expression value and appends statements possibly defining it
8596 to SEQ. */
8597
8598tree
8599gimple_build (gimple_seq *seq, location_t loc,
c26de36d 8600 enum tree_code code, tree type, tree op0)
3d2cf79f 8601{
c26de36d 8602 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
3d2cf79f
RB
8603 if (!res)
8604 {
a15ebbcd 8605 res = create_tmp_reg_or_ssa_name (type);
355fe088 8606 gimple *stmt;
3d2cf79f
RB
8607 if (code == REALPART_EXPR
8608 || code == IMAGPART_EXPR
8609 || code == VIEW_CONVERT_EXPR)
0d0e4a03 8610 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
3d2cf79f 8611 else
0d0e4a03 8612 stmt = gimple_build_assign (res, code, op0);
3d2cf79f
RB
8613 gimple_set_location (stmt, loc);
8614 gimple_seq_add_stmt_without_update (seq, stmt);
8615 }
8616 return res;
8617}
8618
8619/* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
c26de36d 8620 simplifying it first if possible. Returns the built
3d2cf79f
RB
8621 expression value and appends statements possibly defining it
8622 to SEQ. */
8623
8624tree
8625gimple_build (gimple_seq *seq, location_t loc,
c26de36d 8626 enum tree_code code, tree type, tree op0, tree op1)
3d2cf79f 8627{
c26de36d 8628 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
3d2cf79f
RB
8629 if (!res)
8630 {
a15ebbcd 8631 res = create_tmp_reg_or_ssa_name (type);
355fe088 8632 gimple *stmt = gimple_build_assign (res, code, op0, op1);
3d2cf79f
RB
8633 gimple_set_location (stmt, loc);
8634 gimple_seq_add_stmt_without_update (seq, stmt);
8635 }
8636 return res;
8637}
8638
8639/* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
c26de36d 8640 simplifying it first if possible. Returns the built
3d2cf79f
RB
8641 expression value and appends statements possibly defining it
8642 to SEQ. */
8643
8644tree
8645gimple_build (gimple_seq *seq, location_t loc,
c26de36d 8646 enum tree_code code, tree type, tree op0, tree op1, tree op2)
3d2cf79f
RB
8647{
8648 tree res = gimple_simplify (code, type, op0, op1, op2,
c26de36d 8649 seq, gimple_build_valueize);
3d2cf79f
RB
8650 if (!res)
8651 {
a15ebbcd 8652 res = create_tmp_reg_or_ssa_name (type);
355fe088 8653 gimple *stmt;
3d2cf79f 8654 if (code == BIT_FIELD_REF)
0d0e4a03
JJ
8655 stmt = gimple_build_assign (res, code,
8656 build3 (code, type, op0, op1, op2));
3d2cf79f 8657 else
0d0e4a03 8658 stmt = gimple_build_assign (res, code, op0, op1, op2);
3d2cf79f
RB
8659 gimple_set_location (stmt, loc);
8660 gimple_seq_add_stmt_without_update (seq, stmt);
8661 }
8662 return res;
8663}
8664
93a73251
MM
8665/* Build the call FN () with a result of type TYPE (or no result if TYPE is
8666 void) with a location LOC. Returns the built expression value (or NULL_TREE
8667 if TYPE is void) and appends statements possibly defining it to SEQ. */
8668
8669tree
8670gimple_build (gimple_seq *seq, location_t loc, combined_fn fn, tree type)
8671{
8672 tree res = NULL_TREE;
8673 gcall *stmt;
8674 if (internal_fn_p (fn))
8675 stmt = gimple_build_call_internal (as_internal_fn (fn), 0);
8676 else
8677 {
8678 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
8679 stmt = gimple_build_call (decl, 0);
8680 }
8681 if (!VOID_TYPE_P (type))
8682 {
8683 res = create_tmp_reg_or_ssa_name (type);
8684 gimple_call_set_lhs (stmt, res);
8685 }
8686 gimple_set_location (stmt, loc);
8687 gimple_seq_add_stmt_without_update (seq, stmt);
8688 return res;
8689}
8690
3d2cf79f
RB
8691/* Build the call FN (ARG0) with a result of type TYPE
8692 (or no result if TYPE is void) with location LOC,
c26de36d 8693 simplifying it first if possible. Returns the built
3d2cf79f
RB
8694 expression value (or NULL_TREE if TYPE is void) and appends
8695 statements possibly defining it to SEQ. */
8696
8697tree
eb69361d
RS
8698gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
8699 tree type, tree arg0)
3d2cf79f 8700{
c26de36d 8701 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
3d2cf79f
RB
8702 if (!res)
8703 {
eb69361d
RS
8704 gcall *stmt;
8705 if (internal_fn_p (fn))
8706 stmt = gimple_build_call_internal (as_internal_fn (fn), 1, arg0);
8707 else
8708 {
8709 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
8710 stmt = gimple_build_call (decl, 1, arg0);
8711 }
3d2cf79f
RB
8712 if (!VOID_TYPE_P (type))
8713 {
a15ebbcd 8714 res = create_tmp_reg_or_ssa_name (type);
3d2cf79f
RB
8715 gimple_call_set_lhs (stmt, res);
8716 }
8717 gimple_set_location (stmt, loc);
8718 gimple_seq_add_stmt_without_update (seq, stmt);
8719 }
8720 return res;
8721}
8722
8723/* Build the call FN (ARG0, ARG1) with a result of type TYPE
8724 (or no result if TYPE is void) with location LOC,
c26de36d 8725 simplifying it first if possible. Returns the built
3d2cf79f
RB
8726 expression value (or NULL_TREE if TYPE is void) and appends
8727 statements possibly defining it to SEQ. */
8728
8729tree
eb69361d
RS
8730gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
8731 tree type, tree arg0, tree arg1)
3d2cf79f 8732{
c26de36d 8733 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
3d2cf79f
RB
8734 if (!res)
8735 {
eb69361d
RS
8736 gcall *stmt;
8737 if (internal_fn_p (fn))
8738 stmt = gimple_build_call_internal (as_internal_fn (fn), 2, arg0, arg1);
8739 else
8740 {
8741 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
8742 stmt = gimple_build_call (decl, 2, arg0, arg1);
8743 }
3d2cf79f
RB
8744 if (!VOID_TYPE_P (type))
8745 {
a15ebbcd 8746 res = create_tmp_reg_or_ssa_name (type);
3d2cf79f
RB
8747 gimple_call_set_lhs (stmt, res);
8748 }
8749 gimple_set_location (stmt, loc);
8750 gimple_seq_add_stmt_without_update (seq, stmt);
8751 }
8752 return res;
8753}
8754
8755/* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
8756 (or no result if TYPE is void) with location LOC,
c26de36d 8757 simplifying it first if possible. Returns the built
3d2cf79f
RB
8758 expression value (or NULL_TREE if TYPE is void) and appends
8759 statements possibly defining it to SEQ. */
8760
8761tree
eb69361d
RS
8762gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
8763 tree type, tree arg0, tree arg1, tree arg2)
3d2cf79f 8764{
c26de36d
RB
8765 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
8766 seq, gimple_build_valueize);
3d2cf79f
RB
8767 if (!res)
8768 {
eb69361d
RS
8769 gcall *stmt;
8770 if (internal_fn_p (fn))
8771 stmt = gimple_build_call_internal (as_internal_fn (fn),
8772 3, arg0, arg1, arg2);
8773 else
8774 {
8775 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
8776 stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
8777 }
3d2cf79f
RB
8778 if (!VOID_TYPE_P (type))
8779 {
a15ebbcd 8780 res = create_tmp_reg_or_ssa_name (type);
3d2cf79f
RB
8781 gimple_call_set_lhs (stmt, res);
8782 }
8783 gimple_set_location (stmt, loc);
8784 gimple_seq_add_stmt_without_update (seq, stmt);
8785 }
8786 return res;
8787}
8788
8789/* Build the conversion (TYPE) OP with a result of type TYPE
8790 with location LOC if such conversion is neccesary in GIMPLE,
8791 simplifying it first.
8792 Returns the built expression value and appends
8793 statements possibly defining it to SEQ. */
d4f5cd5e
RB
8794
8795tree
8796gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
8797{
8798 if (useless_type_conversion_p (type, TREE_TYPE (op)))
8799 return op;
3d2cf79f 8800 return gimple_build (seq, loc, NOP_EXPR, type, op);
d4f5cd5e 8801}
68e57f04 8802
74e3c262
RB
8803/* Build the conversion (ptrofftype) OP with a result of a type
8804 compatible with ptrofftype with location LOC if such conversion
8805 is neccesary in GIMPLE, simplifying it first.
8806 Returns the built expression value and appends
8807 statements possibly defining it to SEQ. */
8808
8809tree
8810gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
8811{
8812 if (ptrofftype_p (TREE_TYPE (op)))
8813 return op;
8814 return gimple_convert (seq, loc, sizetype, op);
8815}
8816
e7c45b66
RS
8817/* Build a vector of type TYPE in which each element has the value OP.
8818 Return a gimple value for the result, appending any new statements
8819 to SEQ. */
8820
8821tree
8822gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
8823 tree op)
8824{
928686b1
RS
8825 if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
8826 && !CONSTANT_CLASS_P (op))
8827 return gimple_build (seq, loc, VEC_DUPLICATE_EXPR, type, op);
8828
e7c45b66
RS
8829 tree res, vec = build_vector_from_val (type, op);
8830 if (is_gimple_val (vec))
8831 return vec;
8832 if (gimple_in_ssa_p (cfun))
8833 res = make_ssa_name (type);
8834 else
8835 res = create_tmp_reg (type);
8836 gimple *stmt = gimple_build_assign (res, vec);
8837 gimple_set_location (stmt, loc);
8838 gimple_seq_add_stmt_without_update (seq, stmt);
8839 return res;
8840}
8841
abe73c3d
RS
8842/* Build a vector from BUILDER, handling the case in which some elements
8843 are non-constant. Return a gimple value for the result, appending any
8844 new instructions to SEQ.
8845
8846 BUILDER must not have a stepped encoding on entry. This is because
8847 the function is not geared up to handle the arithmetic that would
8848 be needed in the variable case, and any code building a vector that
8849 is known to be constant should use BUILDER->build () directly. */
e7c45b66
RS
8850
8851tree
abe73c3d
RS
8852gimple_build_vector (gimple_seq *seq, location_t loc,
8853 tree_vector_builder *builder)
e7c45b66 8854{
abe73c3d
RS
8855 gcc_assert (builder->nelts_per_pattern () <= 2);
8856 unsigned int encoded_nelts = builder->encoded_nelts ();
8857 for (unsigned int i = 0; i < encoded_nelts; ++i)
8ebedfcd 8858 if (!CONSTANT_CLASS_P ((*builder)[i]))
e7c45b66 8859 {
abe73c3d 8860 tree type = builder->type ();
928686b1 8861 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
e7c45b66
RS
8862 vec<constructor_elt, va_gc> *v;
8863 vec_alloc (v, nelts);
8864 for (i = 0; i < nelts; ++i)
abe73c3d 8865 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));
e7c45b66
RS
8866
8867 tree res;
8868 if (gimple_in_ssa_p (cfun))
8869 res = make_ssa_name (type);
8870 else
8871 res = create_tmp_reg (type);
8872 gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
8873 gimple_set_location (stmt, loc);
8874 gimple_seq_add_stmt_without_update (seq, stmt);
8875 return res;
8876 }
abe73c3d 8877 return builder->build ();
e7c45b66
RS
8878}
8879
93a73251
MM
8880/* Emit gimple statements into &stmts that take a value given in OLD_SIZE
8881 and generate a value guaranteed to be rounded upwards to ALIGN.
8882
8883 Return the tree node representing this size, it is of TREE_TYPE TYPE. */
8884
8885tree
8886gimple_build_round_up (gimple_seq *seq, location_t loc, tree type,
8887 tree old_size, unsigned HOST_WIDE_INT align)
8888{
8889 unsigned HOST_WIDE_INT tg_mask = align - 1;
8890 /* tree new_size = (old_size + tg_mask) & ~tg_mask; */
8891 gcc_assert (INTEGRAL_TYPE_P (type));
8892 tree tree_mask = build_int_cst (type, tg_mask);
8893 tree oversize = gimple_build (seq, loc, PLUS_EXPR, type, old_size,
8894 tree_mask);
8895
8896 tree mask = build_int_cst (type, -align);
8897 return gimple_build (seq, loc, BIT_AND_EXPR, type, oversize, mask);
8898}
8899
68e57f04
RS
8900/* Return true if the result of assignment STMT is known to be non-negative.
8901 If the return value is based on the assumption that signed overflow is
8902 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
8903 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
8904
8905static bool
8906gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
8907 int depth)
8908{
8909 enum tree_code code = gimple_assign_rhs_code (stmt);
ce777eae 8910 tree type = TREE_TYPE (gimple_assign_lhs (stmt));
68e57f04
RS
8911 switch (get_gimple_rhs_class (code))
8912 {
8913 case GIMPLE_UNARY_RHS:
8914 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
ce777eae 8915 type,
68e57f04
RS
8916 gimple_assign_rhs1 (stmt),
8917 strict_overflow_p, depth);
8918 case GIMPLE_BINARY_RHS:
8919 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
ce777eae 8920 type,
68e57f04
RS
8921 gimple_assign_rhs1 (stmt),
8922 gimple_assign_rhs2 (stmt),
8923 strict_overflow_p, depth);
8924 case GIMPLE_TERNARY_RHS:
8925 return false;
8926 case GIMPLE_SINGLE_RHS:
8927 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
8928 strict_overflow_p, depth);
8929 case GIMPLE_INVALID_RHS:
8930 break;
8931 }
8932 gcc_unreachable ();
8933}
8934
8935/* Return true if return value of call STMT is known to be non-negative.
8936 If the return value is based on the assumption that signed overflow is
8937 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
8938 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
8939
8940static bool
8941gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
8942 int depth)
8943{
8944 tree arg0 = gimple_call_num_args (stmt) > 0 ?
8945 gimple_call_arg (stmt, 0) : NULL_TREE;
8946 tree arg1 = gimple_call_num_args (stmt) > 1 ?
8947 gimple_call_arg (stmt, 1) : NULL_TREE;
ce777eae
RB
8948 tree lhs = gimple_call_lhs (stmt);
8949 return (lhs
8950 && tree_call_nonnegative_warnv_p (TREE_TYPE (lhs),
8951 gimple_call_combined_fn (stmt),
8952 arg0, arg1,
8953 strict_overflow_p, depth));
68e57f04
RS
8954}
8955
4534c203
RB
8956/* Return true if return value of call STMT is known to be non-negative.
8957 If the return value is based on the assumption that signed overflow is
8958 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
8959 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
8960
8961static bool
8962gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
8963 int depth)
8964{
8965 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
8966 {
8967 tree arg = gimple_phi_arg_def (stmt, i);
8968 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
8969 return false;
8970 }
8971 return true;
8972}
8973
68e57f04
RS
8974/* Return true if STMT is known to compute a non-negative value.
8975 If the return value is based on the assumption that signed overflow is
8976 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
8977 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
8978
8979bool
8980gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
8981 int depth)
8982{
8983 switch (gimple_code (stmt))
8984 {
8985 case GIMPLE_ASSIGN:
8986 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
8987 depth);
8988 case GIMPLE_CALL:
8989 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
8990 depth);
4534c203
RB
8991 case GIMPLE_PHI:
8992 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
8993 depth);
68e57f04
RS
8994 default:
8995 return false;
8996 }
8997}
67dbe582
RS
8998
8999/* Return true if the floating-point value computed by assignment STMT
9000 is known to have an integer value. We also allow +Inf, -Inf and NaN
5a00b0aa 9001 to be considered integer values. Return false for signaling NaN.
67dbe582
RS
9002
9003 DEPTH is the current nesting depth of the query. */
9004
9005static bool
9006gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
9007{
9008 enum tree_code code = gimple_assign_rhs_code (stmt);
9009 switch (get_gimple_rhs_class (code))
9010 {
9011 case GIMPLE_UNARY_RHS:
9012 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
9013 gimple_assign_rhs1 (stmt), depth);
9014 case GIMPLE_BINARY_RHS:
9015 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
9016 gimple_assign_rhs1 (stmt),
9017 gimple_assign_rhs2 (stmt), depth);
9018 case GIMPLE_TERNARY_RHS:
9019 return false;
9020 case GIMPLE_SINGLE_RHS:
9021 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
9022 case GIMPLE_INVALID_RHS:
9023 break;
9024 }
9025 gcc_unreachable ();
9026}
9027
9028/* Return true if the floating-point value computed by call STMT is known
9029 to have an integer value. We also allow +Inf, -Inf and NaN to be
5a00b0aa 9030 considered integer values. Return false for signaling NaN.
67dbe582
RS
9031
9032 DEPTH is the current nesting depth of the query. */
9033
9034static bool
9035gimple_call_integer_valued_real_p (gimple *stmt, int depth)
9036{
9037 tree arg0 = (gimple_call_num_args (stmt) > 0
9038 ? gimple_call_arg (stmt, 0)
9039 : NULL_TREE);
9040 tree arg1 = (gimple_call_num_args (stmt) > 1
9041 ? gimple_call_arg (stmt, 1)
9042 : NULL_TREE);
1d9da71f 9043 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
67dbe582
RS
9044 arg0, arg1, depth);
9045}
9046
9047/* Return true if the floating-point result of phi STMT is known to have
9048 an integer value. We also allow +Inf, -Inf and NaN to be considered
5a00b0aa 9049 integer values. Return false for signaling NaN.
67dbe582
RS
9050
9051 DEPTH is the current nesting depth of the query. */
9052
9053static bool
9054gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
9055{
9056 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
9057 {
9058 tree arg = gimple_phi_arg_def (stmt, i);
9059 if (!integer_valued_real_single_p (arg, depth + 1))
9060 return false;
9061 }
9062 return true;
9063}
9064
9065/* Return true if the floating-point value computed by STMT is known
9066 to have an integer value. We also allow +Inf, -Inf and NaN to be
5a00b0aa 9067 considered integer values. Return false for signaling NaN.
67dbe582
RS
9068
9069 DEPTH is the current nesting depth of the query. */
9070
9071bool
9072gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
9073{
9074 switch (gimple_code (stmt))
9075 {
9076 case GIMPLE_ASSIGN:
9077 return gimple_assign_integer_valued_real_p (stmt, depth);
9078 case GIMPLE_CALL:
9079 return gimple_call_integer_valued_real_p (stmt, depth);
9080 case GIMPLE_PHI:
9081 return gimple_phi_integer_valued_real_p (stmt, depth);
9082 default:
9083 return false;
9084 }
9085}