]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/gimple-fold.cc
aarch64: Avoid using mismatched ZERO ZA sizes
[thirdparty/gcc.git] / gcc / gimple-fold.cc
CommitLineData
cbdd87d4 1/* Statement simplification on GIMPLE.
a945c346 2 Copyright (C) 2010-2024 Free Software Foundation, Inc.
e53b6e56 3 Split out from tree-ssa-ccp.cc.
cbdd87d4
RG
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it
8under the terms of the GNU General Public License as published by the
9Free Software Foundation; either version 3, or (at your option) any
10later version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT
13ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
c7131fb2 24#include "backend.h"
957060b5
AM
25#include "target.h"
26#include "rtl.h"
cbdd87d4 27#include "tree.h"
c7131fb2 28#include "gimple.h"
957060b5 29#include "predict.h"
c7131fb2 30#include "ssa.h"
957060b5
AM
31#include "cgraph.h"
32#include "gimple-pretty-print.h"
2a837de2 33#include "gimple-ssa-warn-access.h"
cc8bea0a 34#include "gimple-ssa-warn-restrict.h"
c7131fb2 35#include "fold-const.h"
36566b39
PK
36#include "stmt.h"
37#include "expr.h"
38#include "stor-layout.h"
7ee2468b 39#include "dumpfile.h"
ba206889 40#include "gimple-iterator.h"
2fb9a547 41#include "gimple-fold.h"
45b0be94 42#include "gimplify.h"
442b4905
AM
43#include "tree-into-ssa.h"
44#include "tree-dfa.h"
025d57f0 45#include "tree-object-size.h"
7a300452 46#include "tree-ssa.h"
cbdd87d4 47#include "tree-ssa-propagate.h"
450ad0cd 48#include "ipa-utils.h"
4484a35a 49#include "tree-ssa-address.h"
862d0b35 50#include "langhooks.h"
19e51b40 51#include "gimplify-me.h"
2b5f0895 52#include "dbgcnt.h"
9b2b7279 53#include "builtins.h"
e0ee10ed
RB
54#include "tree-eh.h"
55#include "gimple-match.h"
48126138 56#include "gomp-constants.h"
f869c12f 57#include "optabs-query.h"
629b3d75 58#include "omp-general.h"
abd3a68c 59#include "tree-cfg.h"
a918bfbf 60#include "fold-const-call.h"
314e6352
ML
61#include "stringpool.h"
62#include "attribs.h"
45b2222a 63#include "asan.h"
025d57f0
MS
64#include "diagnostic-core.h"
65#include "intl.h"
6a33d0ff 66#include "calls.h"
5ebaa477 67#include "tree-vector-builder.h"
5d0d5d68 68#include "tree-ssa-strlen.h"
e7868dc6 69#include "varasm.h"
e9fff24c 70#include "internal-fn.h"
822a0823 71#include "gimple-range.h"
cbdd87d4 72
/* Selects what get_range_strlen computes for its argument.  */

enum strlen_range_kind {
  /* Compute the exact constant string length.  */
  SRK_STRLEN,
  /* Compute the maximum constant string length.  */
  SRK_STRLENMAX,
  /* Compute a range of string lengths bounded by object sizes.  When
     the length of a string cannot be determined, consider as the upper
     bound the size of the enclosing object the string may be a member
     or element of.  Also determine the size of the largest character
     array the string may refer to.  */
  SRK_LENRANGE,
  /* Determine the integer value of the argument (not string length).  */
  SRK_INT_VALUE
};

/* Forward declaration; defined later in this file.  */
static bool
get_range_strlen (tree, bitmap, strlen_range_kind, c_strlen_data *, unsigned);
fb471a13 90
/* Return true when DECL can be referenced from current unit.
   FROM_DECL (if non-null) specifies constructor of variable DECL was taken
   from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
	In this case we have static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred by external vtables that
	we devirtualize only during final compilation stage.
	At this time we already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || !VAR_OR_FUNCTION_DECL_P (decl))
    return true;

  /* Static objects can be referred only if they are defined and not optimized
     out yet.  */
  if (!TREE_PUBLIC (decl))
    {
      if (DECL_EXTERNAL (decl))
	return false;
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      /* An inlined-to function no longer has an output body of its own.  */
      return !node || !node->inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var or var that has been optimized out.  */
  if (!from_decl
      || !VAR_P (from_decl)
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When function is public, we always can introduce new reference.
     Exception are the COMDAT functions where introducing a direct
     reference imply need to include function body in the current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have COMDAT.  We are going to check if we still have definition
     or if the definition is going to be output in other partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when corresponding vtable is output.
     This is however not possible - ABI specify that COMDATs are output in
     units where they are used and when the other unit was compiled with LTO
     it is possible that vtable was kept public while the function itself
     was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->inlined_to;
}
193
a15ebbcd
ML
194/* Create a temporary for TYPE for a statement STMT. If the current function
195 is in SSA form, a SSA name is created. Otherwise a temporary register
196 is made. */
197
edc19e03
WS
198tree
199create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
a15ebbcd
ML
200{
201 if (gimple_in_ssa_p (cfun))
202 return make_ssa_name (type, stmt);
203 else
204 return create_tmp_reg (type);
205}
206
/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it into
   acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies variable whose constructor contains CVAL.
   Returns the canonicalized value, NULL_TREE if the address cannot be
   referenced from the current unit, or the original value when no
   transformation applies.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  if (CONSTANT_CLASS_P (cval))
    return cval;

  tree orig_cval = cval;
  STRIP_NOPS (cval);
  /* Fold &ptr p+ cst into &MEM[ptr + cst] so the whole expression
     becomes an invariant address.  */
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  /* Refer to the anonymous decl backing the compound literal.  */
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      /* Give up when the referenced symbol is not usable from here
	 (see can_refer_decl_in_current_unit_p for the reasons).  */
      if (VAR_OR_FUNCTION_DECL_P (base)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_TYPE (base) == error_mark_node)
	return NULL_TREE;
      if (VAR_P (base))
	/* ??? We should be able to assert that TREE_ADDRESSABLE is set,
	   but since the use can be in a debug stmt we can't.  */
	;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  /* In CONSTRUCTORs we may see unfolded constants like (int (*) ()) 0.  */
  if (TREE_CODE (cval) == INTEGER_CST)
    {
      if (TREE_OVERFLOW_P (cval))
	cval = drop_tree_overflow (cval);
      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  return orig_cval;
}
cbdd87d4
RG
280
281/* If SYM is a constant variable with known value, return the value.
282 NULL_TREE is returned otherwise. */
283
284tree
285get_symbol_constant_value (tree sym)
286{
6a6dac52
JH
287 tree val = ctor_for_folding (sym);
288 if (val != error_mark_node)
cbdd87d4 289 {
cbdd87d4
RG
290 if (val)
291 {
9d60be38 292 val = canonicalize_constructor_val (unshare_expr (val), sym);
5c12507f
RB
293 if (val
294 && is_gimple_min_invariant (val)
295 && useless_type_conversion_p (TREE_TYPE (sym), TREE_TYPE (val)))
17f39a39 296 return val;
1389294c
JH
297 else
298 return NULL_TREE;
cbdd87d4
RG
299 }
300 /* Variables declared 'const' without an initializer
301 have zero as the initializer if they may not be
302 overridden at link or run time. */
303 if (!val
b8a8c472 304 && is_gimple_reg_type (TREE_TYPE (sym)))
e8160c9a 305 return build_zero_cst (TREE_TYPE (sym));
cbdd87d4
RG
306 }
307
308 return NULL_TREE;
309}
310
311
cbdd87d4 312
0bf8cd9d
RB
313/* Subroutine of fold_stmt. We perform constant folding of the
314 memory reference tree EXPR. */
cbdd87d4
RG
315
316static tree
0bf8cd9d 317maybe_fold_reference (tree expr)
cbdd87d4 318{
2301a394 319 tree result = NULL_TREE;
cbdd87d4 320
f0eddb90
RG
321 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
322 || TREE_CODE (expr) == REALPART_EXPR
323 || TREE_CODE (expr) == IMAGPART_EXPR)
324 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
2301a394 325 result = fold_unary_loc (EXPR_LOCATION (expr),
f0eddb90
RG
326 TREE_CODE (expr),
327 TREE_TYPE (expr),
2301a394
RB
328 TREE_OPERAND (expr, 0));
329 else if (TREE_CODE (expr) == BIT_FIELD_REF
330 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
331 result = fold_ternary_loc (EXPR_LOCATION (expr),
332 TREE_CODE (expr),
333 TREE_TYPE (expr),
334 TREE_OPERAND (expr, 0),
335 TREE_OPERAND (expr, 1),
336 TREE_OPERAND (expr, 2));
337 else
338 result = fold_const_aggregate_ref (expr);
f0eddb90 339
2301a394 340 if (result && is_gimple_min_invariant (result))
f0eddb90 341 return result;
cbdd87d4 342
cbdd87d4
RG
343 return NULL_TREE;
344}
345
/* Return true if EXPR is an acceptable right-hand-side for a
   GIMPLE assignment.  We validate the entire tree, not just
   the root node, thus catching expressions that embed complex
   operands that are not permitted in GIMPLE.  This function
   is needed because the folding routines in fold-const.cc
   may return such expressions in some cases, e.g., an array
   access with an embedded index addition.  It may make more
   sense to have folding routines that are sensitive to the
   constraints on GIMPLE operands, rather than abandoning
   any attempt to fold if the usual folding turns out to be too
   aggressive.  */

bool
valid_gimple_rhs_p (tree expr)
{
  enum tree_code code = TREE_CODE (expr);

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
      if (!is_gimple_variable (expr))
	return false;
      break;

    case tcc_constant:
      /* All constants are ok.  */
      break;

    case tcc_comparison:
      /* GENERIC allows comparisons with non-boolean types, reject
	 those for GIMPLE.  Let vector-typed comparisons pass - rules
	 for GENERIC and GIMPLE are the same here.  */
      if (!(INTEGRAL_TYPE_P (TREE_TYPE (expr))
	    && (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE
		|| TYPE_PRECISION (TREE_TYPE (expr)) == 1))
	  && ! VECTOR_TYPE_P (TREE_TYPE (expr)))
	return false;

      /* Fallthru.  */
    case tcc_binary:
      if (!is_gimple_val (TREE_OPERAND (expr, 0))
	  || !is_gimple_val (TREE_OPERAND (expr, 1)))
	return false;
      break;

    case tcc_unary:
      if (!is_gimple_val (TREE_OPERAND (expr, 0)))
	return false;
      break;

    case tcc_expression:
      switch (code)
	{
	case ADDR_EXPR:
	  {
	    tree t;
	    if (is_gimple_min_invariant (expr))
	      return true;
	    /* Walk down the reference chain; array indices inside an
	       address computation must themselves be GIMPLE values.  */
	    t = TREE_OPERAND (expr, 0);
	    while (handled_component_p (t))
	      {
		/* ??? More checks needed, see the GIMPLE verifier.  */
		if ((TREE_CODE (t) == ARRAY_REF
		     || TREE_CODE (t) == ARRAY_RANGE_REF)
		    && !is_gimple_val (TREE_OPERAND (t, 1)))
		  return false;
		t = TREE_OPERAND (t, 0);
	      }
	    if (!is_gimple_id (t))
	      return false;
	  }
	  break;

	default:
	  if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
	    {
	      if (!is_gimple_val (TREE_OPERAND (expr, 0))
		  || !is_gimple_val (TREE_OPERAND (expr, 1))
		  || !is_gimple_val (TREE_OPERAND (expr, 2)))
		return false;
	      break;
	    }
	  return false;
	}
      break;

    case tcc_vl_exp:
      return false;

    case tcc_exceptional:
      if (code == CONSTRUCTOR)
	{
	  unsigned i;
	  tree elt;
	  /* A CONSTRUCTOR is valid only when every element is.  */
	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (expr), i, elt)
	    if (!is_gimple_val (elt))
	      return false;
	  return true;
	}
      if (code != SSA_NAME)
	return false;
      break;

    case tcc_reference:
      if (code == BIT_FIELD_REF)
	return is_gimple_val (TREE_OPERAND (expr, 0));
      return false;

    default:
      return false;
    }

  return true;
}
460
cbdd87d4
RG
461
462/* Attempt to fold an assignment statement pointed-to by SI. Returns a
463 replacement rhs for the statement or NULL_TREE if no simplification
464 could be made. It is assumed that the operands have been previously
465 folded. */
466
467static tree
468fold_gimple_assign (gimple_stmt_iterator *si)
469{
355fe088 470 gimple *stmt = gsi_stmt (*si);
cbdd87d4
RG
471 enum tree_code subcode = gimple_assign_rhs_code (stmt);
472 location_t loc = gimple_location (stmt);
473
474 tree result = NULL_TREE;
475
476 switch (get_gimple_rhs_class (subcode))
477 {
478 case GIMPLE_SINGLE_RHS:
479 {
480 tree rhs = gimple_assign_rhs1 (stmt);
481
8c00ba08
JW
482 if (TREE_CLOBBER_P (rhs))
483 return NULL_TREE;
484
4e71066d 485 if (REFERENCE_CLASS_P (rhs))
0bf8cd9d 486 return maybe_fold_reference (rhs);
cbdd87d4 487
bdf37f7a
JH
488 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
489 {
490 tree val = OBJ_TYPE_REF_EXPR (rhs);
491 if (is_gimple_min_invariant (val))
492 return val;
f8a39967 493 else if (flag_devirtualize && virtual_method_call_p (rhs))
bdf37f7a
JH
494 {
495 bool final;
496 vec <cgraph_node *>targets
f8a39967 497 = possible_polymorphic_call_targets (rhs, stmt, &final);
2b5f0895 498 if (final && targets.length () <= 1 && dbg_cnt (devirt))
bdf37f7a 499 {
2b5f0895
XDL
500 if (dump_enabled_p ())
501 {
4f5b9c80 502 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
2b5f0895
XDL
503 "resolving virtual function address "
504 "reference to function %s\n",
505 targets.length () == 1
506 ? targets[0]->name ()
3ef276e4 507 : "NULL");
2b5f0895 508 }
3ef276e4
RB
509 if (targets.length () == 1)
510 {
511 val = fold_convert (TREE_TYPE (val),
512 build_fold_addr_expr_loc
513 (loc, targets[0]->decl));
514 STRIP_USELESS_TYPE_CONVERSION (val);
515 }
516 else
67914693
SL
517 /* We cannot use __builtin_unreachable here because it
518 cannot have address taken. */
3ef276e4 519 val = build_int_cst (TREE_TYPE (val), 0);
bdf37f7a
JH
520 return val;
521 }
522 }
bdf37f7a 523 }
7524f419 524
cbdd87d4
RG
525 else if (TREE_CODE (rhs) == ADDR_EXPR)
526 {
70f34814 527 tree ref = TREE_OPERAND (rhs, 0);
0bf8cd9d
RB
528 if (TREE_CODE (ref) == MEM_REF
529 && integer_zerop (TREE_OPERAND (ref, 1)))
7524f419 530 {
0bf8cd9d
RB
531 result = TREE_OPERAND (ref, 0);
532 if (!useless_type_conversion_p (TREE_TYPE (rhs),
533 TREE_TYPE (result)))
534 result = build1 (NOP_EXPR, TREE_TYPE (rhs), result);
535 return result;
7524f419 536 }
cbdd87d4
RG
537 }
538
539 else if (TREE_CODE (rhs) == CONSTRUCTOR
7524f419 540 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
cbdd87d4
RG
541 {
542 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
543 unsigned i;
544 tree val;
545
546 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
7524f419 547 if (! CONSTANT_CLASS_P (val))
cbdd87d4
RG
548 return NULL_TREE;
549
550 return build_vector_from_ctor (TREE_TYPE (rhs),
551 CONSTRUCTOR_ELTS (rhs));
552 }
553
ca8e8301
RB
554 else if (DECL_P (rhs)
555 && is_gimple_reg_type (TREE_TYPE (rhs)))
9d60be38 556 return get_symbol_constant_value (rhs);
cbdd87d4
RG
557 }
558 break;
559
560 case GIMPLE_UNARY_RHS:
cbdd87d4
RG
561 break;
562
563 case GIMPLE_BINARY_RHS:
cbdd87d4
RG
564 break;
565
0354c0c7 566 case GIMPLE_TERNARY_RHS:
5c099d40
RB
567 result = fold_ternary_loc (loc, subcode,
568 TREE_TYPE (gimple_assign_lhs (stmt)),
569 gimple_assign_rhs1 (stmt),
570 gimple_assign_rhs2 (stmt),
571 gimple_assign_rhs3 (stmt));
0354c0c7
BS
572
573 if (result)
574 {
575 STRIP_USELESS_TYPE_CONVERSION (result);
576 if (valid_gimple_rhs_p (result))
577 return result;
0354c0c7
BS
578 }
579 break;
580
cbdd87d4
RG
581 case GIMPLE_INVALID_RHS:
582 gcc_unreachable ();
583 }
584
585 return NULL_TREE;
586}
587
/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  The last store in
     the sequence reuses the original statement's VDEF; earlier stores
     get fresh SSA names.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
660
/* Helper function for update_gimple_call and
   gimplify_and_update_call_from_tree.  A GIMPLE_CALL STMT is being replaced
   with GIMPLE_CALL NEW_STMT.  Transfers the lhs, virtual operands,
   location and block from STMT to NEW_STMT, then swaps the statements
   at *SI_P.  */

static void
finish_update_gimple_call (gimple_stmt_iterator *si_p, gimple *new_stmt,
			   gimple *stmt)
{
  tree lhs = gimple_call_lhs (stmt);
  gimple_call_set_lhs (new_stmt, lhs);
  /* Keep the SSA def-stmt link consistent with the new definition.  */
  if (lhs && TREE_CODE (lhs) == SSA_NAME)
    SSA_NAME_DEF_STMT (lhs) = new_stmt;
  gimple_move_vops (new_stmt, stmt);
  gimple_set_location (new_stmt, gimple_location (stmt));
  if (gimple_block (new_stmt) == NULL_TREE)
    gimple_set_block (new_stmt, gimple_block (stmt));
  gsi_replace (si_p, new_stmt, false);
}
679
/* Update a GIMPLE_CALL statement at iterator *SI_P to call to FN
   with number of arguments NARGS, where the arguments in GIMPLE form
   follow NARGS argument.  Always returns true.  */

bool
update_gimple_call (gimple_stmt_iterator *si_p, tree fn, int nargs, ...)
{
  va_list ap;
  gcall *new_stmt, *stmt = as_a <gcall *> (gsi_stmt (*si_p));

  gcc_assert (is_gimple_call (stmt));
  va_start (ap, nargs);
  new_stmt = gimple_build_call_valist (fn, nargs, ap);
  finish_update_gimple_call (si_p, new_stmt, stmt);
  va_end (ap);
  return true;
}
697
698/* Return true if EXPR is a CALL_EXPR suitable for representation
699 as a single GIMPLE_CALL statement. If the arguments require
700 further gimplification, return false. */
701
702static bool
703valid_gimple_call_p (tree expr)
704{
705 unsigned i, nargs;
706
707 if (TREE_CODE (expr) != CALL_EXPR)
708 return false;
709
710 nargs = call_expr_nargs (expr);
711 for (i = 0; i < nargs; i++)
712 {
713 tree arg = CALL_EXPR_ARG (expr, i);
714 if (is_gimple_reg_type (TREE_TYPE (arg)))
715 {
716 if (!is_gimple_val (arg))
717 return false;
718 }
719 else
720 if (!is_gimple_lvalue (arg))
721 return false;
722 }
723
724 return true;
725}
726
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL
   is replaced.  If the call is expected to produces a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  if (valid_gimple_call_p (expr))
    {
      /* The call has simplified to another call.  */
      tree fn = CALL_EXPR_FN (expr);
      unsigned i;
      unsigned nargs = call_expr_nargs (expr);
      vec<tree> args = vNULL;
      gcall *new_stmt;

      if (nargs > 0)
	{
	  args.create (nargs);
	  args.safe_grow_cleared (nargs, true);

	  for (i = 0; i < nargs; i++)
	    args[i] = CALL_EXPR_ARG (expr, i);
	}

      new_stmt = gimple_build_call_vec (fn, args);
      finish_update_gimple_call (si_p, new_stmt, stmt);
      args.release ();
      return;
    }

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      /* No result is used: gimplify EXPR purely for its side effects.  */
      push_gimplify_context (gimple_in_ssa_p (cfun));
      gimplify_and_add (expr, &stmts);
      pop_gimplify_context (NULL);

      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), false);
	  return;
	}
    }
  else
    {
      /* Materialize EXPR into a GIMPLE operand and assign it to the
	 original call's lhs at the end of the sequence.  */
      tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  gsi_replace_with_seq_vops (si_p, stmts);
}
cbdd87d4 804
fef5a0d9
RB
805
806/* Replace the call at *GSI with the gimple value VAL. */
807
e3174bdf 808void
fef5a0d9
RB
809replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
810{
355fe088 811 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9 812 tree lhs = gimple_call_lhs (stmt);
355fe088 813 gimple *repl;
fef5a0d9 814 if (lhs)
e256dfce 815 {
fef5a0d9
RB
816 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
817 val = fold_convert (TREE_TYPE (lhs), val);
818 repl = gimple_build_assign (lhs, val);
819 }
820 else
821 repl = gimple_build_nop ();
822 tree vdef = gimple_vdef (stmt);
823 if (vdef && TREE_CODE (vdef) == SSA_NAME)
824 {
825 unlink_stmt_vdef (stmt);
826 release_ssa_name (vdef);
827 }
f6b4dc28 828 gsi_replace (gsi, repl, false);
fef5a0d9
RB
829}
830
831/* Replace the call at *GSI with the new call REPL and fold that
832 again. */
833
834static void
355fe088 835replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
fef5a0d9 836{
355fe088 837 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9
RB
838 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
839 gimple_set_location (repl, gimple_location (stmt));
779724a5 840 gimple_move_vops (repl, stmt);
f6b4dc28 841 gsi_replace (gsi, repl, false);
fef5a0d9
RB
842 fold_stmt (gsi);
843}
844
845/* Return true if VAR is a VAR_DECL or a component thereof. */
846
847static bool
848var_decl_component_p (tree var)
849{
850 tree inner = var;
851 while (handled_component_p (inner))
852 inner = TREE_OPERAND (inner, 0);
47cac108
RB
853 return (DECL_P (inner)
854 || (TREE_CODE (inner) == MEM_REF
855 && TREE_CODE (TREE_OPERAND (inner, 0)) == ADDR_EXPR));
fef5a0d9
RB
856}
857
/* Return TRUE if the SIZE argument, representing the size of an
   object, is in a range of values of which exactly zero is valid.  */

static bool
size_must_be_zero_p (tree size)
{
  if (integer_zerop (size))
    return true;

  /* Only SSA names of integral type carry range information we can use.  */
  if (TREE_CODE (size) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (size)))
    return false;

  tree type = TREE_TYPE (size);
  int prec = TYPE_PRECISION (type);

  /* Compute the value of SSIZE_MAX, the largest positive value that
     can be stored in ssize_t, the signed counterpart of size_t.  */
  wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
  wide_int zero = wi::zero (TYPE_PRECISION (type));
  value_range valid_range (type, zero, ssize_max);
  value_range vr;
  get_range_query (cfun)->range_of_expr (vr, size);

  if (vr.undefined_p ())
    vr.set_varying (TREE_TYPE (size));
  /* Restrict SIZE's range to [0, SSIZE_MAX]; if the only value left
     is zero, any larger size would be invalid anyway.  */
  vr.intersect (valid_range);
  return vr.zero_p ();
}
886
cc8bea0a
MS
887/* Fold function call to builtin mem{{,p}cpy,move}. Try to detect and
888 diagnose (otherwise undefined) overlapping copies without preventing
889 folding. When folded, GCC guarantees that overlapping memcpy has
890 the same semantics as memmove. Call to the library memcpy need not
891 provide the same guarantee. Return false if no simplification can
892 be made. */
fef5a0d9
RB
893
static bool
gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
			       tree dest, tree src, enum built_in_function code)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree len = gimple_call_arg (stmt, 2);
  location_t loc = gimple_location (stmt);

  /* If the LEN parameter is a constant zero or in range where
     the only valid value is zero, return DEST.  */
  if (size_must_be_zero_p (len))
    {
      gimple *repl;
      if (gimple_call_lhs (stmt))
	repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
      else
	repl = gimple_build_nop ();
      tree vdef = gimple_vdef (stmt);
      if (vdef && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
      gsi_replace (gsi, repl, false);
      return true;
    }

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    {
      /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
	 It's safe and may even be emitted by GCC itself (see bug
	 32667).  */
      unlink_stmt_vdef (stmt);
      if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	release_ssa_name (gimple_vdef (stmt));
      if (!lhs)
	{
	  gsi_replace (gsi, gimple_build_nop (), false);
	  return true;
	}
      goto done;
    }
  else
    {
      /* We cannot (easily) change the type of the copy if it is a storage
	 order barrier, i.e. is equivalent to a VIEW_CONVERT_EXPR that can
	 modify the storage order of objects (see storage_order_barrier_p).  */
      tree srctype
	= POINTER_TYPE_P (TREE_TYPE (src))
	  ? TREE_TYPE (TREE_TYPE (src)) : NULL_TREE;
      tree desttype
	= POINTER_TYPE_P (TREE_TYPE (dest))
	  ? TREE_TYPE (TREE_TYPE (dest)) : NULL_TREE;
      tree destvar, srcvar, srcoff;
      unsigned int src_align, dest_align;
      unsigned HOST_WIDE_INT tmp_len;
      const char *tmp_str;

      /* Build accesses at offset zero with a ref-all character type.  */
      tree off0
	= build_int_cst (build_pointer_type_for_mode (char_type_node,
						      ptr_mode, true), 0);

      /* If we can perform the copy efficiently with first doing all loads
	 and then all stores inline it that way.  Currently efficiently
	 means that we can load all the memory into a single integer
	 register which is what MOVE_MAX gives us.  */
      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (tree_fits_uhwi_p (len)
	  && compare_tree_int (len, MOVE_MAX) <= 0
	  /* FIXME: Don't transform copies from strings with known length.
	     Until GCC 9 this prevented a case in gcc.dg/strlenopt-8.c
	     from being handled, and the case was XFAILed for that reason.
	     Now that it is handled and the XFAIL removed, as soon as other
	     strlenopt tests that rely on it for passing are adjusted, this
	     hack can be removed.  */
	  && !c_strlen (src, 1)
	  && !((tmp_str = getbyterep (src, &tmp_len)) != NULL
	       && memchr (tmp_str, 0, tmp_len) == NULL)
	  /* Reverse-storage-order accesses cannot be turned into a plain
	     integer load/store pair without swapping bytes.  */
	  && !(srctype
	       && AGGREGATE_TYPE_P (srctype)
	       && TYPE_REVERSE_STORAGE_ORDER (srctype))
	  && !(desttype
	       && AGGREGATE_TYPE_P (desttype)
	       && TYPE_REVERSE_STORAGE_ORDER (desttype)))
	{
	  unsigned ilen = tree_to_uhwi (len);
	  if (pow2p_hwi (ilen))
	    {
	      /* Detect out-of-bounds accesses without issuing warnings.
		 Avoid folding out-of-bounds copies but to avoid false
		 positives for unreachable code defer warning until after
		 DCE has worked its magic.
		 -Wrestrict is still diagnosed.  */
	      if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
							 dest, src, len, len,
							 false, false))
		if (warning != OPT_Wrestrict)
		  return false;

	      scalar_int_mode imode;
	      machine_mode mode;
	      if (int_mode_for_size (ilen * BITS_PER_UNIT, 0).exists (&imode)
		  && bitwise_mode_for_size (ilen
					    * BITS_PER_UNIT).exists (&mode)
		  && known_eq (GET_MODE_BITSIZE (mode), ilen * BITS_PER_UNIT)
		  /* If the destination pointer is not aligned we must be able
		     to emit an unaligned store.  */
		  && (dest_align >= GET_MODE_ALIGNMENT (mode)
		      || !targetm.slow_unaligned_access (mode, dest_align)
		      || (optab_handler (movmisalign_optab, mode)
			  != CODE_FOR_nothing)))
		{
		  tree type = bitwise_type_for_mode (mode);
		  tree srctype = type;
		  tree desttype = type;
		  if (src_align < GET_MODE_ALIGNMENT (mode))
		    srctype = build_aligned_type (type, src_align);
		  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
		  tree tem = fold_const_aggregate_ref (srcmem);
		  if (tem)
		    srcmem = tem;
		  else if (src_align < GET_MODE_ALIGNMENT (mode)
			   && targetm.slow_unaligned_access (mode, src_align)
			   && (optab_handler (movmisalign_optab, mode)
			       == CODE_FOR_nothing))
		    srcmem = NULL_TREE;
		  if (srcmem)
		    {
		      gimple *new_stmt;
		      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
			{
			  /* Load the source into a temporary first so the
			     copy becomes a load followed by a store.  */
			  new_stmt = gimple_build_assign (NULL_TREE, srcmem);
			  srcmem
			    = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
							  new_stmt);
			  gimple_assign_set_lhs (new_stmt, srcmem);
			  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
			  gimple_set_location (new_stmt, loc);
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      if (dest_align < GET_MODE_ALIGNMENT (mode))
			desttype = build_aligned_type (type, dest_align);
		      new_stmt
			= gimple_build_assign (fold_build2 (MEM_REF, desttype,
							    dest, off0),
					       srcmem);
		      gimple_move_vops (new_stmt, stmt);
		      if (!lhs)
			{
			  gsi_replace (gsi, new_stmt, false);
			  return true;
			}
		      gimple_set_location (new_stmt, loc);
		      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
		      goto done;
		    }
		}
	    }
	}

      if (code == BUILT_IN_MEMMOVE)
	{
	  /* Both DEST and SRC must be pointer types.
	     ??? This is what old code did.  Is the testing for pointer types
	     really mandatory?

	     If either SRC is readonly or length is 1, we can use memcpy.  */
	  if (!dest_align || !src_align)
	    return false;
	  if (readonly_data_expr (src)
	      || (tree_fits_uhwi_p (len)
		  && (MIN (src_align, dest_align) / BITS_PER_UNIT
		      >= tree_to_uhwi (len))))
	    {
	      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If *src and *dest can't overlap, optimize into memcpy as well.  */
	  if (TREE_CODE (src) == ADDR_EXPR
	      && TREE_CODE (dest) == ADDR_EXPR)
	    {
	      tree src_base, dest_base, fn;
	      poly_int64 src_offset = 0, dest_offset = 0;
	      poly_uint64 maxsize;

	      srcvar = TREE_OPERAND (src, 0);
	      src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
	      if (src_base == NULL)
		src_base = srcvar;
	      destvar = TREE_OPERAND (dest, 0);
	      dest_base = get_addr_base_and_unit_offset (destvar,
							 &dest_offset);
	      if (dest_base == NULL)
		dest_base = destvar;
	      if (!poly_int_tree_p (len, &maxsize))
		maxsize = -1;
	      if (SSA_VAR_P (src_base)
		  && SSA_VAR_P (dest_base))
		{
		  if (operand_equal_p (src_base, dest_base, 0)
		      && ranges_maybe_overlap_p (src_offset, maxsize,
						 dest_offset, maxsize))
		    return false;
		}
	      else if (TREE_CODE (src_base) == MEM_REF
		       && TREE_CODE (dest_base) == MEM_REF)
		{
		  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
					 TREE_OPERAND (dest_base, 0), 0))
		    return false;
		  poly_offset_int full_src_offset
		    = mem_ref_offset (src_base) + src_offset;
		  poly_offset_int full_dest_offset
		    = mem_ref_offset (dest_base) + dest_offset;
		  if (ranges_maybe_overlap_p (full_src_offset, maxsize,
					      full_dest_offset, maxsize))
		    return false;
		}
	      else
		return false;

	      fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If the destination and source do not alias optimize into
	     memcpy as well.  */
	  if ((is_gimple_min_invariant (dest)
	       || TREE_CODE (dest) == SSA_NAME)
	      && (is_gimple_min_invariant (src)
		  || TREE_CODE (src) == SSA_NAME))
	    {
	      ao_ref destr, srcr;
	      ao_ref_init_from_ptr_and_size (&destr, dest, len);
	      ao_ref_init_from_ptr_and_size (&srcr, src, len);
	      if (!refs_may_alias_p_1 (&destr, &srcr, false))
		{
		  tree fn;
		  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
		  if (!fn)
		    return false;
		  gimple_call_set_fndecl (stmt, fn);
		  gimple_call_set_arg (stmt, 0, dest);
		  gimple_call_set_arg (stmt, 1, src);
		  fold_stmt (gsi);
		  return true;
		}
	    }

	  return false;
	}

      if (!tree_fits_shwi_p (len))
	return false;
      if (!srctype
	  || (AGGREGATE_TYPE_P (srctype)
	      && TYPE_REVERSE_STORAGE_ORDER (srctype)))
	return false;
      if (!desttype
	  || (AGGREGATE_TYPE_P (desttype)
	      && TYPE_REVERSE_STORAGE_ORDER (desttype)))
	return false;
      /* In the following try to find a type that is most natural to be
	 used for the memcpy source and destination and that allows
	 the most optimization when memcpy is turned into a plain assignment
	 using that type.  In theory we could always use a char[len] type
	 but that only gains us that the destination and source possibly
	 no longer will have their address taken.  */
      if (TREE_CODE (srctype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	srctype = TREE_TYPE (srctype);
      if (TREE_CODE (desttype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	desttype = TREE_TYPE (desttype);
      if (TREE_ADDRESSABLE (srctype)
	  || TREE_ADDRESSABLE (desttype))
	return false;

      /* Make sure we are not copying using a floating-point mode or
	 a type whose size possibly does not match its precision.  */
      if (FLOAT_MODE_P (TYPE_MODE (desttype))
	  || TREE_CODE (desttype) == BOOLEAN_TYPE
	  || TREE_CODE (desttype) == ENUMERAL_TYPE)
	desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
      if (FLOAT_MODE_P (TYPE_MODE (srctype))
	  || TREE_CODE (srctype) == BOOLEAN_TYPE
	  || TREE_CODE (srctype) == ENUMERAL_TYPE)
	srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
      /* bitwise_type_for_mode can fail; fall back to the other side's
	 type, and give up if neither survived.  */
      if (!srctype)
	srctype = desttype;
      if (!desttype)
	desttype = srctype;
      if (!srctype)
	return false;

      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);

      /* Choose between src and destination type for the access based
	 on alignment, whether the access constitutes a register access
	 and whether it may actually expose a declaration for SSA rewrite
	 or SRA decomposition.  Also try to expose a string constant, we
	 might be able to concatenate several of them later into a single
	 string store.  */
      destvar = NULL_TREE;
      srcvar = NULL_TREE;
      if (TREE_CODE (dest) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (dest, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len)
	  && dest_align >= TYPE_ALIGN (desttype)
	  && (is_gimple_reg_type (desttype)
	      || src_align >= TYPE_ALIGN (desttype)))
	destvar = fold_build2 (MEM_REF, desttype, dest, off0);
      else if (TREE_CODE (src) == ADDR_EXPR
	       && var_decl_component_p (TREE_OPERAND (src, 0))
	       && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
	       && src_align >= TYPE_ALIGN (srctype)
	       && (is_gimple_reg_type (srctype)
		   || dest_align >= TYPE_ALIGN (srctype)))
	srcvar = fold_build2 (MEM_REF, srctype, src, off0);
      /* FIXME: Don't transform copies from strings with known original length.
	 As soon as strlenopt tests that rely on it for passing are adjusted,
	 this hack can be removed.  */
      else if (gimple_call_alloca_for_var_p (stmt)
	       && (srcvar = string_constant (src, &srcoff, NULL, NULL))
	       && integer_zerop (srcoff)
	       && tree_int_cst_equal (TYPE_SIZE_UNIT (TREE_TYPE (srcvar)), len)
	       && dest_align >= TYPE_ALIGN (TREE_TYPE (srcvar)))
	srctype = TREE_TYPE (srcvar);
      else
	return false;

      /* Now that we chose an access type express the other side in
	 terms of it if the target allows that with respect to alignment
	 constraints.  */
      if (srcvar == NULL_TREE)
	{
	  if (src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, desttype, src, off0);
	  else
	    {
	      enum machine_mode mode = TYPE_MODE (desttype);
	      if ((mode == BLKmode && STRICT_ALIGNMENT)
		  || (targetm.slow_unaligned_access (mode, src_align)
		      && (optab_handler (movmisalign_optab, mode)
			  == CODE_FOR_nothing)))
		return false;
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	    }
	}
      else if (destvar == NULL_TREE)
	{
	  if (dest_align >= TYPE_ALIGN (srctype))
	    destvar = fold_build2 (MEM_REF, srctype, dest, off0);
	  else
	    {
	      enum machine_mode mode = TYPE_MODE (srctype);
	      if ((mode == BLKmode && STRICT_ALIGNMENT)
		  || (targetm.slow_unaligned_access (mode, dest_align)
		      && (optab_handler (movmisalign_optab, mode)
			  == CODE_FOR_nothing)))
		return false;
	      desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
					     dest_align);
	      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
	    }
	}

      /* Same as above, detect out-of-bounds accesses without issuing
	 warnings.  Avoid folding out-of-bounds copies but to avoid
	 false positives for unreachable code defer warning until
	 after DCE has worked its magic.
	 -Wrestrict is still diagnosed.  */
      if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
						 dest, src, len, len,
						 false, false))
	if (warning != OPT_Wrestrict)
	  return false;

      gimple *new_stmt;
      if (is_gimple_reg_type (TREE_TYPE (srcvar)))
	{
	  tree tem = fold_const_aggregate_ref (srcvar);
	  if (tem)
	    srcvar = tem;
	  if (! is_gimple_min_invariant (srcvar))
	    {
	      new_stmt = gimple_build_assign (NULL_TREE, srcvar);
	      srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
						   new_stmt);
	      gimple_assign_set_lhs (new_stmt, srcvar);
	      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	      gimple_set_location (new_stmt, loc);
	      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
	    }
	  new_stmt = gimple_build_assign (destvar, srcvar);
	  goto set_vop_and_replace;
	}

      /* We get an aggregate copy.  If the source is a STRING_CST, then
	 directly use its type to perform the copy.  */
      if (TREE_CODE (srcvar) == STRING_CST)
	desttype = srctype;

      /* Or else, use an unsigned char[] type to perform the copy in order
	 to preserve padding and to avoid any issues with TREE_ADDRESSABLE
	 types or float modes behavior on copying.  */
      else
	{
	  desttype = build_array_type_nelts (unsigned_char_type_node,
					     tree_to_uhwi (len));
	  srctype = desttype;
	  if (src_align > TYPE_ALIGN (srctype))
	    srctype = build_aligned_type (srctype, src_align);
	  srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	}

      if (dest_align > TYPE_ALIGN (desttype))
	desttype = build_aligned_type (desttype, dest_align);
      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
      new_stmt = gimple_build_assign (destvar, srcvar);

set_vop_and_replace:
      gimple_move_vops (new_stmt, stmt);
      if (!lhs)
	{
	  gsi_replace (gsi, new_stmt, false);
	  return true;
	}
      gimple_set_location (new_stmt, loc);
      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
    }

done:
  /* Build the replacement for the call's LHS: DEST for memcpy/memmove,
     DEST + LEN for mempcpy.  LHS is known non-NULL here; all paths that
     reach DONE with no LHS returned earlier.  */
  gimple_seq stmts = NULL;
  if (code == BUILT_IN_MEMCPY || code == BUILT_IN_MEMMOVE)
    len = NULL_TREE;
  else if (code == BUILT_IN_MEMPCPY)
    {
      len = gimple_convert_to_ptrofftype (&stmts, loc, len);
      dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
			   TREE_TYPE (dest), dest, len);
    }
  else
    gcc_unreachable ();

  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gimple *repl = gimple_build_assign (lhs, dest);
  gsi_replace (gsi, repl, false);
  return true;
}
1365
b3d8d88e
MS
1366/* Transform a call to built-in bcmp(a, b, len) at *GSI into one
1367 to built-in memcmp (a, b, len). */
1368
1369static bool
1370gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
1371{
1372 tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);
1373
1374 if (!fn)
1375 return false;
1376
1377 /* Transform bcmp (a, b, len) into memcmp (a, b, len). */
1378
1379 gimple *stmt = gsi_stmt (*gsi);
1380 tree a = gimple_call_arg (stmt, 0);
1381 tree b = gimple_call_arg (stmt, 1);
1382 tree len = gimple_call_arg (stmt, 2);
1383
1384 gimple *repl = gimple_build_call (fn, 3, a, b, len);
1385 replace_call_with_call_and_fold (gsi, repl);
1386
1387 return true;
1388}
1389
1390/* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
1391 to built-in memmove (dest, src, len). */
1392
1393static bool
1394gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
1395{
1396 tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);
1397
1398 if (!fn)
1399 return false;
1400
1401 /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
1402 it's quivalent to memmove (not memcpy). Transform bcopy (src, dest,
1403 len) into memmove (dest, src, len). */
1404
1405 gimple *stmt = gsi_stmt (*gsi);
1406 tree src = gimple_call_arg (stmt, 0);
1407 tree dest = gimple_call_arg (stmt, 1);
1408 tree len = gimple_call_arg (stmt, 2);
1409
1410 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1411 gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
1412 replace_call_with_call_and_fold (gsi, repl);
1413
1414 return true;
1415}
1416
1417/* Transform a call to built-in bzero (dest, len) at *GSI into one
1418 to built-in memset (dest, 0, len). */
1419
1420static bool
1421gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
1422{
1423 tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
1424
1425 if (!fn)
1426 return false;
1427
1428 /* Transform bzero (dest, len) into memset (dest, 0, len). */
1429
1430 gimple *stmt = gsi_stmt (*gsi);
1431 tree dest = gimple_call_arg (stmt, 0);
1432 tree len = gimple_call_arg (stmt, 1);
1433
1434 gimple_seq seq = NULL;
1435 gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
1436 gimple_seq_add_stmt_without_update (&seq, repl);
1437 gsi_replace_with_seq_vops (gsi, seq);
1438 fold_stmt (gsi);
1439
1440 return true;
1441}
1442
fef5a0d9
RB
1443/* Fold function call to builtin memset or bzero at *GSI setting the
1444 memory of size LEN to VAL. Return whether a simplification was made. */
1445
1446static bool
1447gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1448{
355fe088 1449 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9
RB
1450 tree etype;
1451 unsigned HOST_WIDE_INT length, cval;
1452
1453 /* If the LEN parameter is zero, return DEST. */
1454 if (integer_zerop (len))
1455 {
1456 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1457 return true;
1458 }
1459
1460 if (! tree_fits_uhwi_p (len))
1461 return false;
1462
1463 if (TREE_CODE (c) != INTEGER_CST)
1464 return false;
1465
1466 tree dest = gimple_call_arg (stmt, 0);
1467 tree var = dest;
1468 if (TREE_CODE (var) != ADDR_EXPR)
1469 return false;
1470
1471 var = TREE_OPERAND (var, 0);
1472 if (TREE_THIS_VOLATILE (var))
1473 return false;
1474
1475 etype = TREE_TYPE (var);
1476 if (TREE_CODE (etype) == ARRAY_TYPE)
1477 etype = TREE_TYPE (etype);
1478
4f4fa250
JJ
1479 if ((!INTEGRAL_TYPE_P (etype)
1480 && !POINTER_TYPE_P (etype))
1481 || TREE_CODE (etype) == BITINT_TYPE)
fef5a0d9
RB
1482 return NULL_TREE;
1483
1484 if (! var_decl_component_p (var))
1485 return NULL_TREE;
1486
1487 length = tree_to_uhwi (len);
7a504f33 1488 if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
1ba9acb1
RB
1489 || (GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (etype))
1490 != GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (etype)))
fef5a0d9
RB
1491 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1492 return NULL_TREE;
1493
1494 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1495 return NULL_TREE;
1496
1ba9acb1
RB
1497 if (!type_has_mode_precision_p (etype))
1498 etype = lang_hooks.types.type_for_mode (SCALAR_INT_TYPE_MODE (etype),
1499 TYPE_UNSIGNED (etype));
1500
fef5a0d9
RB
1501 if (integer_zerop (c))
1502 cval = 0;
1503 else
1504 {
1505 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1506 return NULL_TREE;
1507
1508 cval = TREE_INT_CST_LOW (c);
1509 cval &= 0xff;
1510 cval |= cval << 8;
1511 cval |= cval << 16;
1512 cval |= (cval << 31) << 1;
1513 }
1514
1515 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
355fe088 1516 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
779724a5 1517 gimple_move_vops (store, stmt);
f1ba6a81 1518 gimple_set_location (store, gimple_location (stmt));
fef5a0d9
RB
1519 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1520 if (gimple_call_lhs (stmt))
1521 {
355fe088 1522 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
f6b4dc28 1523 gsi_replace (gsi, asgn, false);
fef5a0d9
RB
1524 }
1525 else
1526 {
1527 gimple_stmt_iterator gsi2 = *gsi;
1528 gsi_prev (gsi);
1529 gsi_remove (&gsi2, true);
1530 }
1531
1532 return true;
1533}
1534
/* Helper of get_range_strlen for ARG that is not an SSA_NAME.  */

static bool
get_range_strlen_tree (tree arg, bitmap visited, strlen_range_kind rkind,
		       c_strlen_data *pdata, unsigned eltsize)
{
  gcc_assert (TREE_CODE (arg) != SSA_NAME);

  /* The length computed by this invocation of the function.  */
  tree val = NULL_TREE;

  /* True if VAL is an optimistic (tight) bound determined from
     the size of the character array in which the string may be
     stored.  In that case, the computed VAL is used to set
     PDATA->MAXBOUND.  */
  bool tight_bound = false;

  /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
    {
      tree op = TREE_OPERAND (arg, 0);
      if (integer_zerop (TREE_OPERAND (op, 1)))
	{
	  /* For &(*p)[0] recurse on the SSA_NAME P.  */
	  tree aop0 = TREE_OPERAND (op, 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_range_strlen (TREE_OPERAND (aop0, 0), visited, rkind,
				     pdata, eltsize);
	}
      else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF
	       && rkind == SRK_LENRANGE)
	{
	  /* Fail if an array is the last member of a struct object
	     since it could be treated as a (fake) flexible array
	     member.  */
	  tree idx = TREE_OPERAND (op, 1);

	  arg = TREE_OPERAND (op, 0);
	  tree optype = TREE_TYPE (arg);
	  if (tree dom = TYPE_DOMAIN (optype))
	    if (tree bound = TYPE_MAX_VALUE (dom))
	      if (TREE_CODE (bound) == INTEGER_CST
		  && TREE_CODE (idx) == INTEGER_CST
		  && tree_int_cst_lt (bound, idx))
		return false;
	}
    }

  if (rkind == SRK_INT_VALUE)
    {
      /* We are computing the maximum value (not string length).  */
      val = arg;
      if (TREE_CODE (val) != INTEGER_CST
	  || tree_int_cst_sgn (val) < 0)
	return false;
    }
  else
    {
      c_strlen_data lendata = { };
      val = c_strlen (arg, 1, &lendata, eltsize);

      if (!val && lendata.decl)
	{
	  /* ARG refers to an unterminated const character array.
	     DATA.DECL with size DATA.LEN.  */
	  val = lendata.minlen;
	  pdata->decl = lendata.decl;
	}
    }

  /* Set if VAL represents the maximum length based on array size (set
     when exact length cannot be determined).  */
  bool maxbound = false;

  if (!val && rkind == SRK_LENRANGE)
    {
      if (TREE_CODE (arg) == ADDR_EXPR)
	return get_range_strlen (TREE_OPERAND (arg, 0), visited, rkind,
				 pdata, eltsize);

      if (TREE_CODE (arg) == ARRAY_REF)
	{
	  tree optype = TREE_TYPE (TREE_OPERAND (arg, 0));

	  /* Determine the "innermost" array type.  */
	  while (TREE_CODE (optype) == ARRAY_TYPE
		 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
	    optype = TREE_TYPE (optype);

	  /* Avoid arrays of pointers.  */
	  tree eltype = TREE_TYPE (optype);
	  if (TREE_CODE (optype) != ARRAY_TYPE
	      || !INTEGRAL_TYPE_P (eltype))
	    return false;

	  /* Fail when the array bound is unknown or zero.  */
	  val = TYPE_SIZE_UNIT (optype);
	  if (!val
	      || TREE_CODE (val) != INTEGER_CST
	      || integer_zerop (val))
	    return false;

	  /* Subtract one for the terminating nul.  */
	  val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
			     integer_one_node);

	  /* Set the minimum size to zero since the string in
	     the array could have zero length.  */
	  pdata->minlen = ssize_int (0);

	  tight_bound = true;
	}
      else if (TREE_CODE (arg) == COMPONENT_REF
	       && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
		   == ARRAY_TYPE))
	{
	  /* Use the type of the member array to determine the upper
	     bound on the length of the array.  This may be overly
	     optimistic if the array itself isn't NUL-terminated and
	     the caller relies on the subsequent member to contain
	     the NUL but that would only be considered valid if
	     the array were the last member of a struct.  */

	  tree fld = TREE_OPERAND (arg, 1);

	  tree optype = TREE_TYPE (fld);

	  /* Determine the "innermost" array type.  */
	  while (TREE_CODE (optype) == ARRAY_TYPE
		 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
	    optype = TREE_TYPE (optype);

	  /* Fail when the array bound is unknown or zero.  */
	  val = TYPE_SIZE_UNIT (optype);
	  if (!val
	      || TREE_CODE (val) != INTEGER_CST
	      || integer_zerop (val))
	    return false;
	  val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
			     integer_one_node);

	  /* Set the minimum size to zero since the string in
	     the array could have zero length.  */
	  pdata->minlen = ssize_int (0);

	  /* The array size determined above is an optimistic bound
	     on the length.  If the array isn't nul-terminated the
	     length computed by the library function would be greater.
	     Even though using strlen to cross the subobject boundary
	     is undefined, avoid drawing conclusions from the member
	     type about the length here.  */
	  tight_bound = true;
	}
      else if (TREE_CODE (arg) == MEM_REF
	       && TREE_CODE (TREE_TYPE (arg)) == ARRAY_TYPE
	       && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == INTEGER_TYPE
	       && TREE_CODE (TREE_OPERAND (arg, 0)) == ADDR_EXPR)
	{
	  /* Handle a MEM_REF into a DECL accessing an array of integers,
	     being conservative about references to extern structures with
	     flexible array members that can be initialized to arbitrary
	     numbers of elements as an extension (static structs are okay).  */
	  tree ref = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if ((TREE_CODE (ref) == PARM_DECL || VAR_P (ref))
	      && (decl_binds_to_current_def_p (ref)
		  || !array_ref_flexible_size_p (arg)))
	    {
	      /* Fail if the offset is out of bounds.  Such accesses
		 should be diagnosed at some point.  */
	      val = DECL_SIZE_UNIT (ref);
	      if (!val
		  || TREE_CODE (val) != INTEGER_CST
		  || integer_zerop (val))
		return false;

	      poly_offset_int psiz = wi::to_offset (val);
	      poly_offset_int poff = mem_ref_offset (arg);
	      if (known_le (psiz, poff))
		return false;

	      pdata->minlen = ssize_int (0);

	      /* Subtract the offset and one for the terminating nul.  */
	      psiz -= poff;
	      psiz -= 1;
	      val = wide_int_to_tree (TREE_TYPE (val), psiz);
	      /* Since VAL reflects the size of a declared object
		 rather the type of the access it is not a tight bound.  */
	    }
	}
      else if (TREE_CODE (arg) == PARM_DECL || VAR_P (arg))
	{
	  /* Avoid handling pointers to arrays.  GCC might misuse
	     a pointer to an array of one bound to point to an array
	     object of a greater bound.  */
	  tree argtype = TREE_TYPE (arg);
	  if (TREE_CODE (argtype) == ARRAY_TYPE)
	    {
	      val = TYPE_SIZE_UNIT (argtype);
	      if (!val
		  || TREE_CODE (val) != INTEGER_CST
		  || integer_zerop (val))
		return false;
	      /* Subtract one for the terminating nul.  */
	      val = wide_int_to_tree (TREE_TYPE (val),
				      wi::sub (wi::to_wide (val), 1));

	      /* Set the minimum size to zero since the string in
		 the array could have zero length.  */
	      pdata->minlen = ssize_int (0);
	    }
	}
      maxbound = true;
    }

  if (!val)
    return false;

  /* Adjust the lower bound on the string length as necessary.  */
  if (!pdata->minlen
      || (rkind != SRK_STRLEN
	  && TREE_CODE (pdata->minlen) == INTEGER_CST
	  && TREE_CODE (val) == INTEGER_CST
	  && tree_int_cst_lt (val, pdata->minlen)))
    pdata->minlen = val;

  if (pdata->maxbound && TREE_CODE (pdata->maxbound) == INTEGER_CST)
    {
      /* Adjust the tighter (more optimistic) string length bound
	 if necessary and proceed to adjust the more conservative
	 bound.  */
      if (TREE_CODE (val) == INTEGER_CST)
	{
	  if (tree_int_cst_lt (pdata->maxbound, val))
	    pdata->maxbound = val;
	}
      else
	pdata->maxbound = val;
    }
  else if (pdata->maxbound || maxbound)
    /* Set PDATA->MAXBOUND only if it either isn't INTEGER_CST or
       if VAL corresponds to the maximum length determined based
       on the type of the object.  */
    pdata->maxbound = val;

  if (tight_bound)
    {
      /* VAL computed above represents an optimistically tight bound
	 on the length of the string based on the referenced object's
	 or subobject's type.  Determine the conservative upper bound
	 based on the enclosing object's size if possible.  */
      if (rkind == SRK_LENRANGE)
	{
	  poly_int64 offset;
	  tree base = get_addr_base_and_unit_offset (arg, &offset);
	  if (!base)
	    {
	      /* When the call above fails due to a non-constant offset
		 assume the offset is zero and use the size of the whole
		 enclosing object instead.  */
	      base = get_base_address (arg);
	      offset = 0;
	    }
	  /* If the base object is a pointer no upper bound on the length
	     can be determined.  Otherwise the maximum length is equal to
	     the size of the enclosing object minus the offset of
	     the referenced subobject minus 1 (for the terminating nul).  */
	  tree type = TREE_TYPE (base);
	  if (TREE_CODE (type) == POINTER_TYPE
	      || (TREE_CODE (base) != PARM_DECL && !VAR_P (base))
	      || !(val = DECL_SIZE_UNIT (base)))
	    val = build_all_ones_cst (size_type_node);
	  else
	    {
	      val = DECL_SIZE_UNIT (base);
	      val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
				 size_int (offset + 1));
	    }
	}
      else
	return false;
    }

  if (pdata->maxlen)
    {
      /* Adjust the more conservative bound if possible/necessary
	 and fail otherwise.  */
      if (rkind != SRK_STRLEN)
	{
	  if (TREE_CODE (pdata->maxlen) != INTEGER_CST
	      || TREE_CODE (val) != INTEGER_CST)
	    return false;

	  if (tree_int_cst_lt (pdata->maxlen, val))
	    pdata->maxlen = val;
	  return true;
	}
      else if (simple_cst_equal (val, pdata->maxlen) != 1)
	{
	  /* Fail if the length of this ARG is different from that
	     previously determined from another ARG.  */
	  return false;
	}
    }

  pdata->maxlen = val;
  /* For SRK_LENRANGE any bound is acceptable; otherwise fail when VAL
     is the all-ones marker used above for "no upper bound".  */
  return rkind == SRK_LENRANGE || !integer_all_onesp (val);
}
1842
5d6655eb
MS
/* For an ARG referencing one or more strings, try to obtain the range
   of their lengths, or the size of the largest array ARG refers to if
   the range of lengths cannot be determined, and store all in *PDATA.
   For an integer ARG (when RKIND == SRK_INT_VALUE), try to determine
   the maximum constant value.
   If ARG is an SSA_NAME, follow its use-def chains.  When RKIND ==
   SRK_STRLEN, then if PDATA->MAXLEN is not equal to the determined
   length or if we are unable to determine the length, return false.
   VISITED is a bitmap of visited variables.
   RKIND determines the kind of value or range to obtain (see
   strlen_range_kind).
   Set PDATA->DECL if ARG refers to an unterminated constant array.
   On input, set ELTSIZE to 1 for normal single byte character strings,
   and either 2 or 4 for wide character strings (the size of wchar_t).
   Return true if *PDATA was successfully populated and false otherwise.  */

static bool
get_range_strlen (tree arg, bitmap visited,
		  strlen_range_kind rkind,
		  c_strlen_data *pdata, unsigned eltsize)
{

  /* Non-SSA arguments (decls, constants, address expressions) are
     handled by the tree-level helper.  */
  if (TREE_CODE (arg) != SSA_NAME)
    return get_range_strlen_tree (arg, visited, rkind, pdata, eltsize);

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
    return true;

  tree var = arg;
  gimple *def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
	 constant length or come from another SSA_NAME with a constant
	 length.  */
      if (gimple_assign_single_p (def_stmt)
	  || gimple_assign_unary_nop_p (def_stmt))
	{
	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  return get_range_strlen (rhs, visited, rkind, pdata, eltsize);
	}
      else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	{
	  /* For a COND_EXPR consider both operands: the result can be
	     either of them.  */
	  tree ops[2] = { gimple_assign_rhs2 (def_stmt),
			  gimple_assign_rhs3 (def_stmt) };

	  for (unsigned int i = 0; i < 2; i++)
	    if (!get_range_strlen (ops[i], visited, rkind, pdata, eltsize))
	      {
		if (rkind != SRK_LENRANGE)
		  return false;
		/* Set the upper bound to the maximum to prevent
		   it from being adjusted in the next iteration but
		   leave MINLEN and the more conservative MAXBOUND
		   determined so far alone (or leave them null if
		   they haven't been set yet).  That the MINLEN is
		   in fact zero can be determined from MAXLEN being
		   unbounded but the discovered minimum is used for
		   diagnostics.  */
		pdata->maxlen = build_all_ones_cst (size_type_node);
	      }
	  return true;
	}
      return false;

    case GIMPLE_PHI:
      /* Unless RKIND == SRK_LENRANGE, all arguments of the PHI node
	 must have a constant length.  */
      for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
	{
	  tree arg = gimple_phi_arg (def_stmt, i)->def;

	  /* If this PHI has itself as an argument, we cannot
	     determine the string length of this argument.  However,
	     if we can find a constant string length for the other
	     PHI args then we can still be sure that this is a
	     constant string length.  So be optimistic and just
	     continue with the next argument.  */
	  if (arg == gimple_phi_result (def_stmt))
	    continue;

	  if (!get_range_strlen (arg, visited, rkind, pdata, eltsize))
	    {
	      if (rkind != SRK_LENRANGE)
		return false;
	      /* Set the upper bound to the maximum to prevent
		 it from being adjusted in the next iteration but
		 leave MINLEN and the more conservative MAXBOUND
		 determined so far alone (or leave them null if
		 they haven't been set yet).  That the MINLEN is
		 in fact zero can be determined from MAXLEN being
		 unbounded but the discovered minimum is used for
		 diagnostics.  */
	      pdata->maxlen = build_all_ones_cst (size_type_node);
	    }
	}
      return true;

    default:
      return false;
    }
}
5d6655eb 1953
97623b52
MS
1954/* Try to obtain the range of the lengths of the string(s) referenced
1955 by ARG, or the size of the largest array ARG refers to if the range
a7160771
MS
1956 of lengths cannot be determined, and store all in *PDATA which must
1957 be zero-initialized on input except PDATA->MAXBOUND may be set to
1958 a non-null tree node other than INTEGER_CST to request to have it
1959 set to the length of the longest string in a PHI. ELTSIZE is
1960 the expected size of the string element in bytes: 1 for char and
97623b52
MS
1961 some power of 2 for wide characters.
1962 Return true if the range [PDATA->MINLEN, PDATA->MAXLEN] is suitable
1963 for optimization. Returning false means that a nonzero PDATA->MINLEN
1964 doesn't reflect the true lower bound of the range when PDATA->MAXLEN
1965 is -1 (in that case, the actual range is indeterminate, i.e.,
1966 [0, PTRDIFF_MAX - 2]. */
88d0c3f0 1967
3f343040 1968bool
84de9426 1969get_range_strlen (tree arg, c_strlen_data *pdata, unsigned eltsize)
88d0c3f0 1970{
ba6e17e7 1971 auto_bitmap visited;
a7160771 1972 tree maxbound = pdata->maxbound;
88d0c3f0 1973
ba6e17e7 1974 if (!get_range_strlen (arg, visited, SRK_LENRANGE, pdata, eltsize))
730832cd 1975 {
5d6655eb
MS
1976 /* On failure extend the length range to an impossible maximum
1977 (a valid MAXLEN must be less than PTRDIFF_MAX - 1). Other
1978 members can stay unchanged regardless. */
1979 pdata->minlen = ssize_int (0);
1980 pdata->maxlen = build_all_ones_cst (size_type_node);
730832cd 1981 }
5d6655eb
MS
1982 else if (!pdata->minlen)
1983 pdata->minlen = ssize_int (0);
1984
a7160771
MS
1985 /* If it's unchanged from it initial non-null value, set the conservative
1986 MAXBOUND to SIZE_MAX. Otherwise leave it null (if it is null). */
1987 if (maxbound && pdata->maxbound == maxbound)
1988 pdata->maxbound = build_all_ones_cst (size_type_node);
88d0c3f0 1989
03c4a945 1990 return !integer_all_onesp (pdata->maxlen);
88d0c3f0
MS
1991}
1992
5d6655eb
MS
1993/* Return the maximum value for ARG given RKIND (see strlen_range_kind).
1994 For ARG of pointer types, NONSTR indicates if the caller is prepared
1995 to handle unterminated strings. For integer ARG and when RKIND ==
1996 SRK_INT_VALUE, NONSTR must be null.
e08341bb 1997
5d6655eb
MS
1998 If an unterminated array is discovered and our caller handles
1999 unterminated arrays, then bubble up the offending DECL and
e08341bb
MS
2000 return the maximum size. Otherwise return NULL. */
2001
598f7235
MS
2002static tree
2003get_maxval_strlen (tree arg, strlen_range_kind rkind, tree *nonstr = NULL)
dcb7fae2 2004{
598f7235
MS
2005 /* A non-null NONSTR is meaningless when determining the maximum
2006 value of an integer ARG. */
2007 gcc_assert (rkind != SRK_INT_VALUE || nonstr == NULL);
2008 /* ARG must have an integral type when RKIND says so. */
2009 gcc_assert (rkind != SRK_INT_VALUE || INTEGRAL_TYPE_P (TREE_TYPE (arg)));
2010
ba6e17e7 2011 auto_bitmap visited;
3f343040 2012
5d6655eb
MS
2013 /* Reset DATA.MAXLEN if the call fails or when DATA.MAXLEN
2014 is unbounded. */
730832cd 2015 c_strlen_data lendata = { };
ba6e17e7 2016 if (!get_range_strlen (arg, visited, rkind, &lendata, /* eltsize = */1))
730832cd 2017 lendata.maxlen = NULL_TREE;
5d6655eb
MS
2018 else if (lendata.maxlen && integer_all_onesp (lendata.maxlen))
2019 lendata.maxlen = NULL_TREE;
2020
e08341bb
MS
2021 if (nonstr)
2022 {
2023 /* For callers prepared to handle unterminated arrays set
2024 *NONSTR to point to the declaration of the array and return
2025 the maximum length/size. */
730832cd
MS
2026 *nonstr = lendata.decl;
2027 return lendata.maxlen;
e08341bb
MS
2028 }
2029
2030 /* Fail if the constant array isn't nul-terminated. */
730832cd 2031 return lendata.decl ? NULL_TREE : lendata.maxlen;
dcb7fae2
RB
2032}
2033
cea4dab8
SP
2034/* Return true if LEN is known to be less than or equal to (or if STRICT is
2035 true, strictly less than) the lower bound of SIZE at compile time and false
2036 otherwise. */
2037
2038static bool
2039known_lower (gimple *stmt, tree len, tree size, bool strict = false)
2040{
2041 if (len == NULL_TREE)
2042 return false;
2043
2044 wide_int size_range[2];
2045 wide_int len_range[2];
2046 if (get_range (len, stmt, len_range) && get_range (size, stmt, size_range))
2047 {
2048 if (strict)
2049 return wi::ltu_p (len_range[1], size_range[0]);
2050 else
2051 return wi::leu_p (len_range[1], size_range[0]);
2052 }
2053
2054 return false;
2055}
fef5a0d9
RB
2056
/* Fold function call to builtin strcpy with arguments DEST and SRC.
   Replace the call with DEST when SRC and DEST are identical, or with
   an equivalent memcpy when the length of SRC is a known constant.
   Return true if the call was simplified and false otherwise.  */

static bool
gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
			    tree dest, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    {
      /* Issue -Wrestrict unless the pointers are null (those do
	 not point to objects and so do not indicate an overlap;
	 such calls could be the result of sanitization and jump
	 threading).  */
      if (!integer_zerop (dest) && !warning_suppressed_p (stmt, OPT_Wrestrict))
	{
	  tree func = gimple_call_fndecl (stmt);

	  warning_at (loc, OPT_Wrestrict,
		      "%qD source argument is the same as destination",
		      func);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  if (optimize_function_for_size_p (cfun))
    return false;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  /* Set to non-null if ARG refers to an unterminated array.  */
  tree nonstr = NULL;
  tree len = get_maxval_strlen (src, SRK_STRLEN, &nonstr);

  if (nonstr)
    {
      /* Avoid folding calls with unterminated arrays.  */
      if (!warning_suppressed_p (stmt, OPT_Wstringop_overread))
	warn_string_no_nul (loc, stmt, "strcpy", src, nonstr);
      suppress_warning (stmt, OPT_Wstringop_overread);
      return false;
    }

  if (!len)
    return false;

  /* Transform into memcpy (DEST, SRC, strlen (SRC) + 1), forcing the
     length (including the terminating nul) into a gimple operand.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2120
/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
   Replace the call with DEST when LEN is zero (warning about the lack of
   nul termination), or with an equivalent memcpy when the constant bound
   LEN covers the whole of SRC including its terminating nul.  Return true
   if the call was simplified and false otherwise.  */

static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
			     tree dest, tree src, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Avoid warning if the destination refers to an array/pointer
	 decorated with attribute nonstring.  */
      if (!nonstring)
	{
	  tree fndecl = gimple_call_fndecl (stmt);

	  /* Warn about the lack of nul termination: the result is not
	     a (nul-terminated) string.  */
	  tree slen = get_maxval_strlen (src, SRK_STRLEN);
	  if (slen && !integer_zerop (slen))
	    warning_at (loc, OPT_Wstringop_truncation,
			"%qD destination unchanged after copying no bytes "
			"from a string of length %E",
			fndecl, slen);
	  else
	    warning_at (loc, OPT_Wstringop_truncation,
			"%qD destination unchanged after copying no bytes",
			fndecl);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, SRK_STRLEN);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  /* The size of the source string including the terminating nul.  */
  tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (ssize, len))
    return false;

  /* Diagnose truncation that leaves the copy unterminated.  */
  maybe_diag_stxncpy_trunc (*gsi, src, len);

  /* OK transform into builtin memcpy.  */
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}
2195
71dea1dd
WD
/* Fold function call to builtin strchr or strrchr (IS_STRRCHR selects
   which).  If both arguments are constant, evaluate and fold the result,
   otherwise simplify str(r)chr (str, 0) into str + strlen (str).
   In general strlen is significantly faster than strchr
   due to being a simpler operation.
   Return true if the call was simplified and false otherwise.  */
static bool
gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree str = gimple_call_arg (stmt, 0);
  tree c = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  const char *p;
  char ch;

  /* The folds below all produce a value; without an lhs there is
     nothing to do.  */
  if (!gimple_call_lhs (stmt))
    return false;

  /* Avoid folding if the first argument is not a nul-terminated array.
     Defer warning until later.  */
  if (!check_nul_terminated_array (NULL_TREE, str))
    return false;

  /* Both the string and the character are known: compute the result at
     compile time.  */
  if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
    {
      const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);

      if (p1 == NULL)
	{
	  replace_call_with_value (gsi, integer_zero_node);
	  return true;
	}

      tree len = build_int_cst (size_type_node, p1 - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
					      POINTER_PLUS_EXPR, str, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* The remaining transformations only apply when searching for the
     terminating nul.  */
  if (!integer_zerop (c))
    return false;

  /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size.  */
  if (is_strrchr && optimize_function_for_size_p (cfun))
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);

      if (strchr_fn)
	{
	  gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}

      return false;
    }

  tree len;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);

  if (!strlen_fn)
    return false;

  /* Create newstr = strlen (str).  */
  gimple_seq stmts = NULL;
  gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
  gimple_set_location (new_stmt, loc);
  len = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (new_stmt, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);

  /* Create (str p+ strlen (str)).  */
  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  POINTER_PLUS_EXPR, str, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);
  gsi_replace_with_seq_vops (gsi, stmts);
  /* gsi now points at the assignment to the lhs, get a
     stmt iterator to the strlen.
     ??? We can't use gsi_for_stmt as that doesn't work when the
     CFG isn't built yet.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
2284
c8952930
JJ
/* Fold function call to builtin strstr.
   If both arguments are constant, evaluate and fold the result,
   additionally fold strstr (x, "") into x and strstr (x, "c")
   into strchr (x, 'c').
   Return true if the call was simplified and false otherwise.  */
static bool
gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  /* The folds below all produce a value; without an lhs there is
     nothing to do.  */
  if (!gimple_call_lhs (stmt))
    return false;

  tree haystack = gimple_call_arg (stmt, 0);
  tree needle = gimple_call_arg (stmt, 1);

  /* Avoid folding if either argument is not a nul-terminated array.
     Defer warning until later.  */
  if (!check_nul_terminated_array (NULL_TREE, haystack)
      || !check_nul_terminated_array (NULL_TREE, needle))
    return false;

  /* The needle must be a known constant string for any of the folds.  */
  const char *q = c_getstr (needle);
  if (q == NULL)
    return false;

  /* Both strings known: compute the result at compile time.  */
  if (const char *p = c_getstr (haystack))
    {
      const char *r = strstr (p, q);

      if (r == NULL)
	{
	  replace_call_with_value (gsi, integer_zero_node);
	  return true;
	}

      tree len = build_int_cst (size_type_node, r - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt
	= gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
			       haystack, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* For strstr (x, "") return x.  */
  if (q[0] == '\0')
    {
      replace_call_with_value (gsi, haystack);
      return true;
    }

  /* Transform strstr (x, "c") into strchr (x, 'c').  */
  if (q[1] == '\0')
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (strchr_fn)
	{
	  tree c = build_int_cst (integer_type_node, q[0]);
	  gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
2351
fef5a0d9
RB
/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   If SRC is the empty string the call is replaced with DST.  Otherwise,
   when the length of SRC is a known constant and we are optimizing for
   speed, the call is split into strlen (DST) followed by a memcpy of
   SRC (including its terminating nul) to DST + strlen (DST), exposing
   the copy to store-by-pieces expansion.

   Return true if the call was simplified and false otherwise.  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, SRK_STRLEN);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple *repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  newdst = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  /* Copy strlen (SRC) + 1 bytes so the terminating nul comes along.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      /* strcat returns DST; materialize that for any user of the
	 result.  */
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
	 stmt iterator to the memcpy call.
	 ??? We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
2443
07f1cf56
RB
2444/* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
2445 are the arguments to the call. */
2446
2447static bool
2448gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
2449{
355fe088 2450 gimple *stmt = gsi_stmt (*gsi);
07f1cf56
RB
2451 tree dest = gimple_call_arg (stmt, 0);
2452 tree src = gimple_call_arg (stmt, 1);
2453 tree size = gimple_call_arg (stmt, 2);
2454 tree fn;
2455 const char *p;
2456
2457
2458 p = c_getstr (src);
2459 /* If the SRC parameter is "", return DEST. */
2460 if (p && *p == '\0')
2461 {
2462 replace_call_with_value (gsi, dest);
2463 return true;
2464 }
2465
2466 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
2467 return false;
2468
2469 /* If __builtin_strcat_chk is used, assume strcat is available. */
2470 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
2471 if (!fn)
2472 return false;
2473
355fe088 2474 gimple *repl = gimple_build_call (fn, 2, dest, src);
07f1cf56
RB
2475 replace_call_with_call_and_fold (gsi, repl);
2476 return true;
2477}
2478
ad03a744
RB
/* Simplify a call to the strncat builtin.  Replaces the call with DST
   when nothing would be copied, or with a plain strcat when the bound
   is known to be at least the source length, warning about suspicious
   constant bounds along the way.  Return true if the call was
   simplified and false otherwise.  */

static bool
gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dst = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);
  tree src_len = c_strlen (src, 1);

  /* If the requested length is zero, or the src parameter string
     length is zero, return the dst parameter.  */
  if (integer_zerop (len) || (src_len && integer_zerop (src_len)))
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* Return early if the requested len is less than the string length.
     Warnings will be issued elsewhere later.  */
  if (!src_len || known_lower (stmt, len, src_len, true))
    return false;

  /* Warn on constant LEN.  */
  if (TREE_CODE (len) == INTEGER_CST)
    {
      bool nowarn = warning_suppressed_p (stmt, OPT_Wstringop_overflow_);
      tree dstsize;

      if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize)
	  && TREE_CODE (dstsize) == INTEGER_CST)
	{
	  int cmpdst = tree_int_cst_compare (len, dstsize);

	  if (cmpdst >= 0)
	    {
	      tree fndecl = gimple_call_fndecl (stmt);

	      /* Strncat copies (at most) LEN bytes and always appends
		 the terminating NUL so the specified bound should never
		 be equal to (or greater than) the size of the destination.
		 If it is, the copy could overflow.  */
	      location_t loc = gimple_location (stmt);
	      nowarn = warning_at (loc, OPT_Wstringop_overflow_,
				   cmpdst == 0
				   ? G_("%qD specified bound %E equals "
					"destination size")
				   : G_("%qD specified bound %E exceeds "
					"destination size %E"),
				   fndecl, len, dstsize);
	      if (nowarn)
		suppress_warning (stmt, OPT_Wstringop_overflow_);
	    }
	}

      if (!nowarn && TREE_CODE (src_len) == INTEGER_CST
	  && tree_int_cst_compare (src_len, len) == 0)
	{
	  tree fndecl = gimple_call_fndecl (stmt);
	  location_t loc = gimple_location (stmt);

	  /* To avoid possible overflow the specified bound should also
	     not be equal to the length of the source, even when the size
	     of the destination is unknown (it's not an uncommon mistake
	     to specify as the bound to strncpy the length of the source).  */
	  if (warning_at (loc, OPT_Wstringop_overflow_,
			  "%qD specified bound %E equals source length",
			  fndecl, len))
	    suppress_warning (stmt, OPT_Wstringop_overflow_);
	}
    }

  /* The transformation to strcat is only valid when the bound provably
     covers the whole source string.  */
  if (!known_lower (stmt, src_len, len))
    return false;

  tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);

  /* If the replacement _DECL isn't initialized, don't do the
     transformation.  */
  if (!fn)
    return false;

  /* Otherwise, emit a call to strcat.  */
  gcall *repl = gimple_build_call (fn, 2, dst, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2567
745583f9
RB
2568/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2569 LEN, and SIZE. */
2570
2571static bool
2572gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
2573{
355fe088 2574 gimple *stmt = gsi_stmt (*gsi);
745583f9
RB
2575 tree dest = gimple_call_arg (stmt, 0);
2576 tree src = gimple_call_arg (stmt, 1);
2577 tree len = gimple_call_arg (stmt, 2);
2578 tree size = gimple_call_arg (stmt, 3);
2579 tree fn;
2580 const char *p;
2581
2582 p = c_getstr (src);
2583 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2584 if ((p && *p == '\0')
2585 || integer_zerop (len))
2586 {
2587 replace_call_with_value (gsi, dest);
2588 return true;
2589 }
2590
745583f9
RB
2591 if (! integer_all_onesp (size))
2592 {
2593 tree src_len = c_strlen (src, 1);
cea4dab8 2594 if (known_lower (stmt, src_len, len))
745583f9
RB
2595 {
2596 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2597 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
2598 if (!fn)
2599 return false;
2600
355fe088 2601 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
745583f9
RB
2602 replace_call_with_call_and_fold (gsi, repl);
2603 return true;
2604 }
2605 return false;
2606 }
2607
2608 /* If __builtin_strncat_chk is used, assume strncat is available. */
2609 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2610 if (!fn)
2611 return false;
2612
355fe088 2613 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
745583f9
RB
2614 replace_call_with_call_and_fold (gsi, repl);
2615 return true;
2616}
2617
a918bfbf
ML
2618/* Build and append gimple statements to STMTS that would load a first
2619 character of a memory location identified by STR. LOC is location
2620 of the statement. */
2621
2622static tree
2623gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2624{
2625 tree var;
2626
2627 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2628 tree cst_uchar_ptr_node
2629 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2630 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2631
2632 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2633 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2634 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2635
2636 gimple_assign_set_lhs (stmt, var);
2637 gimple_seq_add_stmt_without_update (stmts, stmt);
2638
2639 return var;
2640}
2641
/* Fold a call to the str{n}{case}cmp builtin pointed by GSI iterator.
   Return true if the call was replaced: by a constant result when both
   strings are known, by a single-character load when one argument is "",
   by a character difference when the bound is 1, or by a call to strcmp
   when the bound exceeds a known string length.  */

static bool
gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree callee = gimple_call_fndecl (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);

  tree type = integer_type_node;
  tree str1 = gimple_call_arg (stmt, 0);
  tree str2 = gimple_call_arg (stmt, 1);
  tree lhs = gimple_call_lhs (stmt);

  /* BOUND is the constant third argument of strncmp/strncasecmp if it
     fits a UHWI, otherwise all-ones to mean "unknown".  */
  tree bound_node = NULL_TREE;
  unsigned HOST_WIDE_INT bound = HOST_WIDE_INT_M1U;

  /* Handle strncmp and strncasecmp functions.  */
  if (gimple_call_num_args (stmt) == 3)
    {
      bound_node = gimple_call_arg (stmt, 2);
      if (tree_fits_uhwi_p (bound_node))
	bound = tree_to_uhwi (bound_node);
    }

  /* If the BOUND parameter is zero, return zero.  */
  if (bound == 0)
    {
      replace_call_with_value (gsi, integer_zero_node);
      return true;
    }

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (str1, str2, 0))
    {
      replace_call_with_value (gsi, integer_zero_node);
      return true;
    }

  /* Initially set to the number of characters, including the terminating
     nul if each array has one.   LENx == strnlen (Sx, LENx) implies that
     the array Sx is not terminated by a nul.
     For nul-terminated strings then adjusted to their length so that
     LENx == NULPOSx holds.  */
  unsigned HOST_WIDE_INT len1 = HOST_WIDE_INT_MAX, len2 = len1;
  const char *p1 = getbyterep (str1, &len1);
  const char *p2 = getbyterep (str2, &len2);

  /* The position of the terminating nul character if one exists, otherwise
     a value greater than LENx.  */
  unsigned HOST_WIDE_INT nulpos1 = HOST_WIDE_INT_MAX, nulpos2 = nulpos1;

  if (p1)
    {
      size_t n = strnlen (p1, len1);
      if (n < len1)
	len1 = nulpos1 = n;
    }

  if (p2)
    {
      size_t n = strnlen (p2, len2);
      if (n < len2)
	len2 = nulpos2 = n;
    }

  /* For known strings, return an immediate value.  */
  if (p1 && p2)
    {
      int r = 0;
      bool known_result = false;

      switch (fcode)
	{
	case BUILT_IN_STRCMP:
	case BUILT_IN_STRCMP_EQ:
	  /* Punt unless both arrays are provably nul-terminated;
	     otherwise host strcmp would read past the constant data.  */
	  if (len1 != nulpos1 || len2 != nulpos2)
	    break;

	  r = strcmp (p1, p2);
	  known_result = true;
	  break;

	case BUILT_IN_STRNCMP:
	case BUILT_IN_STRNCMP_EQ:
	  {
	    if (bound == HOST_WIDE_INT_M1U)
	      break;

	    /* Reduce the bound to be no more than the length
	       of the shorter of the two strings, or the sizes
	       of the unterminated arrays.  */
	    unsigned HOST_WIDE_INT n = bound;

	    if (len1 == nulpos1 && len1 < n)
	      n = len1 + 1;
	    if (len2 == nulpos2 && len2 < n)
	      n = len2 + 1;

	    /* Punt if the comparison could read past a nul in either
	       constant representation.  */
	    if (MIN (nulpos1, nulpos2) + 1 < n)
	      break;

	    r = strncmp (p1, p2, n);
	    known_result = true;
	    break;
	  }
	/* Only handleable situation is where the string are equal (result 0),
	   which is already handled by operand_equal_p case.  */
	case BUILT_IN_STRCASECMP:
	  break;
	case BUILT_IN_STRNCASECMP:
	  {
	    if (bound == HOST_WIDE_INT_M1U)
	      break;
	    /* A case-sensitive match implies a case-insensitive one;
	       any other result tells us nothing.  */
	    r = strncmp (p1, p2, bound);
	    if (r == 0)
	      known_result = true;
	    break;
	  }
	default:
	  gcc_unreachable ();
	}

      if (known_result)
	{
	  replace_call_with_value (gsi, build_cmp_result (type, r));
	  return true;
	}
    }

  /* True when at least one character is certainly compared, i.e. the
     bound is known nonzero or the function takes no bound at all.  */
  bool nonzero_bound = (bound >= 1 && bound < HOST_WIDE_INT_M1U)
    || fcode == BUILT_IN_STRCMP
    || fcode == BUILT_IN_STRCMP_EQ
    || fcode == BUILT_IN_STRCASECMP;

  location_t loc = gimple_location (stmt);

  /* If the second arg is "", return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0' && nonzero_bound)
    {
      gimple_seq stmts = NULL;
      tree var = gimple_load_first_char (loc, str1, &stmts);
      if (lhs)
	{
	  stmt = gimple_build_assign (lhs, NOP_EXPR, var);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If the first arg is "", return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0' && nonzero_bound)
    {
      gimple_seq stmts = NULL;
      tree var = gimple_load_first_char (loc, str2, &stmts);

      if (lhs)
	{
	  tree c = create_tmp_reg_or_ssa_name (integer_type_node);
	  stmt = gimple_build_assign (c, NOP_EXPR, var);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);

	  stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If BOUND is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
  if (fcode == BUILT_IN_STRNCMP && bound == 1)
    {
      gimple_seq stmts = NULL;
      tree temp1 = gimple_load_first_char (loc, str1, &stmts);
      tree temp2 = gimple_load_first_char (loc, str2, &stmts);

      if (lhs)
	{
	  tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
	  gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
	  gimple_seq_add_stmt_without_update (&stmts, convert1);

	  tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
	  gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
	  gimple_seq_add_stmt_without_update (&stmts, convert2);

	  stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If BOUND is greater than the length of one constant string,
     and the other argument is also a nul-terminated string, replace
     strncmp with strcmp.  */
  if (fcode == BUILT_IN_STRNCMP
      && bound > 0 && bound < HOST_WIDE_INT_M1U
      && ((p2 && len2 < bound && len2 == nulpos2)
	  || (p1 && len1 < bound && len1 == nulpos1)))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
      if (!fn)
	return false;
      gimple *repl = gimple_build_call (fn, 2, str1, str2);
      replace_call_with_call_and_fold (gsi, repl);
      return true;
    }

  return false;
}
2858
488c6247
ML
/* Fold a call to the memchr pointed by GSI iterator.  Return true if the
   call was replaced: by a null pointer when the character is provably
   absent, or by ARG1 + OFFSET when it is found in a constant string.  */

static bool
gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree arg1 = gimple_call_arg (stmt, 0);
  tree arg2 = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
      return true;
    }

  /* Punt unless both the searched-for character and the length are
     compile-time constants.  */
  char c;
  if (TREE_CODE (arg2) != INTEGER_CST
      || !tree_fits_uhwi_p (len)
      || !target_char_cst_p (arg2, &c))
    return false;

  unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
  unsigned HOST_WIDE_INT string_length;
  const char *p1 = getbyterep (arg1, &string_length);

  if (p1)
    {
      /* Search only the constant bytes we actually have.  */
      const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
      if (r == NULL)
	{
	  tree mem_size, offset_node;
	  byte_representation (arg1, &offset_node, &mem_size, NULL);
	  unsigned HOST_WIDE_INT offset = (offset_node == NULL_TREE)
					  ? 0 : tree_to_uhwi (offset_node);
	  /* MEM_SIZE is the size of the array the string literal
	     is stored in.  */
	  unsigned HOST_WIDE_INT string_size = tree_to_uhwi (mem_size) - offset;
	  gcc_checking_assert (string_length <= string_size);
	  /* Only fold to null when the whole searched range lies within
	     the literal's array; a larger LEN would read past it.  */
	  if (length <= string_size)
	    {
	      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
	      return true;
	    }
	}
      else
	{
	  /* Found: replace the call with ARG1 + OFFSET.  */
	  unsigned HOST_WIDE_INT offset = r - p1;
	  gimple_seq stmts = NULL;
	  if (lhs != NULL_TREE)
	    {
	      tree offset_cst = build_int_cst (sizetype, offset);
	      gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
						   arg1, offset_cst);
	      gimple_seq_add_stmt_without_update (&stmts, stmt);
	    }
	  else
	    gimple_seq_add_stmt_without_update (&stmts,
						gimple_build_nop ());

	  gsi_replace_with_seq_vops (gsi, stmts);
	  return true;
	}
    }

  return false;
}
a918bfbf 2928
fef5a0d9
RB
/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
   to the call.  UNLOCKED is true if this is actually a call to
   fputs_unlocked.  A call of a zero-length string is deleted, of a
   one-character string becomes fputc, and of a longer known-length
   string becomes fwrite.  Return true if a simplification was made,
   false otherwise.  */

static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
			   tree arg0, tree arg1,
			   bool unlocked)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
			 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
			 : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
			  ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
			  : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, SRK_STRLEN);
  if (!len || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely .  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
	const char *p = c_getstr (arg0);
	if (p != NULL)
	  {
	    if (!fn_fputc)
	      return false;

	    gimple *repl
	      = gimple_build_call (fn_fputc, 2,
				   build_int_cst (integer_type_node, p[0]),
				   arg1);
	    replace_call_with_call_and_fold (gsi, repl);
	    return true;
	  }
      }
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
	/* If optimizing for size keep fputs.  */
	if (optimize_function_for_size_p (cfun))
	  return false;
	/* New argument list transforming fputs(string, stream) to
	   fwrite(string, 1, len, stream).  */
	if (!fn_fwrite)
	  return false;

	gimple *repl
	  = gimple_build_call (fn_fwrite, 4, arg0, size_one_node,
			       fold_convert (size_type_node, len), arg1);
	replace_call_with_call_and_fold (gsi, repl);
	return true;
      }
    default:
      gcc_unreachable ();
    }
}
3005
/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call.  FCODE is
   the BUILT_IN_* code of the builtin.  When LEN (or its maximal value)
   can be shown not to exceed SIZE, the checked call is replaced by the
   corresponding unchecked builtin.  Return true if a simplification
   was made, false otherwise.  */

static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
				tree dest, tree src, tree len, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	{
	  replace_call_with_value (gsi, dest);
	  return true;
	}
      else
	{
	  gimple_seq stmts = NULL;
	  len = gimple_convert_to_ptrofftype (&stmts, loc, len);
	  tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
				    TREE_TYPE (dest), dest, len);
	  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	  replace_call_with_value (gsi, temp);
	  return true;
	}
    }

  /* MAXLEN is the largest value LEN can take if it is not constant.  */
  tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
  if (! integer_all_onesp (size)
      && !known_lower (stmt, len, size)
      && !known_lower (stmt, maxlen, size))
    {
      /* MAXLEN and LEN both cannot be proved to be less than SIZE, at
	 least try to optimize (void) __mempcpy_chk () into
	 (void) __memcpy_chk () */
      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
	{
	  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	  if (!fn)
	    return false;

	  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
      return false;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
3092
d1753b4b
SP
3093/* Print a message in the dump file recording transformation of FROM to TO. */
3094
3095static void
3096dump_transformation (gcall *from, gcall *to)
3097{
3098 if (dump_enabled_p ())
3099 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, from, "simplified %T to %T\n",
3100 gimple_call_fn (from), gimple_call_fn (to));
3101}
3102
fef5a0d9
RB
/* Fold a call to the __st[rp]cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call.  FCODE is the
   BUILT_IN_* code of the builtin.  When the source length can be shown
   to be smaller than SIZE, the checked call is replaced by plain
   st[rp]cpy (or by __memcpy_chk when only a non-provable constant
   length is known).  Return true if a simplification was made.  */

static bool
gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
				tree dest,
				tree src, tree size,
				enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree len, fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    {
      /* Issue -Wrestrict unless the pointers are null (those do
	 not point to objects and so do not indicate an overlap;
	 such calls could be the result of sanitization and jump
	 threading).  */
      if (!integer_zerop (dest)
	  && !warning_suppressed_p (stmt, OPT_Wrestrict))
	{
	  tree func = gimple_call_fndecl (stmt);

	  warning_at (loc, OPT_Wrestrict,
		      "%qD source argument is the same as destination",
		      func);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  /* MAXLEN is an upper bound on strlen (SRC) if it is not constant.  */
  tree maxlen = get_maxval_strlen (src, SRK_STRLENMAX);
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (!known_lower (stmt, len, size, true)
	  && !known_lower (stmt, maxlen, size, true))
	{
	  if (fcode == BUILT_IN_STPCPY_CHK)
	    {
	      if (! ignore)
		return false;

	      /* If return value of __stpcpy_chk is ignored,
		 optimize into __strcpy_chk.  */
	      fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
	      if (!fn)
		return false;

	      gimple *repl = gimple_build_call (fn, 3, dest, src, size);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }

	  if (! len || TREE_SIDE_EFFECTS (len))
	    return false;

	  /* If c_strlen returned something, but not provably less than size,
	     transform __strcpy_chk into __memcpy_chk.  */
	  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	  if (!fn)
	    return false;

	  /* Copy LEN + 1 bytes to include the terminating nul.  */
	  gimple_seq stmts = NULL;
	  len = force_gimple_operand (len, &stmts, true, NULL_TREE);
	  len = gimple_convert (&stmts, loc, size_type_node, len);
	  len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
			      build_int_cst (size_type_node, 1));
	  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK && !ignore
			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return false;

  gcall *repl = gimple_build_call (fn, 2, dest, src);
  dump_transformation (stmt, repl);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
3196
/* Fold a call to the __st{r,p}ncpy_chk builtin.  DEST, SRC, LEN, and SIZE
   are the arguments to the call.  FCODE is the BUILT_IN_* code of the
   builtin.  When LEN (or its maximal value) can be shown not to exceed
   SIZE, the checked call is replaced by plain st{r,p}ncpy.  Return true
   if a simplification was made, false otherwise.  */

static bool
gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
				 tree dest, tree src,
				 tree len, tree size,
				 enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* MAXLEN is the largest value LEN can take if it is not constant.  */
  tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
  if (! integer_all_onesp (size)
      && !known_lower (stmt, len, size) && !known_lower (stmt, maxlen, size))
    {
      if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
	{
	  /* If return value of __stpncpy_chk is ignored,
	     optimize into __strncpy_chk.  */
	  fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
	  if (fn)
	    {
	      gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      return false;
    }

  /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK && !ignore
			      ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
  if (!fn)
    return false;

  gcall *repl = gimple_build_call (fn, 3, dest, src, len);
  dump_transformation (stmt, repl);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
3242
2625bb5d
RB
/* Fold function call to builtin stpcpy with arguments DEST and SRC.
   If the result is unused, fold to strcpy; if the source length is a
   known constant, fold to memcpy of LEN + 1 bytes followed by
   DEST + LEN.  Return true if a simplification was made, false
   otherwise.  */

static bool
gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  location_t loc = gimple_location (stmt);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree fn, lenp1;

  /* If the result is unused, replace stpcpy with strcpy.  */
  if (gimple_call_lhs (stmt) == NULL_TREE)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;
      gimple_call_set_fndecl (stmt, fn);
      fold_stmt (gsi);
      return true;
    }

  /* DATA.DECL is set to non-null if SRC refers to an unterminated array.  */
  c_strlen_data data = { };
  /* The size of the unterminated array if SRC refers to one.  */
  tree size;
  /* True if the size is exact/constant, false if it's the lower bound
     of a range.  */
  bool exact;
  tree len = c_strlen (src, 1, &data, 1);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    {
      data.decl = unterminated_array (src, &size, &exact);
      if (!data.decl)
	return false;
    }

  if (data.decl)
    {
      /* Avoid folding calls with unterminated arrays.  */
      if (!warning_suppressed_p (stmt, OPT_Wstringop_overread))
	warn_string_no_nul (loc, stmt, "stpcpy", src, data.decl, size,
			    exact);
      /* Mark the statement so the warning is not repeated.  */
      suppress_warning (stmt, OPT_Wstringop_overread);
      return false;
    }

  if (optimize_function_for_size_p (cfun)
      /* If length is zero it's small enough.  */
      && !integer_zerop (len))
    return false;

  /* If the source has a known length replace stpcpy with memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  /* Build LEN + 1 to copy the terminating nul as well.  */
  gimple_seq stmts = NULL;
  tree tem = gimple_convert (&stmts, loc, size_type_node, len);
  lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
			tem, build_int_cst (size_type_node, 1));
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
  gimple_move_vops (repl, stmt);
  gsi_insert_before (gsi, repl, GSI_SAME_STMT);
  /* Replace the result with dest + len.  */
  stmts = NULL;
  tem = gimple_convert (&stmts, loc, sizetype, len);
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
				      POINTER_PLUS_EXPR, dest, tem);
  gsi_replace (gsi, ret, false);
  /* Finally fold the memcpy call.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
3323
fef5a0d9
RB
/* Fold a call to __{,v}snprintf_chk pointed to by GSI into plain
   {,v}snprintf when the bound LEN (or its maximal value) can be shown
   not to exceed the object SIZE, and the flag/format permit it.  FCODE
   is either BUILT_IN_SNPRINTF_CHK or BUILT_IN_VSNPRINTF_CHK.  Return
   true if the call was simplified, false if a normal call should be
   emitted instead.  */

static bool
gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
				  enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (gimple_call_num_args (stmt) < 5)
    return false;

  dest = gimple_call_arg (stmt, 0);
  len = gimple_call_arg (stmt, 1);
  flag = gimple_call_arg (stmt, 2);
  size = gimple_call_arg (stmt, 3);
  fmt = gimple_call_arg (stmt, 4);

  /* MAXLEN is the largest value LEN can take if it is not constant.  */
  tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
  if (! integer_all_onesp (size)
      && !known_lower (stmt, len, size) && !known_lower (stmt, maxlen, size))
    return false;

  if (!init_target_chars ())
    return false;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
			      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 5 argument by 3 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, len);
  gimple_call_set_arg (stmt, 2, fmt);
  for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
cbdd87d4 3388
fef5a0d9
RB
/* Fold a call to __{,v}sprintf_chk pointed to by GSI into plain
   {,v}sprintf when the output length implied by the format can be shown
   not to exceed the object SIZE, and the flag/format permit it.  FCODE
   is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.  Return true
   if the call was simplified, false if a normal call should be emitted
   instead.  */

static bool
gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
				 enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  unsigned nargs = gimple_call_num_args (stmt);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return false;
  dest = gimple_call_arg (stmt, 0);
  flag = gimple_call_arg (stmt, 1);
  size = gimple_call_arg (stmt, 2);
  fmt = gimple_call_arg (stmt, 3);

  len = NULL_TREE;

  if (!init_target_chars ())
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = gimple_call_arg (stmt, 4);
	      if (POINTER_TYPE_P (TREE_TYPE (arg)))
		len = c_strlen (arg, 1);
	    }
	}
    }

  if (! integer_all_onesp (size) && !known_lower (stmt, len, size, true))
    return false;

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
			      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 4 argument by 2 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, fmt);
  for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
3474
35770bb2
RB
/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
   ORIG may be null if this is a 2-argument call.  We don't attempt to
   simplify calls with more than 3 arguments.

   Return true if simplification was possible, otherwise false.  */

bool
gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (gimple_call_num_args (stmt) > 3)
    return false;

  tree orig = NULL_TREE;
  if (gimple_call_num_args (stmt) == 3)
    orig = gimple_call_arg (stmt, 2);

  /* Check whether the format is a literal string constant.  */
  tree fmt = gimple_call_arg (stmt, 1);
  const char *fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  tree dest = gimple_call_arg (stmt, 0);

  if (!init_target_chars ())
    return false;

  tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
  if (!fn)
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
	return false;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
	 'format' is known to contain no % formats.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);

      /* Propagate the NO_WARNING bit to avoid issuing the same
	 warning more than once.  */
      copy_warning (repl, stmt);

      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  /* sprintf returns the number of characters written, which here
	     is simply the length of the constant format string.  */
	  repl = gimple_build_assign (lhs, build_int_cst (TREE_TYPE (lhs),
							  strlen (fmt_str)));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
	return false;

      /* Don't fold calls with source arguments of invalid (nonpointer)
	 types.  */
      if (!POINTER_TYPE_P (TREE_TYPE (orig)))
	return false;

      /* When the result is used we must know the source length to
	 materialize the return value.  */
      tree orig_len = NULL_TREE;
      if (gimple_call_lhs (stmt))
	{
	  orig_len = get_maxval_strlen (orig, SRK_STRLEN);
	  if (!orig_len)
	    return false;
	}

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);

      /* Propagate the NO_WARNING bit to avoid issuing the same
	 warning more than once.  */
      copy_warning (repl, stmt);

      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (TREE_TYPE (lhs),
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
	  repl = gimple_build_assign (lhs, orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
3604
d7e78447
RB
3605/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
3606 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
3607 attempt to simplify calls with more than 4 arguments.
35770bb2 3608
a104bd88 3609 Return true if simplification was possible, otherwise false. */
d7e78447 3610
a104bd88 3611bool
dcb7fae2 3612gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
d7e78447 3613{
538dd0b7 3614 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
d7e78447
RB
3615 tree dest = gimple_call_arg (stmt, 0);
3616 tree destsize = gimple_call_arg (stmt, 1);
3617 tree fmt = gimple_call_arg (stmt, 2);
3618 tree orig = NULL_TREE;
3619 const char *fmt_str = NULL;
3620
3621 if (gimple_call_num_args (stmt) > 4)
3622 return false;
3623
3624 if (gimple_call_num_args (stmt) == 4)
3625 orig = gimple_call_arg (stmt, 3);
3626
d7e78447
RB
3627 /* Check whether the format is a literal string constant. */
3628 fmt_str = c_getstr (fmt);
3629 if (fmt_str == NULL)
3630 return false;
3631
3632 if (!init_target_chars ())
3633 return false;
3634
3635 /* If the format doesn't contain % args or %%, use strcpy. */
3636 if (strchr (fmt_str, target_percent) == NULL)
3637 {
3638 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3639 if (!fn)
3640 return false;
3641
3642 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
3643 if (orig)
3644 return false;
3645
323026c7
SP
3646 tree len = build_int_cstu (TREE_TYPE (destsize), strlen (fmt_str));
3647
d7e78447
RB
3648 /* We could expand this as
3649 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
3650 or to
3651 memcpy (str, fmt_with_nul_at_cstm1, cst);
3652 but in the former case that might increase code size
3653 and in the latter case grow .rodata section too much.
3654 So punt for now. */
323026c7 3655 if (!known_lower (stmt, len, destsize, true))
d7e78447
RB
3656 return false;
3657
3658 gimple_seq stmts = NULL;
355fe088 3659 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
d7e78447 3660 gimple_seq_add_stmt_without_update (&stmts, repl);
a73468e8 3661 if (tree lhs = gimple_call_lhs (stmt))
d7e78447 3662 {
a73468e8 3663 repl = gimple_build_assign (lhs,
323026c7 3664 fold_convert (TREE_TYPE (lhs), len));
d7e78447
RB
3665 gimple_seq_add_stmt_without_update (&stmts, repl);
3666 gsi_replace_with_seq_vops (gsi, stmts);
3667 /* gsi now points at the assignment to the lhs, get a
3668 stmt iterator to the memcpy call.
3669 ??? We can't use gsi_for_stmt as that doesn't work when the
3670 CFG isn't built yet. */
3671 gimple_stmt_iterator gsi2 = *gsi;
3672 gsi_prev (&gsi2);
3673 fold_stmt (&gsi2);
3674 }
3675 else
3676 {
3677 gsi_replace_with_seq_vops (gsi, stmts);
3678 fold_stmt (gsi);
3679 }
3680 return true;
3681 }
3682
3683 /* If the format is "%s", use strcpy if the result isn't used. */
3684 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3685 {
3686 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3687 if (!fn)
3688 return false;
3689
3690 /* Don't crash on snprintf (str1, cst, "%s"). */
3691 if (!orig)
3692 return false;
3693
598f7235 3694 tree orig_len = get_maxval_strlen (orig, SRK_STRLEN);
d7e78447
RB
3695
3696 /* We could expand this as
3697 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
3698 or to
3699 memcpy (str1, str2_with_nul_at_cstm1, cst);
3700 but in the former case that might increase code size
3701 and in the latter case grow .rodata section too much.
3702 So punt for now. */
323026c7 3703 if (!known_lower (stmt, orig_len, destsize, true))
d7e78447
RB
3704 return false;
3705
3706 /* Convert snprintf (str1, cst, "%s", str2) into
3707 strcpy (str1, str2) if strlen (str2) < cst. */
3708 gimple_seq stmts = NULL;
355fe088 3709 gimple *repl = gimple_build_call (fn, 2, dest, orig);
d7e78447 3710 gimple_seq_add_stmt_without_update (&stmts, repl);
a73468e8 3711 if (tree lhs = gimple_call_lhs (stmt))
d7e78447 3712 {
a73468e8 3713 if (!useless_type_conversion_p (TREE_TYPE (lhs),
d7e78447 3714 TREE_TYPE (orig_len)))
a73468e8
JJ
3715 orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
3716 repl = gimple_build_assign (lhs, orig_len);
d7e78447
RB
3717 gimple_seq_add_stmt_without_update (&stmts, repl);
3718 gsi_replace_with_seq_vops (gsi, stmts);
3719 /* gsi now points at the assignment to the lhs, get a
3720 stmt iterator to the memcpy call.
3721 ??? We can't use gsi_for_stmt as that doesn't work when the
3722 CFG isn't built yet. */
3723 gimple_stmt_iterator gsi2 = *gsi;
3724 gsi_prev (&gsi2);
3725 fold_stmt (&gsi2);
3726 }
3727 else
3728 {
3729 gsi_replace_with_seq_vops (gsi, stmts);
3730 fold_stmt (gsi);
3731 }
3732 return true;
3733 }
3734 return false;
3735}
35770bb2 3736
edd7ae68
RB
3737/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
3738 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
3739 more than 3 arguments, and ARG may be null in the 2-argument case.
3740
3741 Return NULL_TREE if no simplification was possible, otherwise return the
3742 simplified form of the call as a tree. FCODE is the BUILT_IN_*
3743 code of the function to be simplified. */
3744
3745static bool
3746gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
3747 tree fp, tree fmt, tree arg,
3748 enum built_in_function fcode)
3749{
3750 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3751 tree fn_fputc, fn_fputs;
3752 const char *fmt_str = NULL;
3753
3754 /* If the return value is used, don't do the transformation. */
3755 if (gimple_call_lhs (stmt) != NULL_TREE)
3756 return false;
3757
3758 /* Check whether the format is a literal string constant. */
3759 fmt_str = c_getstr (fmt);
3760 if (fmt_str == NULL)
3761 return false;
3762
3763 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
3764 {
3765 /* If we're using an unlocked function, assume the other
3766 unlocked functions exist explicitly. */
3767 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
3768 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
3769 }
3770 else
3771 {
3772 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
3773 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
3774 }
3775
3776 if (!init_target_chars ())
3777 return false;
3778
3779 /* If the format doesn't contain % args or %%, use strcpy. */
3780 if (strchr (fmt_str, target_percent) == NULL)
3781 {
3782 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
3783 && arg)
3784 return false;
3785
3786 /* If the format specifier was "", fprintf does nothing. */
3787 if (fmt_str[0] == '\0')
3788 {
3789 replace_call_with_value (gsi, NULL_TREE);
3790 return true;
3791 }
3792
3793 /* When "string" doesn't contain %, replace all cases of
3794 fprintf (fp, string) with fputs (string, fp). The fputs
3795 builtin will take care of special cases like length == 1. */
3796 if (fn_fputs)
3797 {
3798 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
3799 replace_call_with_call_and_fold (gsi, repl);
3800 return true;
3801 }
3802 }
3803
3804 /* The other optimizations can be done only on the non-va_list variants. */
3805 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
3806 return false;
3807
3808 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3809 else if (strcmp (fmt_str, target_percent_s) == 0)
3810 {
3811 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3812 return false;
3813 if (fn_fputs)
3814 {
3815 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
3816 replace_call_with_call_and_fold (gsi, repl);
3817 return true;
3818 }
3819 }
3820
3821 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3822 else if (strcmp (fmt_str, target_percent_c) == 0)
3823 {
3824 if (!arg
3825 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
3826 return false;
3827 if (fn_fputc)
3828 {
3829 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
3830 replace_call_with_call_and_fold (gsi, repl);
3831 return true;
3832 }
3833 }
3834
3835 return false;
3836}
3837
ad03a744
RB
3838/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3839 FMT and ARG are the arguments to the call; we don't fold cases with
3840 more than 2 arguments, and ARG may be null if this is a 1-argument case.
3841
3842 Return NULL_TREE if no simplification was possible, otherwise return the
3843 simplified form of the call as a tree. FCODE is the BUILT_IN_*
3844 code of the function to be simplified. */
3845
3846static bool
3847gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
3848 tree arg, enum built_in_function fcode)
3849{
3850 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3851 tree fn_putchar, fn_puts, newarg;
3852 const char *fmt_str = NULL;
3853
3854 /* If the return value is used, don't do the transformation. */
3855 if (gimple_call_lhs (stmt) != NULL_TREE)
3856 return false;
3857
3858 /* Check whether the format is a literal string constant. */
3859 fmt_str = c_getstr (fmt);
3860 if (fmt_str == NULL)
3861 return false;
3862
3863 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
3864 {
3865 /* If we're using an unlocked function, assume the other
3866 unlocked functions exist explicitly. */
3867 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
3868 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
3869 }
3870 else
3871 {
3872 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
3873 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
3874 }
3875
3876 if (!init_target_chars ())
3877 return false;
3878
3879 if (strcmp (fmt_str, target_percent_s) == 0
3880 || strchr (fmt_str, target_percent) == NULL)
3881 {
3882 const char *str;
3883
3884 if (strcmp (fmt_str, target_percent_s) == 0)
3885 {
3886 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3887 return false;
3888
3889 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3890 return false;
3891
3892 str = c_getstr (arg);
3893 if (str == NULL)
3894 return false;
3895 }
3896 else
3897 {
3898 /* The format specifier doesn't contain any '%' characters. */
3899 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
3900 && arg)
3901 return false;
3902 str = fmt_str;
3903 }
3904
3905 /* If the string was "", printf does nothing. */
3906 if (str[0] == '\0')
3907 {
3908 replace_call_with_value (gsi, NULL_TREE);
3909 return true;
3910 }
3911
3912 /* If the string has length of 1, call putchar. */
3913 if (str[1] == '\0')
3914 {
3915 /* Given printf("c"), (where c is any one character,)
3916 convert "c"[0] to an int and pass that to the replacement
3917 function. */
3918 newarg = build_int_cst (integer_type_node, str[0]);
3919 if (fn_putchar)
3920 {
3921 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
3922 replace_call_with_call_and_fold (gsi, repl);
3923 return true;
3924 }
3925 }
3926 else
3927 {
3928 /* If the string was "string\n", call puts("string"). */
3929 size_t len = strlen (str);
3930 if ((unsigned char)str[len - 1] == target_newline
3931 && (size_t) (int) len == len
3932 && (int) len > 0)
3933 {
3934 char *newstr;
ad03a744
RB
3935
3936 /* Create a NUL-terminated string that's one char shorter
3937 than the original, stripping off the trailing '\n'. */
a353fec4 3938 newstr = xstrdup (str);
ad03a744 3939 newstr[len - 1] = '\0';
a353fec4
BE
3940 newarg = build_string_literal (len, newstr);
3941 free (newstr);
ad03a744
RB
3942 if (fn_puts)
3943 {
3944 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
3945 replace_call_with_call_and_fold (gsi, repl);
3946 return true;
3947 }
3948 }
3949 else
3950 /* We'd like to arrange to call fputs(string,stdout) here,
3951 but we need stdout and don't have a way to get it yet. */
3952 return false;
3953 }
3954 }
3955
3956 /* The other optimizations can be done only on the non-va_list variants. */
3957 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3958 return false;
3959
3960 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
3961 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
3962 {
3963 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3964 return false;
3965 if (fn_puts)
3966 {
3967 gcall *repl = gimple_build_call (fn_puts, 1, arg);
3968 replace_call_with_call_and_fold (gsi, repl);
3969 return true;
3970 }
3971 }
3972
3973 /* If the format specifier was "%c", call __builtin_putchar(arg). */
3974 else if (strcmp (fmt_str, target_percent_c) == 0)
3975 {
3976 if (!arg || ! useless_type_conversion_p (integer_type_node,
3977 TREE_TYPE (arg)))
3978 return false;
3979 if (fn_putchar)
3980 {
3981 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
3982 replace_call_with_call_and_fold (gsi, repl);
3983 return true;
3984 }
3985 }
3986
3987 return false;
3988}
3989
edd7ae68 3990
fef5a0d9
RB
3991
3992/* Fold a call to __builtin_strlen with known length LEN. */
3993
3994static bool
dcb7fae2 3995gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
fef5a0d9 3996{
355fe088 3997 gimple *stmt = gsi_stmt (*gsi);
e08341bb 3998 tree arg = gimple_call_arg (stmt, 0);
c42d0aa0
MS
3999
4000 wide_int minlen;
4001 wide_int maxlen;
4002
5d6655eb 4003 c_strlen_data lendata = { };
03c4a945 4004 if (get_range_strlen (arg, &lendata, /* eltsize = */ 1)
5d6655eb
MS
4005 && !lendata.decl
4006 && lendata.minlen && TREE_CODE (lendata.minlen) == INTEGER_CST
4007 && lendata.maxlen && TREE_CODE (lendata.maxlen) == INTEGER_CST)
c42d0aa0
MS
4008 {
4009 /* The range of lengths refers to either a single constant
4010 string or to the longest and shortest constant string
4011 referenced by the argument of the strlen() call, or to
4012 the strings that can possibly be stored in the arrays
4013 the argument refers to. */
5d6655eb
MS
4014 minlen = wi::to_wide (lendata.minlen);
4015 maxlen = wi::to_wide (lendata.maxlen);
c42d0aa0
MS
4016 }
4017 else
4018 {
4019 unsigned prec = TYPE_PRECISION (sizetype);
4020
4021 minlen = wi::shwi (0, prec);
4022 maxlen = wi::to_wide (max_object_size (), prec) - 2;
4023 }
4024
d3eac7d9
JJ
4025 /* For -fsanitize=address, don't optimize the upper bound of the
4026 length to be able to diagnose UB on non-zero terminated arrays. */
4027 if (sanitize_flags_p (SANITIZE_ADDRESS))
4028 maxlen = wi::max_value (TYPE_PRECISION (sizetype), UNSIGNED);
4029
c42d0aa0
MS
4030 if (minlen == maxlen)
4031 {
5d6655eb
MS
4032 /* Fold the strlen call to a constant. */
4033 tree type = TREE_TYPE (lendata.minlen);
4034 tree len = force_gimple_operand_gsi (gsi,
4035 wide_int_to_tree (type, minlen),
4036 true, NULL, true, GSI_SAME_STMT);
4037 replace_call_with_value (gsi, len);
c42d0aa0
MS
4038 return true;
4039 }
4040
d4bf6975 4041 /* Set the strlen() range to [0, MAXLEN]. */
a7bf6c08 4042 if (tree lhs = gimple_call_lhs (stmt))
34fcf41e 4043 set_strlen_range (lhs, minlen, maxlen);
c42d0aa0
MS
4044
4045 return false;
cbdd87d4
RG
4046}
4047
48126138
NS
4048/* Fold a call to __builtin_acc_on_device. */
4049
4050static bool
4051gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
4052{
4053 /* Defer folding until we know which compiler we're in. */
4054 if (symtab->state != EXPANSION)
4055 return false;
4056
4057 unsigned val_host = GOMP_DEVICE_HOST;
4058 unsigned val_dev = GOMP_DEVICE_NONE;
4059
4060#ifdef ACCEL_COMPILER
4061 val_host = GOMP_DEVICE_NOT_HOST;
4062 val_dev = ACCEL_COMPILER_acc_device;
4063#endif
4064
4065 location_t loc = gimple_location (gsi_stmt (*gsi));
4066
4067 tree host_eq = make_ssa_name (boolean_type_node);
4068 gimple *host_ass = gimple_build_assign
4069 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
4070 gimple_set_location (host_ass, loc);
4071 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
4072
4073 tree dev_eq = make_ssa_name (boolean_type_node);
4074 gimple *dev_ass = gimple_build_assign
4075 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
4076 gimple_set_location (dev_ass, loc);
4077 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
4078
4079 tree result = make_ssa_name (boolean_type_node);
4080 gimple *result_ass = gimple_build_assign
4081 (result, BIT_IOR_EXPR, host_eq, dev_eq);
4082 gimple_set_location (result_ass, loc);
4083 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
4084
4085 replace_call_with_value (gsi, result);
4086
4087 return true;
4088}
cbdd87d4 4089
fe75f732
PK
4090/* Fold realloc (0, n) -> malloc (n). */
4091
4092static bool
4093gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
4094{
4095 gimple *stmt = gsi_stmt (*gsi);
4096 tree arg = gimple_call_arg (stmt, 0);
4097 tree size = gimple_call_arg (stmt, 1);
4098
4099 if (operand_equal_p (arg, null_pointer_node, 0))
4100 {
4101 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
4102 if (fn_malloc)
4103 {
4104 gcall *repl = gimple_build_call (fn_malloc, 1, size);
4105 replace_call_with_call_and_fold (gsi, repl);
4106 return true;
4107 }
4108 }
4109 return false;
4110}
4111
4f4fa250
JJ
4112/* Number of bytes into which any type but aggregate, vector or
4113 _BitInt types should fit. */
1bea0d0a
JJ
4114static constexpr size_t clear_padding_unit
4115 = MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT;
4116/* Buffer size on which __builtin_clear_padding folding code works. */
4117static const size_t clear_padding_buf_size = 32 * clear_padding_unit;
4118
4119/* Data passed through __builtin_clear_padding folding. */
4120struct clear_padding_struct {
4121 location_t loc;
896048cf
JJ
4122 /* 0 during __builtin_clear_padding folding, nonzero during
4123 clear_type_padding_in_mask. In that case, instead of clearing the
4124 non-padding bits in union_ptr array clear the padding bits in there. */
4125 bool clear_in_mask;
1bea0d0a
JJ
4126 tree base;
4127 tree alias_type;
4128 gimple_stmt_iterator *gsi;
4129 /* Alignment of buf->base + 0. */
4130 unsigned align;
4131 /* Offset from buf->base. Should be always a multiple of UNITS_PER_WORD. */
4132 HOST_WIDE_INT off;
4133 /* Number of padding bytes before buf->off that don't have padding clear
4134 code emitted yet. */
4135 HOST_WIDE_INT padding_bytes;
4136 /* The size of the whole object. Never emit code to touch
4137 buf->base + buf->sz or following bytes. */
4138 HOST_WIDE_INT sz;
4139 /* Number of bytes recorded in buf->buf. */
4140 size_t size;
4141 /* When inside union, instead of emitting code we and bits inside of
4142 the union_ptr array. */
4143 unsigned char *union_ptr;
4144 /* Set bits mean padding bits that need to be cleared by the builtin. */
4145 unsigned char buf[clear_padding_buf_size + clear_padding_unit];
4146};
4147
4148/* Emit code to clear padding requested in BUF->buf - set bits
4149 in there stand for padding that should be cleared. FULL is true
4150 if everything from the buffer should be flushed, otherwise
4151 it can leave up to 2 * clear_padding_unit bytes for further
4152 processing. */
4153
4154static void
4155clear_padding_flush (clear_padding_struct *buf, bool full)
4156{
4157 gcc_assert ((clear_padding_unit % UNITS_PER_WORD) == 0);
4158 if (!full && buf->size < 2 * clear_padding_unit)
4159 return;
4160 gcc_assert ((buf->off % UNITS_PER_WORD) == 0);
4161 size_t end = buf->size;
4162 if (!full)
4163 end = ((end - clear_padding_unit - 1) / clear_padding_unit
4164 * clear_padding_unit);
4165 size_t padding_bytes = buf->padding_bytes;
4166 if (buf->union_ptr)
4167 {
896048cf
JJ
4168 if (buf->clear_in_mask)
4169 {
4170 /* During clear_type_padding_in_mask, clear the padding
4171 bits set in buf->buf in the buf->union_ptr mask. */
4172 for (size_t i = 0; i < end; i++)
4173 {
4174 if (buf->buf[i] == (unsigned char) ~0)
4175 padding_bytes++;
4176 else
4177 {
4178 memset (&buf->union_ptr[buf->off + i - padding_bytes],
4179 0, padding_bytes);
4180 padding_bytes = 0;
4181 buf->union_ptr[buf->off + i] &= ~buf->buf[i];
4182 }
4183 }
4184 if (full)
4185 {
4186 memset (&buf->union_ptr[buf->off + end - padding_bytes],
4187 0, padding_bytes);
4188 buf->off = 0;
4189 buf->size = 0;
4190 buf->padding_bytes = 0;
4191 }
4192 else
4193 {
4194 memmove (buf->buf, buf->buf + end, buf->size - end);
4195 buf->off += end;
4196 buf->size -= end;
4197 buf->padding_bytes = padding_bytes;
4198 }
4199 return;
4200 }
1bea0d0a
JJ
4201 /* Inside of a union, instead of emitting any code, instead
4202 clear all bits in the union_ptr buffer that are clear
4203 in buf. Whole padding bytes don't clear anything. */
4204 for (size_t i = 0; i < end; i++)
4205 {
4206 if (buf->buf[i] == (unsigned char) ~0)
4207 padding_bytes++;
4208 else
4209 {
4210 padding_bytes = 0;
4211 buf->union_ptr[buf->off + i] &= buf->buf[i];
4212 }
4213 }
4214 if (full)
4215 {
4216 buf->off = 0;
4217 buf->size = 0;
4218 buf->padding_bytes = 0;
4219 }
4220 else
4221 {
4222 memmove (buf->buf, buf->buf + end, buf->size - end);
4223 buf->off += end;
4224 buf->size -= end;
4225 buf->padding_bytes = padding_bytes;
4226 }
4227 return;
4228 }
4229 size_t wordsize = UNITS_PER_WORD;
4230 for (size_t i = 0; i < end; i += wordsize)
4231 {
4232 size_t nonzero_first = wordsize;
4233 size_t nonzero_last = 0;
4adfcea0
JJ
4234 size_t zero_first = wordsize;
4235 size_t zero_last = 0;
4236 bool all_ones = true, bytes_only = true;
1bea0d0a
JJ
4237 if ((unsigned HOST_WIDE_INT) (buf->off + i + wordsize)
4238 > (unsigned HOST_WIDE_INT) buf->sz)
4239 {
4240 gcc_assert (wordsize > 1);
4241 wordsize /= 2;
4242 i -= wordsize;
4243 continue;
4244 }
4245 for (size_t j = i; j < i + wordsize && j < end; j++)
4246 {
4247 if (buf->buf[j])
4248 {
4249 if (nonzero_first == wordsize)
4250 {
4251 nonzero_first = j - i;
4252 nonzero_last = j - i;
4253 }
4254 if (nonzero_last != j - i)
4255 all_ones = false;
4256 nonzero_last = j + 1 - i;
4257 }
4adfcea0
JJ
4258 else
4259 {
4260 if (zero_first == wordsize)
4261 zero_first = j - i;
4262 zero_last = j + 1 - i;
4263 }
1bea0d0a 4264 if (buf->buf[j] != 0 && buf->buf[j] != (unsigned char) ~0)
4adfcea0
JJ
4265 {
4266 all_ones = false;
4267 bytes_only = false;
4268 }
1bea0d0a 4269 }
4adfcea0 4270 size_t padding_end = i;
1bea0d0a
JJ
4271 if (padding_bytes)
4272 {
4273 if (nonzero_first == 0
4274 && nonzero_last == wordsize
4275 && all_ones)
4276 {
4277 /* All bits are padding and we had some padding
4278 before too. Just extend it. */
4279 padding_bytes += wordsize;
4280 continue;
4281 }
1bea0d0a
JJ
4282 if (all_ones && nonzero_first == 0)
4283 {
4284 padding_bytes += nonzero_last;
4285 padding_end += nonzero_last;
4286 nonzero_first = wordsize;
4287 nonzero_last = 0;
4288 }
4adfcea0
JJ
4289 else if (bytes_only && nonzero_first == 0)
4290 {
4291 gcc_assert (zero_first && zero_first != wordsize);
4292 padding_bytes += zero_first;
4293 padding_end += zero_first;
4294 }
4295 tree atype, src;
4296 if (padding_bytes == 1)
4297 {
4298 atype = char_type_node;
4299 src = build_zero_cst (char_type_node);
4300 }
4301 else
4302 {
4303 atype = build_array_type_nelts (char_type_node, padding_bytes);
4304 src = build_constructor (atype, NULL);
4305 }
1bea0d0a
JJ
4306 tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
4307 build_int_cst (buf->alias_type,
4308 buf->off + padding_end
4309 - padding_bytes));
1bea0d0a
JJ
4310 gimple *g = gimple_build_assign (dst, src);
4311 gimple_set_location (g, buf->loc);
4312 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4313 padding_bytes = 0;
4314 buf->padding_bytes = 0;
4315 }
4316 if (nonzero_first == wordsize)
4317 /* All bits in a word are 0, there are no padding bits. */
4318 continue;
4319 if (all_ones && nonzero_last == wordsize)
4320 {
4321 /* All bits between nonzero_first and end of word are padding
4322 bits, start counting padding_bytes. */
4323 padding_bytes = nonzero_last - nonzero_first;
4324 continue;
4325 }
4adfcea0
JJ
4326 if (bytes_only)
4327 {
4328 /* If bitfields aren't involved in this word, prefer storing
4329 individual bytes or groups of them over performing a RMW
4330 operation on the whole word. */
4331 gcc_assert (i + zero_last <= end);
4332 for (size_t j = padding_end; j < i + zero_last; j++)
4333 {
4334 if (buf->buf[j])
4335 {
4336 size_t k;
4337 for (k = j; k < i + zero_last; k++)
4338 if (buf->buf[k] == 0)
4339 break;
4340 HOST_WIDE_INT off = buf->off + j;
4341 tree atype, src;
4342 if (k - j == 1)
4343 {
4344 atype = char_type_node;
4345 src = build_zero_cst (char_type_node);
4346 }
4347 else
4348 {
4349 atype = build_array_type_nelts (char_type_node, k - j);
4350 src = build_constructor (atype, NULL);
4351 }
4352 tree dst = build2_loc (buf->loc, MEM_REF, atype,
4353 buf->base,
4354 build_int_cst (buf->alias_type, off));
4355 gimple *g = gimple_build_assign (dst, src);
4356 gimple_set_location (g, buf->loc);
4357 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4358 j = k;
4359 }
4360 }
4361 if (nonzero_last == wordsize)
4362 padding_bytes = nonzero_last - zero_last;
4363 continue;
4364 }
1bea0d0a
JJ
4365 for (size_t eltsz = 1; eltsz <= wordsize; eltsz <<= 1)
4366 {
4367 if (nonzero_last - nonzero_first <= eltsz
4368 && ((nonzero_first & ~(eltsz - 1))
4369 == ((nonzero_last - 1) & ~(eltsz - 1))))
4370 {
4371 tree type;
4372 if (eltsz == 1)
4373 type = char_type_node;
4374 else
4375 type = lang_hooks.types.type_for_size (eltsz * BITS_PER_UNIT,
4376 0);
4377 size_t start = nonzero_first & ~(eltsz - 1);
4378 HOST_WIDE_INT off = buf->off + i + start;
4379 tree atype = type;
4380 if (eltsz > 1 && buf->align < TYPE_ALIGN (type))
4381 atype = build_aligned_type (type, buf->align);
4382 tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
4383 build_int_cst (buf->alias_type, off));
4384 tree src;
4385 gimple *g;
4386 if (all_ones
4387 && nonzero_first == start
4388 && nonzero_last == start + eltsz)
4389 src = build_zero_cst (type);
4390 else
4391 {
4392 src = make_ssa_name (type);
3f3246eb
QZ
4393 tree tmp_dst = unshare_expr (dst);
4394 /* The folding introduces a read from the tmp_dst, we should
4395 prevent uninitialized warning analysis from issuing warning
4396 for such fake read. In order to suppress warning only for
4397 this expr, we should set the location of tmp_dst to
4398 UNKNOWN_LOCATION first, then suppress_warning will call
4399 set_no_warning_bit to set the no_warning flag only for
4400 tmp_dst. */
4401 SET_EXPR_LOCATION (tmp_dst, UNKNOWN_LOCATION);
4402 suppress_warning (tmp_dst, OPT_Wuninitialized);
4403 g = gimple_build_assign (src, tmp_dst);
1bea0d0a
JJ
4404 gimple_set_location (g, buf->loc);
4405 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4406 tree mask = native_interpret_expr (type,
4407 buf->buf + i + start,
4408 eltsz);
4409 gcc_assert (mask && TREE_CODE (mask) == INTEGER_CST);
4410 mask = fold_build1 (BIT_NOT_EXPR, type, mask);
4411 tree src_masked = make_ssa_name (type);
4412 g = gimple_build_assign (src_masked, BIT_AND_EXPR,
4413 src, mask);
4414 gimple_set_location (g, buf->loc);
4415 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4416 src = src_masked;
4417 }
4418 g = gimple_build_assign (dst, src);
4419 gimple_set_location (g, buf->loc);
4420 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4421 break;
4422 }
4423 }
4424 }
4425 if (full)
4426 {
4427 if (padding_bytes)
4428 {
4adfcea0
JJ
4429 tree atype, src;
4430 if (padding_bytes == 1)
4431 {
4432 atype = char_type_node;
4433 src = build_zero_cst (char_type_node);
4434 }
4435 else
4436 {
4437 atype = build_array_type_nelts (char_type_node, padding_bytes);
4438 src = build_constructor (atype, NULL);
4439 }
1bea0d0a
JJ
4440 tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
4441 build_int_cst (buf->alias_type,
4442 buf->off + end
4443 - padding_bytes));
1bea0d0a
JJ
4444 gimple *g = gimple_build_assign (dst, src);
4445 gimple_set_location (g, buf->loc);
4446 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4447 }
4448 size_t end_rem = end % UNITS_PER_WORD;
4449 buf->off += end - end_rem;
4450 buf->size = end_rem;
4451 memset (buf->buf, 0, buf->size);
4452 buf->padding_bytes = 0;
4453 }
4454 else
4455 {
4456 memmove (buf->buf, buf->buf + end, buf->size - end);
4457 buf->off += end;
4458 buf->size -= end;
4459 buf->padding_bytes = padding_bytes;
4460 }
4461}
4462
4463/* Append PADDING_BYTES padding bytes. */
4464
4465static void
4466clear_padding_add_padding (clear_padding_struct *buf,
4467 HOST_WIDE_INT padding_bytes)
4468{
4469 if (padding_bytes == 0)
4470 return;
4471 if ((unsigned HOST_WIDE_INT) padding_bytes + buf->size
4472 > (unsigned HOST_WIDE_INT) clear_padding_buf_size)
4473 clear_padding_flush (buf, false);
4474 if ((unsigned HOST_WIDE_INT) padding_bytes + buf->size
4475 > (unsigned HOST_WIDE_INT) clear_padding_buf_size)
4476 {
4477 memset (buf->buf + buf->size, ~0, clear_padding_buf_size - buf->size);
4478 padding_bytes -= clear_padding_buf_size - buf->size;
4479 buf->size = clear_padding_buf_size;
4480 clear_padding_flush (buf, false);
4481 gcc_assert (buf->padding_bytes);
4482 /* At this point buf->buf[0] through buf->buf[buf->size - 1]
4483 is guaranteed to be all ones. */
4484 padding_bytes += buf->size;
4485 buf->size = padding_bytes % UNITS_PER_WORD;
4486 memset (buf->buf, ~0, buf->size);
4487 buf->off += padding_bytes - buf->size;
4488 buf->padding_bytes += padding_bytes - buf->size;
4489 }
4490 else
4491 {
4492 memset (buf->buf + buf->size, ~0, padding_bytes);
4493 buf->size += padding_bytes;
4494 }
4495}
4496
a25e0b5e 4497static void clear_padding_type (clear_padding_struct *, tree,
4498 HOST_WIDE_INT, bool);
1bea0d0a
JJ
4499
4500/* Clear padding bits of union type TYPE. */
4501
4502static void
a25e0b5e 4503clear_padding_union (clear_padding_struct *buf, tree type,
4504 HOST_WIDE_INT sz, bool for_auto_init)
1bea0d0a
JJ
4505{
4506 clear_padding_struct *union_buf;
4507 HOST_WIDE_INT start_off = 0, next_off = 0;
4508 size_t start_size = 0;
4509 if (buf->union_ptr)
4510 {
4511 start_off = buf->off + buf->size;
4512 next_off = start_off + sz;
4513 start_size = start_off % UNITS_PER_WORD;
4514 start_off -= start_size;
4515 clear_padding_flush (buf, true);
4516 union_buf = buf;
4517 }
4518 else
4519 {
4520 if (sz + buf->size > clear_padding_buf_size)
4521 clear_padding_flush (buf, false);
4522 union_buf = XALLOCA (clear_padding_struct);
4523 union_buf->loc = buf->loc;
896048cf 4524 union_buf->clear_in_mask = buf->clear_in_mask;
1bea0d0a
JJ
4525 union_buf->base = NULL_TREE;
4526 union_buf->alias_type = NULL_TREE;
4527 union_buf->gsi = NULL;
4528 union_buf->align = 0;
4529 union_buf->off = 0;
4530 union_buf->padding_bytes = 0;
4531 union_buf->sz = sz;
4532 union_buf->size = 0;
4533 if (sz + buf->size <= clear_padding_buf_size)
4534 union_buf->union_ptr = buf->buf + buf->size;
4535 else
4536 union_buf->union_ptr = XNEWVEC (unsigned char, sz);
4537 memset (union_buf->union_ptr, ~0, sz);
4538 }
4539
4540 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
a3865661 4541 if (TREE_CODE (field) == FIELD_DECL && !DECL_PADDING_P (field))
1bea0d0a 4542 {
a7285c86
JJ
4543 if (DECL_SIZE_UNIT (field) == NULL_TREE)
4544 {
4545 if (TREE_TYPE (field) == error_mark_node)
4546 continue;
4547 gcc_assert (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
4548 && !COMPLETE_TYPE_P (TREE_TYPE (field)));
a25e0b5e 4549 if (!buf->clear_in_mask && !for_auto_init)
896048cf
JJ
4550 error_at (buf->loc, "flexible array member %qD does not have "
4551 "well defined padding bits for %qs",
4552 field, "__builtin_clear_padding");
a7285c86
JJ
4553 continue;
4554 }
1bea0d0a
JJ
4555 HOST_WIDE_INT fldsz = tree_to_shwi (DECL_SIZE_UNIT (field));
4556 gcc_assert (union_buf->size == 0);
4557 union_buf->off = start_off;
4558 union_buf->size = start_size;
4559 memset (union_buf->buf, ~0, start_size);
a25e0b5e 4560 clear_padding_type (union_buf, TREE_TYPE (field), fldsz, for_auto_init);
1bea0d0a
JJ
4561 clear_padding_add_padding (union_buf, sz - fldsz);
4562 clear_padding_flush (union_buf, true);
4563 }
4564
4565 if (buf == union_buf)
4566 {
4567 buf->off = next_off;
4568 buf->size = next_off % UNITS_PER_WORD;
4569 buf->off -= buf->size;
4570 memset (buf->buf, ~0, buf->size);
4571 }
4572 else if (sz + buf->size <= clear_padding_buf_size)
4573 buf->size += sz;
4574 else
4575 {
4576 unsigned char *union_ptr = union_buf->union_ptr;
4577 while (sz)
4578 {
4579 clear_padding_flush (buf, false);
4580 HOST_WIDE_INT this_sz
4581 = MIN ((unsigned HOST_WIDE_INT) sz,
4582 clear_padding_buf_size - buf->size);
4583 memcpy (buf->buf + buf->size, union_ptr, this_sz);
4584 buf->size += this_sz;
4585 union_ptr += this_sz;
4586 sz -= this_sz;
4587 }
4588 XDELETE (union_buf->union_ptr);
4589 }
4590}
4591
4592/* The only known floating point formats with padding bits are the
4593 IEEE extended ones. */
4594
4595static bool
4596clear_padding_real_needs_padding_p (tree type)
4597{
4598 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
4599 return (fmt->b == 2
4600 && fmt->signbit_ro == fmt->signbit_rw
4601 && (fmt->signbit_ro == 79 || fmt->signbit_ro == 95));
4602}
4603
4f4fa250
JJ
/* _BitInt has padding bits if it isn't extended in the ABI and has smaller
   precision than bits in limb or corresponding number of limbs.  */

static bool
clear_padding_bitint_needs_padding_p (tree type)
{
  struct bitint_info info;
  /* Ask the target how _BitInt of this precision is laid out.  */
  bool ok = targetm.c.bitint_type_info (TYPE_PRECISION (type), &info);
  gcc_assert (ok);
  /* If the ABI says the value is sign/zero-extended to the full limbs,
     there are no undefined padding bits to clear.  */
  if (info.extended)
    return false;
  scalar_int_mode limb_mode = as_a <scalar_int_mode> (info.abi_limb_mode);
  if (TYPE_PRECISION (type) < GET_MODE_PRECISION (limb_mode))
    /* Sub-limb _BitInt: the upper bits of the single limb are padding.  */
    return true;
  else if (TYPE_PRECISION (type) == GET_MODE_PRECISION (limb_mode))
    /* Exactly one full limb: no padding.  */
    return false;
  else
    /* Multiple limbs: padding only when the precision doesn't fill the
       last limb completely.  */
    return (((unsigned) TYPE_PRECISION (type))
	    % GET_MODE_PRECISION (limb_mode)) != 0;
}
4624
1bea0d0a
JJ
4625/* Return true if TYPE might contain any padding bits. */
4626
8e1fe3f7 4627bool
1bea0d0a
JJ
4628clear_padding_type_may_have_padding_p (tree type)
4629{
4630 switch (TREE_CODE (type))
4631 {
4632 case RECORD_TYPE:
4633 case UNION_TYPE:
4634 return true;
4635 case ARRAY_TYPE:
4636 case COMPLEX_TYPE:
4637 case VECTOR_TYPE:
4638 return clear_padding_type_may_have_padding_p (TREE_TYPE (type));
4639 case REAL_TYPE:
4640 return clear_padding_real_needs_padding_p (type);
4f4fa250
JJ
4641 case BITINT_TYPE:
4642 return clear_padding_bitint_needs_padding_p (type);
1bea0d0a
JJ
4643 default:
4644 return false;
4645 }
4646}
4647
/* Emit a runtime loop:
   for (; buf.base != end; buf.base += sz)
     __builtin_clear_padding (buf.base);  */

static void
clear_padding_emit_loop (clear_padding_struct *buf, tree type,
			 tree end, bool for_auto_init)
{
  /* l1 labels the loop body, l2 the loop condition test, l3 the exit.  */
  tree l1 = create_artificial_label (buf->loc);
  tree l2 = create_artificial_label (buf->loc);
  tree l3 = create_artificial_label (buf->loc);
  /* Jump straight to the condition so a zero-iteration loop is possible.  */
  gimple *g = gimple_build_goto (l2);
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
  g = gimple_build_label (l1);
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
  /* Body: clear one element's padding and flush the pending stores.  */
  clear_padding_type (buf, type, buf->sz, for_auto_init);
  clear_padding_flush (buf, true);
  /* buf->base += sz;  */
  g = gimple_build_assign (buf->base, POINTER_PLUS_EXPR, buf->base,
			   size_int (buf->sz));
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
  g = gimple_build_label (l2);
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
  /* if (buf->base != end) goto l1; else goto l3;  */
  g = gimple_build_cond (NE_EXPR, buf->base, end, l1, l3);
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
  g = gimple_build_label (l3);
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
}
4681
/* Clear padding bits for TYPE.  Called recursively from
   gimple_fold_builtin_clear_padding.  If FOR_AUTO_INIT is true,
   the __builtin_clear_padding is not called by the end user,
   instead, it's inserted by the compiler to initialize the
   paddings of automatic variable.  Therefore, we should not
   emit the error messages for flexible array members to confuse
   the end user.  */

static void
clear_padding_type (clear_padding_struct *buf, tree type,
		    HOST_WIDE_INT sz, bool for_auto_init)
{
  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      HOST_WIDE_INT cur_pos;
      cur_pos = 0;
      /* Walk the fields in layout order, marking the gaps between them
	 (and within bit-field bytes) as padding.  */
      for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL && !DECL_PADDING_P (field))
	  {
	    tree ftype = TREE_TYPE (field);
	    if (DECL_BIT_FIELD (field))
	      {
		HOST_WIDE_INT fldsz = TYPE_PRECISION (ftype);
		/* Zero-width bit-fields occupy no value bits.  */
		if (fldsz == 0)
		  continue;
		HOST_WIDE_INT pos = int_byte_position (field);
		/* Ignore fields beyond the size we were asked to clear.  */
		if (pos >= sz)
		  continue;
		HOST_WIDE_INT bpos
		  = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
		bpos %= BITS_PER_UNIT;
		HOST_WIDE_INT end
		  = ROUND_UP (bpos + fldsz, BITS_PER_UNIT) / BITS_PER_UNIT;
		/* First mark the whole byte range as padding, then knock
		   the value bits of the bit-field back out below.  */
		if (pos + end > cur_pos)
		  {
		    clear_padding_add_padding (buf, pos + end - cur_pos);
		    cur_pos = pos + end;
		  }
		gcc_assert (cur_pos > pos
			    && ((unsigned HOST_WIDE_INT) buf->size
				>= (unsigned HOST_WIDE_INT) cur_pos - pos));
		/* P points at the first buffered byte of this bit-field.  */
		unsigned char *p = buf->buf + buf->size - (cur_pos - pos);
		if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
		  sorry_at (buf->loc, "PDP11 bit-field handling unsupported"
				      " in %qs", "__builtin_clear_padding");
		else if (BYTES_BIG_ENDIAN)
		  {
		    /* Big endian.  */
		    if (bpos + fldsz <= BITS_PER_UNIT)
		      /* Bit-field fits in a single byte.  */
		      *p &= ~(((1 << fldsz) - 1)
			      << (BITS_PER_UNIT - bpos - fldsz));
		    else
		      {
			/* Partial leading byte, whole middle bytes,
			   partial trailing byte.  */
			if (bpos)
			  {
			    *p &= ~(((1U << BITS_PER_UNIT) - 1) >> bpos);
			    p++;
			    fldsz -= BITS_PER_UNIT - bpos;
			  }
			memset (p, 0, fldsz / BITS_PER_UNIT);
			p += fldsz / BITS_PER_UNIT;
			fldsz %= BITS_PER_UNIT;
			if (fldsz)
			  *p &= ((1U << BITS_PER_UNIT) - 1) >> fldsz;
		      }
		  }
		else
		  {
		    /* Little endian.  */
		    if (bpos + fldsz <= BITS_PER_UNIT)
		      /* Bit-field fits in a single byte.  */
		      *p &= ~(((1 << fldsz) - 1) << bpos);
		    else
		      {
			/* Partial leading byte, whole middle bytes,
			   partial trailing byte.  */
			if (bpos)
			  {
			    *p &= ~(((1 << BITS_PER_UNIT) - 1) << bpos);
			    p++;
			    fldsz -= BITS_PER_UNIT - bpos;
			  }
			memset (p, 0, fldsz / BITS_PER_UNIT);
			p += fldsz / BITS_PER_UNIT;
			fldsz %= BITS_PER_UNIT;
			if (fldsz)
			  *p &= ~((1 << fldsz) - 1);
		      }
		  }
	      }
	    else if (DECL_SIZE_UNIT (field) == NULL_TREE)
	      {
		/* A field with no size is a flexible array member (or an
		   erroneous type); its padding is not well defined.  */
		if (ftype == error_mark_node)
		  continue;
		gcc_assert (TREE_CODE (ftype) == ARRAY_TYPE
			    && !COMPLETE_TYPE_P (ftype));
		if (!buf->clear_in_mask && !for_auto_init)
		  error_at (buf->loc, "flexible array member %qD does not "
				      "have well defined padding bits for %qs",
			    field, "__builtin_clear_padding");
	      }
	    else if (is_empty_type (ftype))
	      continue;
	    else
	      {
		HOST_WIDE_INT pos = int_byte_position (field);
		/* Ignore fields beyond the size we were asked to clear.  */
		if (pos >= sz)
		  continue;
		HOST_WIDE_INT fldsz = tree_to_shwi (DECL_SIZE_UNIT (field));
		gcc_assert (pos >= 0 && fldsz >= 0 && pos >= cur_pos);
		clear_padding_add_padding (buf, pos - cur_pos);
		cur_pos = pos;
		/* For C++ base subobjects use the as-base layout so tail
		   padding of the base is treated correctly.  */
		if (tree asbase = lang_hooks.types.classtype_as_base (field))
		  ftype = asbase;
		clear_padding_type (buf, ftype, fldsz, for_auto_init);
		cur_pos += fldsz;
	      }
	  }
      gcc_assert (sz >= cur_pos);
      /* Whatever remains after the last field is trailing padding.  */
      clear_padding_add_padding (buf, sz - cur_pos);
      break;
    case ARRAY_TYPE:
      HOST_WIDE_INT nelts, fldsz;
      fldsz = int_size_in_bytes (TREE_TYPE (type));
      if (fldsz == 0)
	break;
      nelts = sz / fldsz;
      if (nelts > 1
	  && sz > 8 * UNITS_PER_WORD
	  && buf->union_ptr == NULL
	  && clear_padding_type_may_have_padding_p (TREE_TYPE (type)))
	{
	  /* For sufficiently large array of more than one elements,
	     emit a runtime loop to keep code size manageable.  */
	  tree base = buf->base;
	  unsigned int prev_align = buf->align;
	  HOST_WIDE_INT off = buf->off + buf->size;
	  HOST_WIDE_INT prev_sz = buf->sz;
	  clear_padding_flush (buf, true);
	  tree elttype = TREE_TYPE (type);
	  /* Loop over the array through a temporary element pointer,
	     from &array[0] + off up to END.  */
	  buf->base = create_tmp_var (build_pointer_type (elttype));
	  tree end = make_ssa_name (TREE_TYPE (buf->base));
	  gimple *g = gimple_build_assign (buf->base, POINTER_PLUS_EXPR,
					   base, size_int (off));
	  gimple_set_location (g, buf->loc);
	  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	  g = gimple_build_assign (end, POINTER_PLUS_EXPR, buf->base,
				   size_int (sz));
	  gimple_set_location (g, buf->loc);
	  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	  buf->sz = fldsz;
	  buf->align = TYPE_ALIGN (elttype);
	  buf->off = 0;
	  buf->size = 0;
	  clear_padding_emit_loop (buf, elttype, end, for_auto_init);
	  /* Restore the caller's buffer state after the loop.  */
	  buf->base = base;
	  buf->sz = prev_sz;
	  buf->align = prev_align;
	  buf->size = off % UNITS_PER_WORD;
	  buf->off = off - buf->size;
	  memset (buf->buf, 0, buf->size);
	  break;
	}
      /* Small arrays are unrolled element by element.  */
      for (HOST_WIDE_INT i = 0; i < nelts; i++)
	clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
      break;
    case UNION_TYPE:
      clear_padding_union (buf, type, sz, for_auto_init);
      break;
    case REAL_TYPE:
      gcc_assert ((size_t) sz <= clear_padding_unit);
      if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
	clear_padding_flush (buf, false);
      if (clear_padding_real_needs_padding_p (type))
	{
	  /* Use native_interpret_real + native_encode_expr to figure out
	     which bits are padding.  Bytes start as all-ones; after the
	     round-trip, bits the format doesn't encode come back as zero,
	     so XOR-ing with ~0 leaves exactly the padding bits set.  */
	  memset (buf->buf + buf->size, ~0, sz);
	  tree cst = native_interpret_real (type, buf->buf + buf->size, sz);
	  gcc_assert (cst && TREE_CODE (cst) == REAL_CST);
	  int len = native_encode_expr (cst, buf->buf + buf->size, sz);
	  gcc_assert (len > 0 && (size_t) len == (size_t) sz);
	  for (size_t i = 0; i < (size_t) sz; i++)
	    buf->buf[buf->size + i] ^= ~0;
	}
      else
	memset (buf->buf + buf->size, 0, sz);
      buf->size += sz;
      break;
    case COMPLEX_TYPE:
      /* A complex is laid out as two consecutive component values.  */
      fldsz = int_size_in_bytes (TREE_TYPE (type));
      clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
      clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
      break;
    case VECTOR_TYPE:
      nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
      fldsz = int_size_in_bytes (TREE_TYPE (type));
      for (HOST_WIDE_INT i = 0; i < nelts; i++)
	clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
      break;
    case NULLPTR_TYPE:
      /* std::nullptr_t: every bit is padding.  */
      gcc_assert ((size_t) sz <= clear_padding_unit);
      if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
	clear_padding_flush (buf, false);
      memset (buf->buf + buf->size, ~0, sz);
      buf->size += sz;
      break;
    case BITINT_TYPE:
      {
	struct bitint_info info;
	bool ok = targetm.c.bitint_type_info (TYPE_PRECISION (type), &info);
	gcc_assert (ok);
	scalar_int_mode limb_mode
	  = as_a <scalar_int_mode> (info.abi_limb_mode);
	if (TYPE_PRECISION (type) <= GET_MODE_PRECISION (limb_mode))
	  {
	    /* Single-limb _BitInt.  */
	    gcc_assert ((size_t) sz <= clear_padding_unit);
	    if ((unsigned HOST_WIDE_INT) sz + buf->size
		> clear_padding_buf_size)
	      clear_padding_flush (buf, false);
	    if (!info.extended
		&& TYPE_PRECISION (type) < GET_MODE_PRECISION (limb_mode))
	      {
		/* Bits above the precision are padding: encode a mask
		   constant with exactly those bits set.  */
		int tprec = GET_MODE_PRECISION (limb_mode);
		int prec = TYPE_PRECISION (type);
		tree t = build_nonstandard_integer_type (tprec, 1);
		tree cst = wide_int_to_tree (t, wi::mask (prec, true, tprec));
		int len = native_encode_expr (cst, buf->buf + buf->size, sz);
		gcc_assert (len > 0 && (size_t) len == (size_t) sz);
	      }
	    else
	      memset (buf->buf + buf->size, 0, sz);
	    buf->size += sz;
	    break;
	  }
	/* Multi-limb _BitInt: process limb by limb.  */
	tree limbtype
	  = build_nonstandard_integer_type (GET_MODE_PRECISION (limb_mode), 1);
	fldsz = int_size_in_bytes (limbtype);
	nelts = int_size_in_bytes (type) / fldsz;
	for (HOST_WIDE_INT i = 0; i < nelts; i++)
	  {
	    /* The most significant limb (index depends on limb order) has
	       padding when the precision doesn't fill it completely.  */
	    if (!info.extended
		&& i == (info.big_endian ? 0 : nelts - 1)
		&& (((unsigned) TYPE_PRECISION (type))
		    % TYPE_PRECISION (limbtype)) != 0)
	      {
		int tprec = GET_MODE_PRECISION (limb_mode);
		int prec = (((unsigned) TYPE_PRECISION (type)) % tprec);
		tree cst = wide_int_to_tree (limbtype,
					     wi::mask (prec, true, tprec));
		int len = native_encode_expr (cst, buf->buf + buf->size,
					      fldsz);
		gcc_assert (len > 0 && (size_t) len == (size_t) fldsz);
		buf->size += fldsz;
	      }
	    else
	      clear_padding_type (buf, limbtype, fldsz, for_auto_init);
	  }
	break;
      }
    default:
      /* Scalar types without padding: all bytes are value bytes.  */
      gcc_assert ((size_t) sz <= clear_padding_unit);
      if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
	clear_padding_flush (buf, false);
      memset (buf->buf + buf->size, 0, sz);
      buf->size += sz;
      break;
    }
}
4949
896048cf
JJ
/* Clear padding bits of TYPE in MASK.  */

void
clear_type_padding_in_mask (tree type, unsigned char *mask)
{
  clear_padding_struct buf;
  buf.loc = UNKNOWN_LOCATION;
  /* Mask mode: instead of emitting stores, padding bits are cleared
     directly in the caller-provided MASK bytes.  */
  buf.clear_in_mask = true;
  /* No gimple emission in mask mode, so no base/alias/iterator.  */
  buf.base = NULL_TREE;
  buf.alias_type = NULL_TREE;
  buf.gsi = NULL;
  buf.align = 0;
  buf.off = 0;
  buf.padding_bytes = 0;
  buf.sz = int_size_in_bytes (type);
  buf.size = 0;
  buf.union_ptr = mask;
  /* FOR_AUTO_INIT is false: diagnostics behave as for a user call.  */
  clear_padding_type (&buf, type, buf.sz, false);
  clear_padding_flush (&buf, true);
}
4970
1bea0d0a
JJ
/* Fold __builtin_clear_padding builtin.  */

static bool
gimple_fold_builtin_clear_padding (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  gcc_assert (gimple_call_num_args (stmt) == 2);
  tree ptr = gimple_call_arg (stmt, 0);
  tree typearg = gimple_call_arg (stmt, 1);
  /* The 2nd argument of __builtin_clear_padding's value is used to
     distinguish whether this call is made by the user or by the compiler
     for automatic variable initialization.  */
  bool for_auto_init = (bool) TREE_INT_CST_LOW (typearg);
  /* The cleared type is carried as the pointee of the 2nd argument's
     (pointer) type.  */
  tree type = TREE_TYPE (TREE_TYPE (typearg));
  location_t loc = gimple_location (stmt);
  clear_padding_struct buf;
  /* Remember the statement before the call so we can tell afterwards
     whether any statements were inserted.  */
  gimple_stmt_iterator gsiprev = *gsi;
  /* This should be folded during the lower pass.  */
  gcc_assert (!gimple_in_ssa_p (cfun) && cfun->cfg == NULL);
  gcc_assert (COMPLETE_TYPE_P (type));
  gsi_prev (&gsiprev);

  buf.loc = loc;
  buf.clear_in_mask = false;
  buf.base = ptr;
  buf.alias_type = NULL_TREE;
  buf.gsi = gsi;
  buf.align = get_pointer_alignment (ptr);
  unsigned int talign = min_align_of_type (type) * BITS_PER_UNIT;
  buf.align = MAX (buf.align, talign);
  buf.off = 0;
  buf.padding_bytes = 0;
  buf.size = 0;
  buf.sz = int_size_in_bytes (type);
  buf.union_ptr = NULL;
  if (buf.sz < 0 && int_size_in_bytes (strip_array_types (type)) < 0)
    sorry_at (loc, "%s not supported for variable length aggregates",
	      "__builtin_clear_padding");
  /* The implementation currently assumes 8-bit host and target
     chars which is the case for all currently supported targets
     and hosts and is required e.g. for native_{encode,interpret}* APIs.  */
  else if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
    sorry_at (loc, "%s not supported on this target",
	      "__builtin_clear_padding");
  else if (!clear_padding_type_may_have_padding_p (type))
    ;
  else if (TREE_CODE (type) == ARRAY_TYPE && buf.sz < 0)
    {
      tree sz = TYPE_SIZE_UNIT (type);
      tree elttype = type;
      /* Only supports C/C++ VLAs and flattens all the VLA levels.  */
      while (TREE_CODE (elttype) == ARRAY_TYPE
	     && int_size_in_bytes (elttype) < 0)
	elttype = TREE_TYPE (elttype);
      HOST_WIDE_INT eltsz = int_size_in_bytes (elttype);
      gcc_assert (eltsz >= 0);
      if (eltsz)
	{
	  /* Emit a runtime loop from PTR to PTR + SZ, clearing one
	     element's padding per iteration.  */
	  buf.base = create_tmp_var (build_pointer_type (elttype));
	  tree end = make_ssa_name (TREE_TYPE (buf.base));
	  gimple *g = gimple_build_assign (buf.base, ptr);
	  gimple_set_location (g, loc);
	  gsi_insert_before (gsi, g, GSI_SAME_STMT);
	  g = gimple_build_assign (end, POINTER_PLUS_EXPR, buf.base, sz);
	  gimple_set_location (g, loc);
	  gsi_insert_before (gsi, g, GSI_SAME_STMT);
	  buf.sz = eltsz;
	  buf.align = TYPE_ALIGN (elttype);
	  buf.alias_type = build_pointer_type (elttype);
	  clear_padding_emit_loop (&buf, elttype, end, for_auto_init);
	}
    }
  else
    {
      /* The common case: clear the padding of a fixed-size object.  */
      if (!is_gimple_mem_ref_addr (buf.base))
	{
	  buf.base = make_ssa_name (TREE_TYPE (ptr));
	  gimple *g = gimple_build_assign (buf.base, ptr);
	  gimple_set_location (g, loc);
	  gsi_insert_before (gsi, g, GSI_SAME_STMT);
	}
      buf.alias_type = build_pointer_type (type);
      clear_padding_type (&buf, type, buf.sz, for_auto_init);
      clear_padding_flush (&buf, true);
    }

  /* Remove the call.  If nothing was inserted before it, replace it with
     a nop to keep the iterator valid; otherwise remove it and leave the
     iterator on the last inserted statement.  */
  gimple_stmt_iterator gsiprev2 = *gsi;
  gsi_prev (&gsiprev2);
  if (gsi_stmt (gsiprev) == gsi_stmt (gsiprev2))
    gsi_replace (gsi, gimple_build_nop (), true);
  else
    {
      gsi_remove (gsi, true);
      *gsi = gsiprev2;
    }
  return true;
}
5068
dcb7fae2
RB
/* Fold the non-target builtin at *GSI and return whether any simplification
   was made.  */

static bool
gimple_fold_builtin (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
  tree callee = gimple_call_fndecl (stmt);

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return false;

  unsigned n = gimple_call_num_args (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
  /* Dispatch to the per-builtin folders; unhandled codes fall through
     to the generic folder below.  */
  switch (fcode)
    {
    case BUILT_IN_BCMP:
      return gimple_fold_builtin_bcmp (gsi);
    case BUILT_IN_BCOPY:
      return gimple_fold_builtin_bcopy (gsi);
    case BUILT_IN_BZERO:
      return gimple_fold_builtin_bzero (gsi);

    case BUILT_IN_MEMSET:
      return gimple_fold_builtin_memset (gsi,
					 gimple_call_arg (stmt, 1),
					 gimple_call_arg (stmt, 2));
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMPCPY:
    case BUILT_IN_MEMMOVE:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), fcode);
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      return gimple_fold_builtin_sprintf_chk (gsi, fcode);
    case BUILT_IN_STRCAT_CHK:
      return gimple_fold_builtin_strcat_chk (gsi);
    case BUILT_IN_STRNCAT_CHK:
      return gimple_fold_builtin_strncat_chk (gsi);
    case BUILT_IN_STRLEN:
      return gimple_fold_builtin_strlen (gsi);
    case BUILT_IN_STRCPY:
      return gimple_fold_builtin_strcpy (gsi,
					 gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCPY:
      return gimple_fold_builtin_strncpy (gsi,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2));
    case BUILT_IN_STRCAT:
      return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCAT:
      return gimple_fold_builtin_strncat (gsi);
    /* index/rindex are the legacy BSD names for strchr/strrchr.  */
    case BUILT_IN_INDEX:
    case BUILT_IN_STRCHR:
      return gimple_fold_builtin_strchr (gsi, false);
    case BUILT_IN_RINDEX:
    case BUILT_IN_STRRCHR:
      return gimple_fold_builtin_strchr (gsi, true);
    case BUILT_IN_STRSTR:
      return gimple_fold_builtin_strstr (gsi);
    case BUILT_IN_STRCMP:
    case BUILT_IN_STRCMP_EQ:
    case BUILT_IN_STRCASECMP:
    case BUILT_IN_STRNCMP:
    case BUILT_IN_STRNCMP_EQ:
    case BUILT_IN_STRNCASECMP:
      return gimple_fold_builtin_string_compare (gsi);
    case BUILT_IN_MEMCHR:
      return gimple_fold_builtin_memchr (gsi);
    case BUILT_IN_FPUTS:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), false);
    case BUILT_IN_FPUTS_UNLOCKED:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), true);
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      return gimple_fold_builtin_memory_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     gimple_call_arg (stmt, 3),
					     fcode);
    case BUILT_IN_STPCPY:
      return gimple_fold_builtin_stpcpy (gsi);
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      return gimple_fold_builtin_stxcpy_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     fcode);
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      return gimple_fold_builtin_stxncpy_chk (gsi,
					      gimple_call_arg (stmt, 0),
					      gimple_call_arg (stmt, 1),
					      gimple_call_arg (stmt, 2),
					      gimple_call_arg (stmt, 3),
					      fcode);
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      return gimple_fold_builtin_snprintf_chk (gsi, fcode);

    /* The printf family folders are only called with the argument counts
       they understand; other arities fall through to the generic folder.  */
    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      if (n == 2 || n == 3)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1),
					    n == 3
					    ? gimple_call_arg (stmt, 2)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      if (n == 3 || n == 4)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 2),
					    n == 4
					    ? gimple_call_arg (stmt, 3)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      if (n == 1 || n == 2)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
					   n == 2
					   ? gimple_call_arg (stmt, 1)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      if (n == 2 || n == 3)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
					   n == 3
					   ? gimple_call_arg (stmt, 2)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_ACC_ON_DEVICE:
      return gimple_fold_builtin_acc_on_device (gsi,
						gimple_call_arg (stmt, 0));
    case BUILT_IN_REALLOC:
      return gimple_fold_builtin_realloc (gsi);

    case BUILT_IN_CLEAR_PADDING:
      return gimple_fold_builtin_clear_padding (gsi);

    default:;
    }

  /* Try the generic builtin folder.  */
  bool ignore = (gimple_call_lhs (stmt) == NULL);
  tree result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      else
	result = fold_convert (gimple_call_return_type (stmt), result);
      gimplify_and_update_call_from_tree (gsi, result);
      return true;
    }

  return false;
}
5247
451e8dae
NS
5248/* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
5249 function calls to constants, where possible. */
5250
5251static tree
5252fold_internal_goacc_dim (const gimple *call)
5253{
629b3d75
MJ
5254 int axis = oacc_get_ifn_dim_arg (call);
5255 int size = oacc_get_fn_dim_size (current_function_decl, axis);
451e8dae 5256 tree result = NULL_TREE;
67d2229e 5257 tree type = TREE_TYPE (gimple_call_lhs (call));
451e8dae 5258
67d2229e 5259 switch (gimple_call_internal_fn (call))
451e8dae 5260 {
67d2229e
TV
5261 case IFN_GOACC_DIM_POS:
5262 /* If the size is 1, we know the answer. */
5263 if (size == 1)
5264 result = build_int_cst (type, 0);
5265 break;
5266 case IFN_GOACC_DIM_SIZE:
5267 /* If the size is not dynamic, we know the answer. */
5268 if (size)
5269 result = build_int_cst (type, size);
5270 break;
5271 default:
5272 break;
451e8dae
NS
5273 }
5274
5275 return result;
5276}
5277
849a76a5
JJ
/* Return true if stmt is __atomic_compare_exchange_N call which is suitable
   for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
   &var where var is only addressable because of such calls.  */

bool
optimize_atomic_compare_exchange_p (gimple *stmt)
{
  /* Require the exact 6-argument builtin form, atomics inlining enabled,
     optimization on, no TSan/ASan (they intercept the library calls),
     a normal builtin call, and virtual operands present.  */
  if (gimple_call_num_args (stmt) != 6
      || !flag_inline_atomics
      || !optimize
      || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
      || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
      || !gimple_vdef (stmt)
      || !gimple_vuse (stmt))
    return false;

  tree fndecl = gimple_call_fndecl (stmt);
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
      break;
    default:
      return false;
    }

  /* The expected argument must be the address of a plain local variable.  */
  tree expected = gimple_call_arg (stmt, 1);
  if (TREE_CODE (expected) != ADDR_EXPR
      || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
    return false;

  tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
  if (!is_gimple_reg_type (etype)
      || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
      || TREE_THIS_VOLATILE (etype)
      || VECTOR_TYPE_P (etype)
      || TREE_CODE (etype) == COMPLEX_TYPE
      /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
	 might not preserve all the bits.  See PR71716.  */
      || SCALAR_FLOAT_TYPE_P (etype)
      /* Padding bits in the type would also not survive the round trip.  */
      || maybe_ne (TYPE_PRECISION (etype),
		   GET_MODE_BITSIZE (TYPE_MODE (etype))))
    return false;

  /* The weak argument must be a literal 0 or 1.  */
  tree weak = gimple_call_arg (stmt, 3);
  if (!integer_zerop (weak) && !integer_onep (weak))
    return false;

  /* The target must actually provide a compare-and-swap for this mode.  */
  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  machine_mode mode = TYPE_MODE (itype);

  if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
      == CODE_FOR_nothing
      && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
    return false;

  /* The expected variable must exactly fill the mode.  */
  if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
    return false;

  return true;
}
5343
/* Fold
   r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
   into
   _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
   i = IMAGPART_EXPR <t>;
   r = (_Bool) i;
   e = REALPART_EXPR <t>;  */

void
fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree fndecl = gimple_call_fndecl (stmt);
  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  /* The internal fn returns success and old value packed in a complex.  */
  tree ctype = build_complex_type (itype);
  tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
  bool throws = false;
  edge e = NULL;
  /* Load the expected variable into an SSA name.  */
  gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
				   expected);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);
  gimple_stmt_iterator gsiret = gsi_for_stmt (g);
  if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
    {
      /* Reinterpret the bits as the integral mode type.  */
      g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, itype,
				       gimple_assign_lhs (g)));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }
  /* Encode weakness and size into one flag operand: w * 256 + N.  */
  int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
	     + int_size_in_bytes (itype);
  g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
				  gimple_call_arg (stmt, 0),
				  gimple_assign_lhs (g),
				  gimple_call_arg (stmt, 2),
				  build_int_cst (integer_type_node, flag),
				  gimple_call_arg (stmt, 4),
				  gimple_call_arg (stmt, 5));
  tree lhs = make_ssa_name (ctype);
  gimple_call_set_lhs (g, lhs);
  gimple_move_vops (g, stmt);
  tree oldlhs = gimple_call_lhs (stmt);
  /* If the original call could throw, the extraction statements must go
     on the fall-through edge, after the (possibly throwing) new call.  */
  if (stmt_can_throw_internal (cfun, stmt))
    {
      throws = true;
      e = find_fallthru_edge (gsi_bb (*gsi)->succs);
    }
  gimple_call_set_nothrow (as_a <gcall *> (g),
			   gimple_call_nothrow_p (as_a <gcall *> (stmt)));
  gimple_call_set_lhs (stmt, NULL_TREE);
  gsi_replace (gsi, g, true);
  if (oldlhs)
    {
      /* r = (_Bool) IMAGPART_EXPR <t>;  */
      g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
			       build1 (IMAGPART_EXPR, itype, lhs));
      if (throws)
	{
	  gsi_insert_on_edge_immediate (e, g);
	  *gsi = gsi_for_stmt (g);
	}
      else
	gsi_insert_after (gsi, g, GSI_NEW_STMT);
      g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  /* e = REALPART_EXPR <t>;  (the value seen in memory).  */
  g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
			   build1 (REALPART_EXPR, itype, lhs));
  if (throws && oldlhs == NULL_TREE)
    {
      gsi_insert_on_edge_immediate (e, g);
      *gsi = gsi_for_stmt (g);
    }
  else
    gsi_insert_after (gsi, g, GSI_NEW_STMT);
  if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
    {
      /* Convert back to the expected variable's type bit-for-bit.  */
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
			       VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
				       gimple_assign_lhs (g)));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
  gsi_insert_after (gsi, g, GSI_NEW_STMT);
  /* Leave the iterator at the first inserted statement.  */
  *gsi = gsiret;
}
5431
1304953e
JJ
/* Return true if ARG0 CODE ARG1 in infinite signed precision operation
   doesn't fit into TYPE.  The test for overflow should be regardless of
   -fwrapv, and even for unsigned types.  */

bool
arith_overflowed_p (enum tree_code code, const_tree type,
		    const_tree arg0, const_tree arg1)
{
  /* widest2_int is wide enough that the operation below cannot itself
     overflow, so the exact mathematical result is computed.  */
  widest2_int warg0 = widest2_int_cst (arg0);
  widest2_int warg1 = widest2_int_cst (arg1);
  widest2_int wres;
  switch (code)
    {
    case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
    case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
    case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
    default: gcc_unreachable ();
    }
  signop sign = TYPE_SIGN (type);
  /* A negative exact result can never be represented in an unsigned
     type, whatever its precision.  */
  if (sign == UNSIGNED && wi::neg_p (wres))
    return true;
  /* Otherwise overflow iff the result needs more bits than TYPE has.  */
  return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
}
5455
bd68b33f 5456/* If IFN_{MASK,LEN,MASK_LEN}_LOAD/STORE call CALL is unconditional,
ef09afa4
JZZ
5457 return a MEM_REF for the memory it references, otherwise return null.
5458 VECTYPE is the type of the memory vector. MASK_P indicates it's for
5459 MASK if true, otherwise it's for LEN. */
868363d4
RS
5460
5461static tree
8408120f 5462gimple_fold_partial_load_store_mem_ref (gcall *call, tree vectype, bool mask_p)
868363d4
RS
5463{
5464 tree ptr = gimple_call_arg (call, 0);
5465 tree alias_align = gimple_call_arg (call, 1);
8408120f 5466 if (!tree_fits_uhwi_p (alias_align))
868363d4
RS
5467 return NULL_TREE;
5468
8408120f
KL
5469 if (mask_p)
5470 {
5471 tree mask = gimple_call_arg (call, 2);
5472 if (!integer_all_onesp (mask))
5473 return NULL_TREE;
380d62c1
KL
5474 }
5475 else
5476 {
b8806f6f
JZZ
5477 internal_fn ifn = gimple_call_internal_fn (call);
5478 int len_index = internal_fn_len_index (ifn);
5479 tree basic_len = gimple_call_arg (call, len_index);
380d62c1 5480 if (!poly_int_tree_p (basic_len))
8408120f 5481 return NULL_TREE;
b8806f6f 5482 tree bias = gimple_call_arg (call, len_index + 1);
380d62c1 5483 gcc_assert (TREE_CODE (bias) == INTEGER_CST);
bd68b33f 5484 /* For LEN_LOAD/LEN_STORE/MASK_LEN_LOAD/MASK_LEN_STORE,
f0deeefe
JZZ
5485 we don't fold when (bias + len) != VF. */
5486 if (maybe_ne (wi::to_poly_widest (basic_len) + wi::to_widest (bias),
5487 GET_MODE_NUNITS (TYPE_MODE (vectype))))
8408120f 5488 return NULL_TREE;
ef09afa4 5489
bd68b33f 5490 /* For MASK_LEN_{LOAD,STORE}, we should also check whether
ef09afa4 5491 the mask is all ones mask. */
bd68b33f 5492 if (ifn == IFN_MASK_LEN_LOAD || ifn == IFN_MASK_LEN_STORE)
ef09afa4
JZZ
5493 {
5494 tree mask = gimple_call_arg (call, internal_fn_mask_index (ifn));
5495 if (!integer_all_onesp (mask))
5496 return NULL_TREE;
5497 }
8408120f
KL
5498 }
5499
aa204d51 5500 unsigned HOST_WIDE_INT align = tree_to_uhwi (alias_align);
868363d4
RS
5501 if (TYPE_ALIGN (vectype) != align)
5502 vectype = build_aligned_type (vectype, align);
5503 tree offset = build_zero_cst (TREE_TYPE (alias_align));
5504 return fold_build2 (MEM_REF, vectype, ptr, offset);
5505}
5506
8408120f
KL
5507/* Try to fold IFN_{MASK,LEN}_LOAD call CALL. Return true on success.
5508 MASK_P indicates it's for MASK if true, otherwise it's for LEN. */
868363d4
RS
5509
5510static bool
8408120f 5511gimple_fold_partial_load (gimple_stmt_iterator *gsi, gcall *call, bool mask_p)
868363d4
RS
5512{
5513 tree lhs = gimple_call_lhs (call);
5514 if (!lhs)
5515 return false;
5516
8408120f
KL
5517 if (tree rhs
5518 = gimple_fold_partial_load_store_mem_ref (call, TREE_TYPE (lhs), mask_p))
868363d4
RS
5519 {
5520 gassign *new_stmt = gimple_build_assign (lhs, rhs);
5521 gimple_set_location (new_stmt, gimple_location (call));
5522 gimple_move_vops (new_stmt, call);
5523 gsi_replace (gsi, new_stmt, false);
5524 return true;
5525 }
5526 return false;
5527}
5528
8408120f
KL
5529/* Try to fold IFN_{MASK,LEN}_STORE call CALL. Return true on success.
5530 MASK_P indicates it's for MASK if true, otherwise it's for LEN. */
868363d4
RS
5531
5532static bool
8408120f
KL
5533gimple_fold_partial_store (gimple_stmt_iterator *gsi, gcall *call,
5534 bool mask_p)
868363d4 5535{
ef09afa4
JZZ
5536 internal_fn ifn = gimple_call_internal_fn (call);
5537 tree rhs = gimple_call_arg (call, internal_fn_stored_value_index (ifn));
8408120f
KL
5538 if (tree lhs
5539 = gimple_fold_partial_load_store_mem_ref (call, TREE_TYPE (rhs), mask_p))
868363d4
RS
5540 {
5541 gassign *new_stmt = gimple_build_assign (lhs, rhs);
5542 gimple_set_location (new_stmt, gimple_location (call));
5543 gimple_move_vops (new_stmt, call);
5544 gsi_replace (gsi, new_stmt, false);
5545 return true;
5546 }
5547 return false;
5548}
5549
cbdd87d4
RG
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree callee;
  bool changed = false;

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  /* Earlier propagation already resolved the target; warn in the
	     dump file if the type-inheritance analysis disagrees.  */
	  if (dump_file && virtual_method_call_p (callee)
	      && !possible_polymorphic_call_target_p
		    (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
						       (OBJ_TYPE_REF_EXPR (callee)))))
	    {
	      fprintf (dump_file,
		       "Type inheritance inconsistent devirtualization of ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, callee, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }

	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  changed = true;
	}
      else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
	{
	  /* Speculative devirtualization: if IPA analysis proves the set
	     of possible targets is final and at most one, fold the call.  */
	  bool final;
	  vec <cgraph_node *>targets
	    = possible_polymorphic_call_targets (callee, stmt, &final);
	  if (final && targets.length () <= 1 && dbg_cnt (devirt))
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (dump_enabled_p ())
		{
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
				   "folding virtual function call to %s\n",
				   targets.length () == 1
				   ? targets[0]->name ()
				   : "__builtin_unreachable");
		}
	      if (targets.length () == 1)
		{
		  tree fndecl = targets[0]->decl;
		  gimple_call_set_fndecl (stmt, fndecl);
		  changed = true;
		  /* If changing the call to __cxa_pure_virtual
		     or similar noreturn function, adjust gimple_call_fntype
		     too.  */
		  if (gimple_call_noreturn_p (stmt)
		      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
		      && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
		      && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
			  == void_type_node))
		    gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
		  /* If the call becomes noreturn, remove the lhs.  */
		  if (lhs
		      && gimple_call_noreturn_p (stmt)
		      && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
			  || should_remove_lhs_p (lhs)))
		    {
		      if (TREE_CODE (lhs) == SSA_NAME)
			{
			  /* Keep the SSA name defined by giving it a
			     default definition of a fresh variable.  */
			  tree var = create_tmp_var (TREE_TYPE (lhs));
			  tree def = get_or_create_ssa_default_def (cfun, var);
			  gimple *new_stmt = gimple_build_assign (lhs, def);
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      gimple_call_set_lhs (stmt, NULL_TREE);
		    }
		  maybe_remove_unused_call_args (cfun, stmt);
		}
	      else
		{
		  /* No possible target at all: the call is unreachable.  */
		  location_t loc = gimple_location (stmt);
		  gimple *new_stmt = gimple_build_builtin_unreachable (loc);
		  gimple_call_set_ctrl_altering (new_stmt, false);
		  /* If the call had a SSA name as lhs morph that into
		     an uninitialized value.  */
		  if (lhs && TREE_CODE (lhs) == SSA_NAME)
		    {
		      tree var = create_tmp_var (TREE_TYPE (lhs));
		      SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
		      SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
		      set_ssa_default_def (cfun, var, lhs);
		    }
		  gimple_move_vops (new_stmt, stmt);
		  gsi_replace (gsi, new_stmt, false);
		  return true;
		}
	    }
	}
    }

  /* Check for indirect calls that became direct calls, and then
     no longer require a static chain.  */
  if (gimple_call_chain (stmt))
    {
      tree fn = gimple_call_fndecl (stmt);
      if (fn && !DECL_STATIC_CHAIN (fn))
	{
	  gimple_call_set_chain (stmt, NULL);
	  changed = true;
	}
    }

  /* Everything below may create new statements, which INPLACE forbids.  */
  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      if (gimple_fold_builtin (gsi))
	changed = true;
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
    {
      /* Machine-dependent builtins are folded by the target hook.  */
      changed |= targetm.gimple_fold_builtin (gsi);
    }
  else if (gimple_call_internal_p (stmt))
    {
      /* Internal functions: classify the IFN.  SUBCODE is the tree code
	 of the arithmetic the IFN performs (for the overflow-checking
	 families), CPLX_RESULT says the IFN returns a _Complex pair of
	 (value, overflow-flag), UADDC_USUBC marks the carry variants
	 which take a third operand.  */
      enum tree_code subcode = ERROR_MARK;
      tree result = NULL_TREE;
      bool cplx_result = false;
      bool uaddc_usubc = false;
      tree overflow = NULL_TREE;
      switch (gimple_call_internal_fn (stmt))
	{
	case IFN_BUILTIN_EXPECT:
	  result = fold_builtin_expect (gimple_location (stmt),
					gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1),
					gimple_call_arg (stmt, 2),
					NULL_TREE);
	  break;
	case IFN_UBSAN_OBJECT_SIZE:
	  {
	    /* Drop the check when the object size is unknown (all-ones)
	       or provably large enough.  */
	    tree offset = gimple_call_arg (stmt, 1);
	    tree objsize = gimple_call_arg (stmt, 2);
	    if (integer_all_onesp (objsize)
		|| (TREE_CODE (offset) == INTEGER_CST
		    && TREE_CODE (objsize) == INTEGER_CST
		    && tree_int_cst_le (offset, objsize)))
	      {
		replace_call_with_value (gsi, NULL_TREE);
		return true;
	      }
	  }
	  break;
	case IFN_UBSAN_PTR:
	  /* A zero offset can never overflow a pointer.  */
	  if (integer_zerop (gimple_call_arg (stmt, 1)))
	    {
	      replace_call_with_value (gsi, NULL_TREE);
	      return true;
	    }
	  break;
	case IFN_UBSAN_BOUNDS:
	  {
	    /* Drop the bounds check when the index is provably in range.  */
	    tree index = gimple_call_arg (stmt, 1);
	    tree bound = gimple_call_arg (stmt, 2);
	    if (TREE_CODE (index) == INTEGER_CST
		&& TREE_CODE (bound) == INTEGER_CST)
	      {
		index = fold_convert (TREE_TYPE (bound), index);
		if (TREE_CODE (index) == INTEGER_CST
		    && tree_int_cst_lt (index, bound))
		  {
		    replace_call_with_value (gsi, NULL_TREE);
		    return true;
		  }
	      }
	  }
	  break;
	case IFN_GOACC_DIM_SIZE:
	case IFN_GOACC_DIM_POS:
	  result = fold_internal_goacc_dim (stmt);
	  break;
	case IFN_UBSAN_CHECK_ADD:
	  subcode = PLUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_SUB:
	  subcode = MINUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_MUL:
	  subcode = MULT_EXPR;
	  break;
	case IFN_ADD_OVERFLOW:
	  subcode = PLUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_SUB_OVERFLOW:
	  subcode = MINUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_MUL_OVERFLOW:
	  subcode = MULT_EXPR;
	  cplx_result = true;
	  break;
	case IFN_UADDC:
	  subcode = PLUS_EXPR;
	  cplx_result = true;
	  uaddc_usubc = true;
	  break;
	case IFN_USUBC:
	  subcode = MINUS_EXPR;
	  cplx_result = true;
	  uaddc_usubc = true;
	  break;
	case IFN_MASK_LOAD:
	  changed |= gimple_fold_partial_load (gsi, stmt, true);
	  break;
	case IFN_MASK_STORE:
	  changed |= gimple_fold_partial_store (gsi, stmt, true);
	  break;
	case IFN_LEN_LOAD:
	case IFN_MASK_LEN_LOAD:
	  changed |= gimple_fold_partial_load (gsi, stmt, false);
	  break;
	case IFN_LEN_STORE:
	case IFN_MASK_LEN_STORE:
	  changed |= gimple_fold_partial_store (gsi, stmt, false);
	  break;
	default:
	  break;
	}
      if (subcode != ERROR_MARK)
	{
	  /* Try to simplify the overflow-checking arithmetic using its
	     identity elements (0 for +/-, 0 and 1 for *).  */
	  tree arg0 = gimple_call_arg (stmt, 0);
	  tree arg1 = gimple_call_arg (stmt, 1);
	  tree arg2 = NULL_TREE;
	  tree type = TREE_TYPE (arg0);
	  if (cplx_result)
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (lhs == NULL_TREE)
		type = NULL_TREE;
	      else
		type = TREE_TYPE (TREE_TYPE (lhs));
	      if (uaddc_usubc)
		arg2 = gimple_call_arg (stmt, 2);
	    }
	  if (type == NULL_TREE)
	    ;
	  else if (uaddc_usubc)
	    {
	      if (!integer_zerop (arg2))
		;
	      /* x = y + 0 + 0; x = y - 0 - 0; */
	      else if (integer_zerop (arg1))
		result = arg0;
	      /* x = 0 + y + 0; */
	      else if (subcode != MINUS_EXPR && integer_zerop (arg0))
		result = arg1;
	      /* x = y - y - 0; */
	      else if (subcode == MINUS_EXPR
		       && operand_equal_p (arg0, arg1, 0))
		result = integer_zero_node;
	    }
	  /* x = y + 0; x = y - 0; x = y * 0; */
	  else if (integer_zerop (arg1))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg0;
	  /* x = 0 + y; x = 0 * y;  */
	  else if (subcode != MINUS_EXPR && integer_zerop (arg0))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg1;
	  /* x = y - y; */
	  else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
	    result = integer_zero_node;
	  /* x = y * 1; x = 1 * y; */
	  else if (subcode == MULT_EXPR && integer_onep (arg1))
	    result = arg0;
	  else if (subcode == MULT_EXPR && integer_onep (arg0))
	    result = arg1;
	  if (result)
	    {
	      if (result == integer_zero_node)
		result = build_zero_cst (type);
	      else if (cplx_result && TREE_TYPE (result) != type)
		{
		  /* The simplified operand may have a different type than
		     the IFN result; recheck for overflow on conversion.  */
		  if (TREE_CODE (result) == INTEGER_CST)
		    {
		      if (arith_overflowed_p (PLUS_EXPR, type, result,
					      integer_zero_node))
			overflow = build_one_cst (type);
		    }
		  else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
			    && TYPE_UNSIGNED (type))
			   || (TYPE_PRECISION (type)
			       < (TYPE_PRECISION (TREE_TYPE (result))
				  + (TYPE_UNSIGNED (TREE_TYPE (result))
				     && !TYPE_UNSIGNED (type)))))
		    result = NULL_TREE;
		  if (result)
		    result = fold_convert (type, result);
		}
	    }
	}

      if (result)
	{
	  if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
	    result = drop_tree_overflow (result);
	  if (cplx_result)
	    {
	      /* Pack (value, overflow-flag) back into the _Complex
		 result the IFN contracts to return.  */
	      if (overflow == NULL_TREE)
		overflow = build_zero_cst (TREE_TYPE (result));
	      tree ctype = build_complex_type (TREE_TYPE (result));
	      if (TREE_CODE (result) == INTEGER_CST
		  && TREE_CODE (overflow) == INTEGER_CST)
		result = build_complex (ctype, result, overflow);
	      else
		result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
				     ctype, result, overflow);
	    }
	  gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
    }

  return changed;
}
5880
e0ee10ed 5881
295adfc9
RB
5882/* Return true whether NAME has a use on STMT. Note this can return
5883 false even though there's a use on STMT if SSA operands are not
5884 up-to-date. */
89a79e96
RB
5885
5886static bool
355fe088 5887has_use_on_stmt (tree name, gimple *stmt)
89a79e96 5888{
295adfc9
RB
5889 ssa_op_iter iter;
5890 tree op;
5891 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5892 if (op == name)
89a79e96
RB
5893 return true;
5894 return false;
5895}
5896
e0ee10ed
RB
/* Worker for fold_stmt_1 dispatch to pattern based folding with
   gimple_simplify.

   Replaces *GSI with the simplification result in RCODE and OPS
   and the associated statements in *SEQ.  Does the replacement
   according to INPLACE and returns true if the operation succeeded.  */

static bool
replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
				  gimple_match_op *res_op,
				  gimple_seq *seq, bool inplace)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree *ops = res_op->ops;
  unsigned int num_ops = res_op->num_ops;

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  See also maybe_push_res_to_seq.
     As an exception allow such uses if there was a use of the
     same SSA name on the old stmt.  */
  for (unsigned int i = 0; i < num_ops; ++i)
    if (TREE_CODE (ops[i]) == SSA_NAME
	&& SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])
	&& !has_use_on_stmt (ops[i], stmt))
      return false;

  /* The first op may itself be a comparison tree; check its two
     operands for abnormals as well.  */
  if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
    for (unsigned int i = 0; i < 2; ++i)
      if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))
	  && !has_use_on_stmt (TREE_OPERAND (ops[0], i), stmt))
	return false;

  /* Don't insert new statements when INPLACE is true, even if we could
     reuse STMT for the final statement.  */
  if (inplace && !gimple_seq_empty_p (*seq))
    return false;

  if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      /* A GIMPLE_COND can directly absorb a comparison, a boolean
	 SSA name (as NAME != 0) or a constant (as always-true/false);
	 anything else must be materialized into SEQ first.  */
      gcc_assert (res_op->code.is_tree_code ());
      auto code = tree_code (res_op->code);
      if (TREE_CODE_CLASS (code) == tcc_comparison
	  /* GIMPLE_CONDs condition may not throw.  */
	  && (!flag_exceptions
	      || !cfun->can_throw_non_call_exceptions
	      || !operation_could_trap_p (code,
					  FLOAT_TYPE_P (TREE_TYPE (ops[0])),
					  false, NULL_TREE)))
	gimple_cond_set_condition (cond_stmt, code, ops[0], ops[1]);
      else if (code == SSA_NAME)
	gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
				   build_zero_cst (TREE_TYPE (ops[0])));
      else if (code == INTEGER_CST)
	{
	  if (integer_zerop (ops[0]))
	    gimple_cond_make_false (cond_stmt);
	  else
	    gimple_cond_make_true (cond_stmt);
	}
      else if (!inplace)
	{
	  tree res = maybe_push_res_to_seq (res_op, seq);
	  if (!res)
	    return false;
	  gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
				     build_zero_cst (TREE_TYPE (res)));
	}
      else
	return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi),
			     0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (is_gimple_assign (stmt)
	   && res_op->code.is_tree_code ())
    {
      /* For an assignment, rewrite the RHS in place when allowed;
	 INPLACE forbids growing the number of operands.  */
      auto code = tree_code (res_op->code);
      if (!inplace
	  || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (code))
	{
	  maybe_build_generic_op (res_op);
	  gimple_assign_set_rhs_with_ops (gsi, code,
					  res_op->op_or_null (0),
					  res_op->op_or_null (1),
					  res_op->op_or_null (2));
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      if (!gimple_seq_empty_p (*seq))
		print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	      print_gimple_stmt (dump_file, gsi_stmt (*gsi),
				 0, TDF_SLIM);
	    }
	  gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
	  return true;
	}
    }
  else if (res_op->code.is_fn_code ()
	   && gimple_call_combined_fn (stmt) == combined_fn (res_op->code))
    {
      /* Same call simplified only in its arguments: update them
	 in place.  */
      gcc_assert (num_ops == gimple_call_num_args (stmt));
      for (unsigned int i = 0; i < num_ops; ++i)
	gimple_call_set_arg (stmt, i, ops[i]);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (!inplace)
    {
      /* Fallback: materialize the result into SEQ assigning to the
	 original LHS and replace the whole statement.  */
      if (gimple_has_lhs (stmt))
	{
	  tree lhs = gimple_get_lhs (stmt);
	  if (!maybe_push_res_to_seq (res_op, seq, lhs))
	    return false;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	    }
	  gsi_replace_with_seq_vops (gsi, *seq);
	  return true;
	}
      else
	gcc_unreachable ();
    }

  return false;
}
6039
040292e7
RB
/* Canonicalize MEM_REFs invariant address operand after propagation.
   T points at the tree to canonicalize; IS_DEBUG is true when it comes
   from a debug bind, in which case failure to rewrite is tolerated
   instead of asserting.  Returns true if *T was changed.  */

static bool
maybe_canonicalize_mem_ref_addr (tree *t, bool is_debug = false)
{
  bool res = false;
  tree *orig_t = t;

  /* Look through an outer ADDR_EXPR; ORIG_T remembers the original
     position for the rewrites that need it.  */
  if (TREE_CODE (*t) == ADDR_EXPR)
    t = &TREE_OPERAND (*t, 0);

  /* The C and C++ frontends use an ARRAY_REF for indexing with their
     generic vector extension.  The actual vector referenced is
     view-converted to an array type for this purpose.  If the index
     is constant the canonical representation in the middle-end is a
     BIT_FIELD_REF so re-write the former to the latter here.  */
  if (TREE_CODE (*t) == ARRAY_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
      && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
      && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
    {
      tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
      if (VECTOR_TYPE_P (vtype))
	{
	  tree low = array_ref_low_bound (*t);
	  if (TREE_CODE (low) == INTEGER_CST)
	    {
	      if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
		{
		  /* Compute the bit offset of the accessed element and
		     only rewrite when the access stays in bounds.  */
		  widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
					    wi::to_widest (low));
		  idx = wi::mul (idx, wi::to_widest
					 (TYPE_SIZE (TREE_TYPE (*t))));
		  widest_int ext
		    = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
		  if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
		    {
		      *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
				       TREE_TYPE (*t),
				       TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
				       TYPE_SIZE (TREE_TYPE (*t)),
				       wide_int_to_tree (bitsizetype, idx));
		      res = true;
		    }
		}
	    }
	}
    }

  /* Strip handled components to get at the base reference.  */
  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
     of invariant addresses into a SSA name MEM_REF address.  */
  if (TREE_CODE (*t) == MEM_REF
      || TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (*t, 0);
      if (TREE_CODE (addr) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
	      || handled_component_p (TREE_OPERAND (addr, 0))))
	{
	  tree base;
	  poly_int64 coffset;
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						&coffset);
	  if (!base)
	    {
	      /* Debug binds may contain addresses we cannot decompose;
		 give up quietly there, assert otherwise.  */
	      if (is_debug)
		return false;
	      gcc_unreachable ();
	    }

	  /* Fold the component offset into the MEM_REF's constant
	     offset operand.  */
	  TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
	  TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
						  TREE_OPERAND (*t, 1),
						  size_int (coffset));
	  res = true;
	}
      gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
			   || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
    }

  /* Canonicalize back MEM_REFs to plain reference trees if the object
     accessed is a decl that has the same access semantics as the MEM_REF.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && MR_DEPENDENCE_CLIQUE (*t) == 0)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
      if (/* Same volatile qualification.  */
	  TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
	  /* Same TBAA behavior with -fstrict-aliasing.  */
	  && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
	  && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
	      == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
	  /* Same alignment.  */
	  && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
	  /* We have to look out here to not drop a required conversion
	     from the rhs to the lhs if *t appears on the lhs or vice-versa
	     if it appears on the rhs.  Thus require strict type
	     compatibility.  */
	  && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
	{
	  *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
	  res = true;
	}
    }

  /* Canonicalize &MEM[CST, off] inside an ADDR_EXPR to a plain
     integer constant when both offsets are known.  */
  else if (TREE_CODE (*orig_t) == ADDR_EXPR
	   && TREE_CODE (*t) == MEM_REF
	   && TREE_CODE (TREE_OPERAND (*t, 0)) == INTEGER_CST)
    {
      tree base;
      poly_int64 coffset;
      base = get_addr_base_and_unit_offset (TREE_OPERAND (*orig_t, 0),
					    &coffset);
      if (base)
	{
	  gcc_assert (TREE_CODE (base) == MEM_REF);
	  poly_int64 moffset;
	  if (mem_ref_offset (base).to_shwi (&moffset))
	    {
	      coffset += moffset;
	      if (wi::to_poly_wide (TREE_OPERAND (base, 0)).to_shwi (&moffset))
		{
		  coffset += moffset;
		  *orig_t = build_int_cst (TREE_TYPE (*orig_t), coffset);
		  return true;
		}
	    }
	}
    }

  /* Canonicalize TARGET_MEM_REF in particular with respect to
     the indexes becoming constant.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  if (TREE_CODE (*orig_t) == ADDR_EXPR)
	    recompute_tree_invariant_for_addr_expr (*orig_t);
	  res = true;
	}
    }

  return res;
}
6192
cbdd87d4
RG
6193/* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
6194 distinguishes both cases. */
6195
6196static bool
e0ee10ed 6197fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
cbdd87d4
RG
6198{
6199 bool changed = false;
355fe088 6200 gimple *stmt = gsi_stmt (*gsi);
e9e2bad7 6201 bool nowarning = warning_suppressed_p (stmt, OPT_Wstrict_overflow);
cbdd87d4 6202 unsigned i;
a8b85ce9 6203 fold_defer_overflow_warnings ();
cbdd87d4 6204
040292e7
RB
6205 /* First do required canonicalization of [TARGET_]MEM_REF addresses
6206 after propagation.
6207 ??? This shouldn't be done in generic folding but in the
6208 propagation helpers which also know whether an address was
89a79e96
RB
6209 propagated.
6210 Also canonicalize operand order. */
040292e7
RB
6211 switch (gimple_code (stmt))
6212 {
6213 case GIMPLE_ASSIGN:
6214 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
6215 {
6216 tree *rhs = gimple_assign_rhs1_ptr (stmt);
6217 if ((REFERENCE_CLASS_P (*rhs)
6218 || TREE_CODE (*rhs) == ADDR_EXPR)
6219 && maybe_canonicalize_mem_ref_addr (rhs))
6220 changed = true;
6221 tree *lhs = gimple_assign_lhs_ptr (stmt);
6222 if (REFERENCE_CLASS_P (*lhs)
6223 && maybe_canonicalize_mem_ref_addr (lhs))
6224 changed = true;
911b6338
AP
6225 /* Canonicalize &MEM[ssa_n, CST] to ssa_n p+ CST.
6226 This cannot be done in maybe_canonicalize_mem_ref_addr
6227 as the gimple now has two operands rather than one.
6228 The same reason why this can't be done in
6229 maybe_canonicalize_mem_ref_addr is the same reason why
6230 this can't be done inplace. */
6231 if (!inplace && TREE_CODE (*rhs) == ADDR_EXPR)
6232 {
6233 tree inner = TREE_OPERAND (*rhs, 0);
6234 if (TREE_CODE (inner) == MEM_REF
6235 && TREE_CODE (TREE_OPERAND (inner, 0)) == SSA_NAME
6236 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
6237 {
6238 tree ptr = TREE_OPERAND (inner, 0);
6239 tree addon = TREE_OPERAND (inner, 1);
6240 addon = fold_convert (sizetype, addon);
6241 gimple_assign_set_rhs_with_ops (gsi, POINTER_PLUS_EXPR,
6242 ptr, addon);
6243 changed = true;
6244 stmt = gsi_stmt (*gsi);
6245 }
6246 }
040292e7 6247 }
89a79e96
RB
6248 else
6249 {
6250 /* Canonicalize operand order. */
6251 enum tree_code code = gimple_assign_rhs_code (stmt);
6252 if (TREE_CODE_CLASS (code) == tcc_comparison
6253 || commutative_tree_code (code)
6254 || commutative_ternary_tree_code (code))
6255 {
6256 tree rhs1 = gimple_assign_rhs1 (stmt);
6257 tree rhs2 = gimple_assign_rhs2 (stmt);
14e72812 6258 if (tree_swap_operands_p (rhs1, rhs2))
89a79e96
RB
6259 {
6260 gimple_assign_set_rhs1 (stmt, rhs2);
6261 gimple_assign_set_rhs2 (stmt, rhs1);
6262 if (TREE_CODE_CLASS (code) == tcc_comparison)
6263 gimple_assign_set_rhs_code (stmt,
6264 swap_tree_comparison (code));
6265 changed = true;
6266 }
6267 }
6268 }
040292e7
RB
6269 break;
6270 case GIMPLE_CALL:
6271 {
e9fff24c
RS
6272 gcall *call = as_a<gcall *> (stmt);
6273 for (i = 0; i < gimple_call_num_args (call); ++i)
040292e7 6274 {
e9fff24c 6275 tree *arg = gimple_call_arg_ptr (call, i);
040292e7
RB
6276 if (REFERENCE_CLASS_P (*arg)
6277 && maybe_canonicalize_mem_ref_addr (arg))
6278 changed = true;
6279 }
e9fff24c 6280 tree *lhs = gimple_call_lhs_ptr (call);
040292e7
RB
6281 if (*lhs
6282 && REFERENCE_CLASS_P (*lhs)
6283 && maybe_canonicalize_mem_ref_addr (lhs))
6284 changed = true;
e9fff24c
RS
6285 if (*lhs)
6286 {
6287 combined_fn cfn = gimple_call_combined_fn (call);
6288 internal_fn ifn = associated_internal_fn (cfn, TREE_TYPE (*lhs));
6289 int opno = first_commutative_argument (ifn);
6290 if (opno >= 0)
6291 {
6292 tree arg1 = gimple_call_arg (call, opno);
6293 tree arg2 = gimple_call_arg (call, opno + 1);
6294 if (tree_swap_operands_p (arg1, arg2))
6295 {
6296 gimple_call_set_arg (call, opno, arg2);
6297 gimple_call_set_arg (call, opno + 1, arg1);
6298 changed = true;
6299 }
6300 }
6301 }
040292e7
RB
6302 break;
6303 }
6304 case GIMPLE_ASM:
6305 {
538dd0b7
DM
6306 gasm *asm_stmt = as_a <gasm *> (stmt);
6307 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
040292e7 6308 {
538dd0b7 6309 tree link = gimple_asm_output_op (asm_stmt, i);
040292e7
RB
6310 tree op = TREE_VALUE (link);
6311 if (REFERENCE_CLASS_P (op)
6312 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
6313 changed = true;
6314 }
538dd0b7 6315 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
040292e7 6316 {
538dd0b7 6317 tree link = gimple_asm_input_op (asm_stmt, i);
040292e7
RB
6318 tree op = TREE_VALUE (link);
6319 if ((REFERENCE_CLASS_P (op)
6320 || TREE_CODE (op) == ADDR_EXPR)
6321 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
6322 changed = true;
6323 }
6324 }
6325 break;
6326 case GIMPLE_DEBUG:
6327 if (gimple_debug_bind_p (stmt))
6328 {
6329 tree *val = gimple_debug_bind_get_value_ptr (stmt);
6330 if (*val
6331 && (REFERENCE_CLASS_P (*val)
6332 || TREE_CODE (*val) == ADDR_EXPR)
fabe0ede 6333 && maybe_canonicalize_mem_ref_addr (val, true))
040292e7
RB
6334 changed = true;
6335 }
6336 break;
89a79e96
RB
6337 case GIMPLE_COND:
6338 {
6339 /* Canonicalize operand order. */
6340 tree lhs = gimple_cond_lhs (stmt);
6341 tree rhs = gimple_cond_rhs (stmt);
14e72812 6342 if (tree_swap_operands_p (lhs, rhs))
89a79e96
RB
6343 {
6344 gcond *gc = as_a <gcond *> (stmt);
6345 gimple_cond_set_lhs (gc, rhs);
6346 gimple_cond_set_rhs (gc, lhs);
6347 gimple_cond_set_code (gc,
6348 swap_tree_comparison (gimple_cond_code (gc)));
6349 changed = true;
6350 }
6351 }
040292e7
RB
6352 default:;
6353 }
6354
e0ee10ed
RB
6355 /* Dispatch to pattern-based folding. */
6356 if (!inplace
6357 || is_gimple_assign (stmt)
6358 || gimple_code (stmt) == GIMPLE_COND)
6359 {
6360 gimple_seq seq = NULL;
5d75ad95
RS
6361 gimple_match_op res_op;
6362 if (gimple_simplify (stmt, &res_op, inplace ? NULL : &seq,
0ff093d8 6363 valueize, valueize))
e0ee10ed 6364 {
5d75ad95 6365 if (replace_stmt_with_simplification (gsi, &res_op, &seq, inplace))
e0ee10ed
RB
6366 changed = true;
6367 else
6368 gimple_seq_discard (seq);
6369 }
6370 }
6371
6372 stmt = gsi_stmt (*gsi);
6373
cbdd87d4
RG
6374 /* Fold the main computation performed by the statement. */
6375 switch (gimple_code (stmt))
6376 {
6377 case GIMPLE_ASSIGN:
6378 {
819ec64c
RB
6379 /* Try to canonicalize for boolean-typed X the comparisons
6380 X == 0, X == 1, X != 0, and X != 1. */
6381 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
6382 || gimple_assign_rhs_code (stmt) == NE_EXPR)
5fbcc0ed 6383 {
819ec64c
RB
6384 tree lhs = gimple_assign_lhs (stmt);
6385 tree op1 = gimple_assign_rhs1 (stmt);
6386 tree op2 = gimple_assign_rhs2 (stmt);
6387 tree type = TREE_TYPE (op1);
6388
6389 /* Check whether the comparison operands are of the same boolean
6390 type as the result type is.
6391 Check that second operand is an integer-constant with value
6392 one or zero. */
6393 if (TREE_CODE (op2) == INTEGER_CST
6394 && (integer_zerop (op2) || integer_onep (op2))
6395 && useless_type_conversion_p (TREE_TYPE (lhs), type))
6396 {
6397 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
6398 bool is_logical_not = false;
6399
6400 /* X == 0 and X != 1 is a logical-not.of X
6401 X == 1 and X != 0 is X */
6402 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
6403 || (cmp_code == NE_EXPR && integer_onep (op2)))
6404 is_logical_not = true;
6405
6406 if (is_logical_not == false)
6407 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
6408 /* Only for one-bit precision typed X the transformation
6409 !X -> ~X is valied. */
6410 else if (TYPE_PRECISION (type) == 1)
6411 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
6412 /* Otherwise we use !X -> X ^ 1. */
6413 else
6414 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
6415 build_int_cst (type, 1));
6416 changed = true;
6417 break;
6418 }
5fbcc0ed 6419 }
819ec64c
RB
6420
6421 unsigned old_num_ops = gimple_num_ops (stmt);
6422 tree lhs = gimple_assign_lhs (stmt);
6423 tree new_rhs = fold_gimple_assign (gsi);
cbdd87d4
RG
6424 if (new_rhs
6425 && !useless_type_conversion_p (TREE_TYPE (lhs),
6426 TREE_TYPE (new_rhs)))
6427 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
6428 if (new_rhs
6429 && (!inplace
6430 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
6431 {
6432 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
6433 changed = true;
6434 }
6435 break;
6436 }
6437
cbdd87d4 6438 case GIMPLE_CALL:
ceeffab0 6439 changed |= gimple_fold_call (gsi, inplace);
cbdd87d4
RG
6440 break;
6441
bd422c4a
RG
6442 case GIMPLE_DEBUG:
6443 if (gimple_debug_bind_p (stmt))
6444 {
6445 tree val = gimple_debug_bind_get_value (stmt);
a4c2e62d 6446 if (val && REFERENCE_CLASS_P (val))
bd422c4a 6447 {
0bf8cd9d 6448 tree tem = maybe_fold_reference (val);
bd422c4a
RG
6449 if (tem)
6450 {
6451 gimple_debug_bind_set_value (stmt, tem);
6452 changed = true;
6453 }
6454 }
6455 }
6456 break;
6457
cfe3d653
PK
6458 case GIMPLE_RETURN:
6459 {
6460 greturn *ret_stmt = as_a<greturn *> (stmt);
6461 tree ret = gimple_return_retval(ret_stmt);
6462
6463 if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
6464 {
6465 tree val = valueize (ret);
1af928db
RB
6466 if (val && val != ret
6467 && may_propagate_copy (ret, val))
cfe3d653
PK
6468 {
6469 gimple_return_set_retval (ret_stmt, val);
6470 changed = true;
6471 }
6472 }
6473 }
6474 break;
6475
cbdd87d4
RG
6476 default:;
6477 }
6478
6479 stmt = gsi_stmt (*gsi);
6480
a8b85ce9 6481 fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
cbdd87d4
RG
6482 return changed;
6483}
6484
e0ee10ed
RB
6485/* Valueziation callback that ends up not following SSA edges. */
6486
6487tree
6488no_follow_ssa_edges (tree)
6489{
6490 return NULL_TREE;
6491}
6492
45cc9f96
RB
6493/* Valueization callback that ends up following single-use SSA edges only. */
6494
6495tree
6496follow_single_use_edges (tree val)
6497{
6498 if (TREE_CODE (val) == SSA_NAME
6499 && !has_single_use (val))
6500 return NULL_TREE;
6501 return val;
6502}
6503
c566cc9f
RS
6504/* Valueization callback that follows all SSA edges. */
6505
6506tree
6507follow_all_ssa_edges (tree val)
6508{
6509 return val;
6510}
6511
cbdd87d4
RG
6512/* Fold the statement pointed to by GSI. In some cases, this function may
6513 replace the whole statement with a new one. Returns true iff folding
6514 makes any changes.
6515 The statement pointed to by GSI should be in valid gimple form but may
6516 be in unfolded state as resulting from for example constant propagation
6517 which can produce *&x = 0. */
6518
6519bool
6520fold_stmt (gimple_stmt_iterator *gsi)
6521{
e0ee10ed
RB
6522 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
6523}
6524
6525bool
6526fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
6527{
6528 return fold_stmt_1 (gsi, false, valueize);
cbdd87d4
RG
6529}
6530
59401b92 6531/* Perform the minimal folding on statement *GSI. Only operations like
cbdd87d4
RG
6532 *&x created by constant propagation are handled. The statement cannot
6533 be replaced with a new one. Return true if the statement was
6534 changed, false otherwise.
59401b92 6535 The statement *GSI should be in valid gimple form but may
cbdd87d4
RG
6536 be in unfolded state as resulting from for example constant propagation
6537 which can produce *&x = 0. */
6538
6539bool
59401b92 6540fold_stmt_inplace (gimple_stmt_iterator *gsi)
cbdd87d4 6541{
355fe088 6542 gimple *stmt = gsi_stmt (*gsi);
e0ee10ed 6543 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
59401b92 6544 gcc_assert (gsi_stmt (*gsi) == stmt);
cbdd87d4
RG
6545 return changed;
6546}
6547
e89065a1
SL
6548/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
6549 if EXPR is null or we don't know how.
6550 If non-null, the result always has boolean type. */
6551
6552static tree
6553canonicalize_bool (tree expr, bool invert)
6554{
6555 if (!expr)
6556 return NULL_TREE;
6557 else if (invert)
6558 {
6559 if (integer_nonzerop (expr))
6560 return boolean_false_node;
6561 else if (integer_zerop (expr))
6562 return boolean_true_node;
6563 else if (TREE_CODE (expr) == SSA_NAME)
6564 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
6565 build_int_cst (TREE_TYPE (expr), 0));
98209db3 6566 else if (COMPARISON_CLASS_P (expr))
e89065a1
SL
6567 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
6568 boolean_type_node,
6569 TREE_OPERAND (expr, 0),
6570 TREE_OPERAND (expr, 1));
6571 else
6572 return NULL_TREE;
6573 }
6574 else
6575 {
6576 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
6577 return expr;
6578 if (integer_nonzerop (expr))
6579 return boolean_true_node;
6580 else if (integer_zerop (expr))
6581 return boolean_false_node;
6582 else if (TREE_CODE (expr) == SSA_NAME)
6583 return fold_build2 (NE_EXPR, boolean_type_node, expr,
6584 build_int_cst (TREE_TYPE (expr), 0));
98209db3 6585 else if (COMPARISON_CLASS_P (expr))
e89065a1
SL
6586 return fold_build2 (TREE_CODE (expr),
6587 boolean_type_node,
6588 TREE_OPERAND (expr, 0),
6589 TREE_OPERAND (expr, 1));
6590 else
6591 return NULL_TREE;
6592 }
6593}
6594
6595/* Check to see if a boolean expression EXPR is logically equivalent to the
6596 comparison (OP1 CODE OP2). Check for various identities involving
6597 SSA_NAMEs. */
6598
6599static bool
6600same_bool_comparison_p (const_tree expr, enum tree_code code,
6601 const_tree op1, const_tree op2)
6602{
355fe088 6603 gimple *s;
e89065a1
SL
6604
6605 /* The obvious case. */
6606 if (TREE_CODE (expr) == code
6607 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
6608 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
6609 return true;
6610
6611 /* Check for comparing (name, name != 0) and the case where expr
6612 is an SSA_NAME with a definition matching the comparison. */
6613 if (TREE_CODE (expr) == SSA_NAME
6614 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
6615 {
6616 if (operand_equal_p (expr, op1, 0))
6617 return ((code == NE_EXPR && integer_zerop (op2))
6618 || (code == EQ_EXPR && integer_nonzerop (op2)));
6619 s = SSA_NAME_DEF_STMT (expr);
6620 if (is_gimple_assign (s)
6621 && gimple_assign_rhs_code (s) == code
6622 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
6623 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
6624 return true;
6625 }
6626
6627 /* If op1 is of the form (name != 0) or (name == 0), and the definition
6628 of name is a comparison, recurse. */
6629 if (TREE_CODE (op1) == SSA_NAME
6630 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
6631 {
6632 s = SSA_NAME_DEF_STMT (op1);
6633 if (is_gimple_assign (s)
6634 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
6635 {
6636 enum tree_code c = gimple_assign_rhs_code (s);
6637 if ((c == NE_EXPR && integer_zerop (op2))
6638 || (c == EQ_EXPR && integer_nonzerop (op2)))
6639 return same_bool_comparison_p (expr, c,
6640 gimple_assign_rhs1 (s),
6641 gimple_assign_rhs2 (s));
6642 if ((c == EQ_EXPR && integer_zerop (op2))
6643 || (c == NE_EXPR && integer_nonzerop (op2)))
6644 return same_bool_comparison_p (expr,
6645 invert_tree_comparison (c, false),
6646 gimple_assign_rhs1 (s),
6647 gimple_assign_rhs2 (s));
6648 }
6649 }
6650 return false;
6651}
6652
6653/* Check to see if two boolean expressions OP1 and OP2 are logically
6654 equivalent. */
6655
6656static bool
6657same_bool_result_p (const_tree op1, const_tree op2)
6658{
6659 /* Simple cases first. */
6660 if (operand_equal_p (op1, op2, 0))
6661 return true;
6662
6663 /* Check the cases where at least one of the operands is a comparison.
6664 These are a bit smarter than operand_equal_p in that they apply some
6665 identifies on SSA_NAMEs. */
98209db3 6666 if (COMPARISON_CLASS_P (op2)
e89065a1
SL
6667 && same_bool_comparison_p (op1, TREE_CODE (op2),
6668 TREE_OPERAND (op2, 0),
6669 TREE_OPERAND (op2, 1)))
6670 return true;
98209db3 6671 if (COMPARISON_CLASS_P (op1)
e89065a1
SL
6672 && same_bool_comparison_p (op2, TREE_CODE (op1),
6673 TREE_OPERAND (op1, 0),
6674 TREE_OPERAND (op1, 1)))
6675 return true;
6676
6677 /* Default case. */
6678 return false;
6679}
6680
6681/* Forward declarations for some mutually recursive functions. */
6682
6683static tree
5f487a34 6684and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
fc8d9e44 6685 enum tree_code code2, tree op2a, tree op2b, basic_block);
e89065a1 6686static tree
5f487a34 6687and_var_with_comparison (tree type, tree var, bool invert,
fc8d9e44
RB
6688 enum tree_code code2, tree op2a, tree op2b,
6689 basic_block);
e89065a1 6690static tree
5f487a34 6691and_var_with_comparison_1 (tree type, gimple *stmt,
fc8d9e44
RB
6692 enum tree_code code2, tree op2a, tree op2b,
6693 basic_block);
e89065a1 6694static tree
5f487a34 6695or_comparisons_1 (tree, enum tree_code code1, tree op1a, tree op1b,
fc8d9e44
RB
6696 enum tree_code code2, tree op2a, tree op2b,
6697 basic_block);
e89065a1 6698static tree
5f487a34 6699or_var_with_comparison (tree, tree var, bool invert,
fc8d9e44
RB
6700 enum tree_code code2, tree op2a, tree op2b,
6701 basic_block);
e89065a1 6702static tree
5f487a34 6703or_var_with_comparison_1 (tree, gimple *stmt,
fc8d9e44
RB
6704 enum tree_code code2, tree op2a, tree op2b,
6705 basic_block);
e89065a1
SL
6706
6707/* Helper function for and_comparisons_1: try to simplify the AND of the
6708 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
6709 If INVERT is true, invert the value of the VAR before doing the AND.
6710 Return NULL_EXPR if we can't simplify this to a single expression. */
6711
6712static tree
5f487a34 6713and_var_with_comparison (tree type, tree var, bool invert,
fc8d9e44
RB
6714 enum tree_code code2, tree op2a, tree op2b,
6715 basic_block outer_cond_bb)
e89065a1
SL
6716{
6717 tree t;
355fe088 6718 gimple *stmt = SSA_NAME_DEF_STMT (var);
e89065a1
SL
6719
6720 /* We can only deal with variables whose definitions are assignments. */
6721 if (!is_gimple_assign (stmt))
6722 return NULL_TREE;
6723
6724 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
6725 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
6726 Then we only have to consider the simpler non-inverted cases. */
6727 if (invert)
5f487a34 6728 t = or_var_with_comparison_1 (type, stmt,
e89065a1 6729 invert_tree_comparison (code2, false),
fc8d9e44 6730 op2a, op2b, outer_cond_bb);
e89065a1 6731 else
fc8d9e44
RB
6732 t = and_var_with_comparison_1 (type, stmt, code2, op2a, op2b,
6733 outer_cond_bb);
e89065a1
SL
6734 return canonicalize_bool (t, invert);
6735}
6736
6737/* Try to simplify the AND of the ssa variable defined by the assignment
6738 STMT with the comparison specified by (OP2A CODE2 OP2B).
6739 Return NULL_EXPR if we can't simplify this to a single expression. */
6740
6741static tree
5f487a34 6742and_var_with_comparison_1 (tree type, gimple *stmt,
fc8d9e44
RB
6743 enum tree_code code2, tree op2a, tree op2b,
6744 basic_block outer_cond_bb)
e89065a1
SL
6745{
6746 tree var = gimple_assign_lhs (stmt);
6747 tree true_test_var = NULL_TREE;
6748 tree false_test_var = NULL_TREE;
6749 enum tree_code innercode = gimple_assign_rhs_code (stmt);
6750
6751 /* Check for identities like (var AND (var == 0)) => false. */
6752 if (TREE_CODE (op2a) == SSA_NAME
6753 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
6754 {
6755 if ((code2 == NE_EXPR && integer_zerop (op2b))
6756 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
6757 {
6758 true_test_var = op2a;
6759 if (var == true_test_var)
6760 return var;
6761 }
6762 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
6763 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
6764 {
6765 false_test_var = op2a;
6766 if (var == false_test_var)
6767 return boolean_false_node;
6768 }
6769 }
6770
6771 /* If the definition is a comparison, recurse on it. */
6772 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
6773 {
5f487a34 6774 tree t = and_comparisons_1 (type, innercode,
e89065a1
SL
6775 gimple_assign_rhs1 (stmt),
6776 gimple_assign_rhs2 (stmt),
6777 code2,
6778 op2a,
fc8d9e44 6779 op2b, outer_cond_bb);
e89065a1
SL
6780 if (t)
6781 return t;
6782 }
6783
6784 /* If the definition is an AND or OR expression, we may be able to
6785 simplify by reassociating. */
eb9820c0
KT
6786 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
6787 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
e89065a1
SL
6788 {
6789 tree inner1 = gimple_assign_rhs1 (stmt);
6790 tree inner2 = gimple_assign_rhs2 (stmt);
355fe088 6791 gimple *s;
e89065a1
SL
6792 tree t;
6793 tree partial = NULL_TREE;
eb9820c0 6794 bool is_and = (innercode == BIT_AND_EXPR);
e89065a1
SL
6795
6796 /* Check for boolean identities that don't require recursive examination
6797 of inner1/inner2:
6798 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
6799 inner1 AND (inner1 OR inner2) => inner1
6800 !inner1 AND (inner1 AND inner2) => false
6801 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
6802 Likewise for similar cases involving inner2. */
6803 if (inner1 == true_test_var)
6804 return (is_and ? var : inner1);
6805 else if (inner2 == true_test_var)
6806 return (is_and ? var : inner2);
6807 else if (inner1 == false_test_var)
6808 return (is_and
6809 ? boolean_false_node
5f487a34 6810 : and_var_with_comparison (type, inner2, false, code2, op2a,
fc8d9e44 6811 op2b, outer_cond_bb));
e89065a1
SL
6812 else if (inner2 == false_test_var)
6813 return (is_and
6814 ? boolean_false_node
5f487a34 6815 : and_var_with_comparison (type, inner1, false, code2, op2a,
fc8d9e44 6816 op2b, outer_cond_bb));
e89065a1
SL
6817
6818 /* Next, redistribute/reassociate the AND across the inner tests.
6819 Compute the first partial result, (inner1 AND (op2a code op2b)) */
6820 if (TREE_CODE (inner1) == SSA_NAME
6821 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
6822 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5f487a34 6823 && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
e89065a1
SL
6824 gimple_assign_rhs1 (s),
6825 gimple_assign_rhs2 (s),
fc8d9e44
RB
6826 code2, op2a, op2b,
6827 outer_cond_bb)))
e89065a1
SL
6828 {
6829 /* Handle the AND case, where we are reassociating:
6830 (inner1 AND inner2) AND (op2a code2 op2b)
6831 => (t AND inner2)
6832 If the partial result t is a constant, we win. Otherwise
6833 continue on to try reassociating with the other inner test. */
6834 if (is_and)
6835 {
6836 if (integer_onep (t))
6837 return inner2;
6838 else if (integer_zerop (t))
6839 return boolean_false_node;
6840 }
6841
6842 /* Handle the OR case, where we are redistributing:
6843 (inner1 OR inner2) AND (op2a code2 op2b)
6844 => (t OR (inner2 AND (op2a code2 op2b))) */
8236c8eb
JJ
6845 else if (integer_onep (t))
6846 return boolean_true_node;
6847
6848 /* Save partial result for later. */
6849 partial = t;
e89065a1
SL
6850 }
6851
6852 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
6853 if (TREE_CODE (inner2) == SSA_NAME
6854 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
6855 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5f487a34 6856 && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
e89065a1
SL
6857 gimple_assign_rhs1 (s),
6858 gimple_assign_rhs2 (s),
fc8d9e44
RB
6859 code2, op2a, op2b,
6860 outer_cond_bb)))
e89065a1
SL
6861 {
6862 /* Handle the AND case, where we are reassociating:
6863 (inner1 AND inner2) AND (op2a code2 op2b)
6864 => (inner1 AND t) */
6865 if (is_and)
6866 {
6867 if (integer_onep (t))
6868 return inner1;
6869 else if (integer_zerop (t))
6870 return boolean_false_node;
8236c8eb
JJ
6871 /* If both are the same, we can apply the identity
6872 (x AND x) == x. */
6873 else if (partial && same_bool_result_p (t, partial))
6874 return t;
e89065a1
SL
6875 }
6876
6877 /* Handle the OR case. where we are redistributing:
6878 (inner1 OR inner2) AND (op2a code2 op2b)
6879 => (t OR (inner1 AND (op2a code2 op2b)))
6880 => (t OR partial) */
6881 else
6882 {
6883 if (integer_onep (t))
6884 return boolean_true_node;
6885 else if (partial)
6886 {
6887 /* We already got a simplification for the other
6888 operand to the redistributed OR expression. The
6889 interesting case is when at least one is false.
6890 Or, if both are the same, we can apply the identity
6891 (x OR x) == x. */
6892 if (integer_zerop (partial))
6893 return t;
6894 else if (integer_zerop (t))
6895 return partial;
6896 else if (same_bool_result_p (t, partial))
6897 return t;
6898 }
6899 }
6900 }
6901 }
6902 return NULL_TREE;
6903}
6904
6905/* Try to simplify the AND of two comparisons defined by
6906 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
6907 If this can be done without constructing an intermediate value,
6908 return the resulting tree; otherwise NULL_TREE is returned.
6909 This function is deliberately asymmetric as it recurses on SSA_DEFs
6910 in the first comparison but not the second. */
6911
6912static tree
5f487a34 6913and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
fc8d9e44
RB
6914 enum tree_code code2, tree op2a, tree op2b,
6915 basic_block outer_cond_bb)
e89065a1 6916{
ae22ac3c 6917 tree truth_type = truth_type_for (TREE_TYPE (op1a));
31ed6226 6918
e89065a1
SL
6919 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
6920 if (operand_equal_p (op1a, op2a, 0)
6921 && operand_equal_p (op1b, op2b, 0))
6922 {
eb9820c0 6923 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
6924 tree t = combine_comparisons (UNKNOWN_LOCATION,
6925 TRUTH_ANDIF_EXPR, code1, code2,
31ed6226 6926 truth_type, op1a, op1b);
e89065a1
SL
6927 if (t)
6928 return t;
6929 }
6930
6931 /* Likewise the swapped case of the above. */
6932 if (operand_equal_p (op1a, op2b, 0)
6933 && operand_equal_p (op1b, op2a, 0))
6934 {
eb9820c0 6935 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
6936 tree t = combine_comparisons (UNKNOWN_LOCATION,
6937 TRUTH_ANDIF_EXPR, code1,
6938 swap_tree_comparison (code2),
31ed6226 6939 truth_type, op1a, op1b);
e89065a1
SL
6940 if (t)
6941 return t;
6942 }
6943
e89065a1
SL
6944 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
6945 NAME's definition is a truth value. See if there are any simplifications
6946 that can be done against the NAME's definition. */
6947 if (TREE_CODE (op1a) == SSA_NAME
6948 && (code1 == NE_EXPR || code1 == EQ_EXPR)
6949 && (integer_zerop (op1b) || integer_onep (op1b)))
6950 {
6951 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
6952 || (code1 == NE_EXPR && integer_onep (op1b)));
355fe088 6953 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
e89065a1
SL
6954 switch (gimple_code (stmt))
6955 {
6956 case GIMPLE_ASSIGN:
6957 /* Try to simplify by copy-propagating the definition. */
5f487a34 6958 return and_var_with_comparison (type, op1a, invert, code2, op2a,
fc8d9e44 6959 op2b, outer_cond_bb);
e89065a1
SL
6960
6961 case GIMPLE_PHI:
6962 /* If every argument to the PHI produces the same result when
6963 ANDed with the second comparison, we win.
6964 Do not do this unless the type is bool since we need a bool
6965 result here anyway. */
6966 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
6967 {
6968 tree result = NULL_TREE;
6969 unsigned i;
6970 for (i = 0; i < gimple_phi_num_args (stmt); i++)
6971 {
6972 tree arg = gimple_phi_arg_def (stmt, i);
6973
6974 /* If this PHI has itself as an argument, ignore it.
6975 If all the other args produce the same result,
6976 we're still OK. */
6977 if (arg == gimple_phi_result (stmt))
6978 continue;
6979 else if (TREE_CODE (arg) == INTEGER_CST)
6980 {
6981 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
6982 {
6983 if (!result)
6984 result = boolean_false_node;
6985 else if (!integer_zerop (result))
6986 return NULL_TREE;
6987 }
6988 else if (!result)
6989 result = fold_build2 (code2, boolean_type_node,
6990 op2a, op2b);
6991 else if (!same_bool_comparison_p (result,
6992 code2, op2a, op2b))
6993 return NULL_TREE;
6994 }
0e8b84ec
JJ
6995 else if (TREE_CODE (arg) == SSA_NAME
6996 && !SSA_NAME_IS_DEFAULT_DEF (arg))
e89065a1 6997 {
6c66f733 6998 tree temp;
355fe088 6999 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6c66f733
JJ
7000 /* In simple cases we can look through PHI nodes,
7001 but we have to be careful with loops.
7002 See PR49073. */
7003 if (! dom_info_available_p (CDI_DOMINATORS)
7004 || gimple_bb (def_stmt) == gimple_bb (stmt)
7005 || dominated_by_p (CDI_DOMINATORS,
7006 gimple_bb (def_stmt),
7007 gimple_bb (stmt)))
7008 return NULL_TREE;
5f487a34 7009 temp = and_var_with_comparison (type, arg, invert, code2,
fc8d9e44
RB
7010 op2a, op2b,
7011 outer_cond_bb);
e89065a1
SL
7012 if (!temp)
7013 return NULL_TREE;
7014 else if (!result)
7015 result = temp;
7016 else if (!same_bool_result_p (result, temp))
7017 return NULL_TREE;
7018 }
7019 else
7020 return NULL_TREE;
7021 }
7022 return result;
7023 }
7024
7025 default:
7026 break;
7027 }
7028 }
7029 return NULL_TREE;
7030}
7031
fc8d9e44 7032static basic_block fosa_bb;
a86d5eca 7033static vec<std::pair<tree, flow_sensitive_info_storage> > *fosa_unwind;
fc8d9e44
RB
7034static tree
7035follow_outer_ssa_edges (tree val)
7036{
7037 if (TREE_CODE (val) == SSA_NAME
7038 && !SSA_NAME_IS_DEFAULT_DEF (val))
7039 {
7040 basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (val));
7041 if (!def_bb
7042 || def_bb == fosa_bb
7043 || (dom_info_available_p (CDI_DOMINATORS)
7044 && (def_bb == fosa_bb
7045 || dominated_by_p (CDI_DOMINATORS, fosa_bb, def_bb))))
7046 return val;
4b3874d8
RB
7047 /* We cannot temporarily rewrite stmts with undefined overflow
7048 behavior, so avoid expanding them. */
7049 if ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (val))
7050 || POINTER_TYPE_P (TREE_TYPE (val)))
7051 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (val)))
7052 return NULL_TREE;
a86d5eca
AP
7053 flow_sensitive_info_storage storage;
7054 storage.save_and_clear (val);
4b3874d8
RB
7055 /* If the definition does not dominate fosa_bb temporarily reset
7056 flow-sensitive info. */
a86d5eca 7057 fosa_unwind->safe_push (std::make_pair (val, storage));
4b3874d8 7058 return val;
fc8d9e44
RB
7059 }
7060 return val;
7061}
7062
5f487a34
LJH
7063/* Helper function for maybe_fold_and_comparisons and maybe_fold_or_comparisons
7064 : try to simplify the AND/OR of the ssa variable VAR with the comparison
7065 specified by (OP2A CODE2 OP2B) from match.pd. Return NULL_EXPR if we can't
7066 simplify this to a single expression. As we are going to lower the cost
7067 of building SSA names / gimple stmts significantly, we need to allocate
7068 them ont the stack. This will cause the code to be a bit ugly. */
7069
7070static tree
7071maybe_fold_comparisons_from_match_pd (tree type, enum tree_code code,
7072 enum tree_code code1,
7073 tree op1a, tree op1b,
7074 enum tree_code code2, tree op2a,
fc8d9e44
RB
7075 tree op2b,
7076 basic_block outer_cond_bb)
5f487a34
LJH
7077{
7078 /* Allocate gimple stmt1 on the stack. */
7079 gassign *stmt1
7080 = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
7081 gimple_init (stmt1, GIMPLE_ASSIGN, 3);
7082 gimple_assign_set_rhs_code (stmt1, code1);
7083 gimple_assign_set_rhs1 (stmt1, op1a);
7084 gimple_assign_set_rhs2 (stmt1, op1b);
fc8d9e44 7085 gimple_set_bb (stmt1, NULL);
5f487a34
LJH
7086
7087 /* Allocate gimple stmt2 on the stack. */
7088 gassign *stmt2
7089 = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
7090 gimple_init (stmt2, GIMPLE_ASSIGN, 3);
7091 gimple_assign_set_rhs_code (stmt2, code2);
7092 gimple_assign_set_rhs1 (stmt2, op2a);
7093 gimple_assign_set_rhs2 (stmt2, op2b);
fc8d9e44 7094 gimple_set_bb (stmt2, NULL);
5f487a34
LJH
7095
7096 /* Allocate SSA names(lhs1) on the stack. */
b99353c3
JJ
7097 alignas (tree_node) unsigned char lhs1buf[sizeof (tree_ssa_name)];
7098 tree lhs1 = (tree) &lhs1buf[0];
5f487a34
LJH
7099 memset (lhs1, 0, sizeof (tree_ssa_name));
7100 TREE_SET_CODE (lhs1, SSA_NAME);
7101 TREE_TYPE (lhs1) = type;
7102 init_ssa_name_imm_use (lhs1);
7103
7104 /* Allocate SSA names(lhs2) on the stack. */
b99353c3
JJ
7105 alignas (tree_node) unsigned char lhs2buf[sizeof (tree_ssa_name)];
7106 tree lhs2 = (tree) &lhs2buf[0];
5f487a34
LJH
7107 memset (lhs2, 0, sizeof (tree_ssa_name));
7108 TREE_SET_CODE (lhs2, SSA_NAME);
7109 TREE_TYPE (lhs2) = type;
7110 init_ssa_name_imm_use (lhs2);
7111
7112 gimple_assign_set_lhs (stmt1, lhs1);
7113 gimple_assign_set_lhs (stmt2, lhs2);
7114
7115 gimple_match_op op (gimple_match_cond::UNCOND, code,
7116 type, gimple_assign_lhs (stmt1),
7117 gimple_assign_lhs (stmt2));
fc8d9e44 7118 fosa_bb = outer_cond_bb;
a86d5eca 7119 auto_vec<std::pair<tree, flow_sensitive_info_storage>, 8> unwind_stack;
4b3874d8 7120 fosa_unwind = &unwind_stack;
fc8d9e44
RB
7121 if (op.resimplify (NULL, (!outer_cond_bb
7122 ? follow_all_ssa_edges : follow_outer_ssa_edges)))
5f487a34 7123 {
4b3874d8
RB
7124 fosa_unwind = NULL;
7125 for (auto p : unwind_stack)
a86d5eca 7126 p.second.restore (p.first);
5f487a34
LJH
7127 if (gimple_simplified_result_is_gimple_val (&op))
7128 {
7129 tree res = op.ops[0];
7130 if (res == lhs1)
7131 return build2 (code1, type, op1a, op1b);
7132 else if (res == lhs2)
7133 return build2 (code2, type, op2a, op2b);
7134 else
7135 return res;
7136 }
ae9c3507
ML
7137 else if (op.code.is_tree_code ()
7138 && TREE_CODE_CLASS ((tree_code)op.code) == tcc_comparison)
7139 {
7140 tree op0 = op.ops[0];
7141 tree op1 = op.ops[1];
7142 if (op0 == lhs1 || op0 == lhs2 || op1 == lhs1 || op1 == lhs2)
7143 return NULL_TREE; /* not simple */
7144
7145 return build2 ((enum tree_code)op.code, op.type, op0, op1);
7146 }
5f487a34 7147 }
4b3874d8
RB
7148 fosa_unwind = NULL;
7149 for (auto p : unwind_stack)
a86d5eca 7150 p.second.restore (p.first);
5f487a34
LJH
7151
7152 return NULL_TREE;
7153}
7154
e89065a1
SL
7155/* Try to simplify the AND of two comparisons, specified by
7156 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
7157 If this can be simplified to a single expression (without requiring
7158 introducing more SSA variables to hold intermediate values),
7159 return the resulting tree. Otherwise return NULL_TREE.
7160 If the result expression is non-null, it has boolean type. */
7161
7162tree
5f487a34
LJH
7163maybe_fold_and_comparisons (tree type,
7164 enum tree_code code1, tree op1a, tree op1b,
fc8d9e44
RB
7165 enum tree_code code2, tree op2a, tree op2b,
7166 basic_block outer_cond_bb)
e89065a1 7167{
fc8d9e44
RB
7168 if (tree t = and_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b,
7169 outer_cond_bb))
e89065a1 7170 return t;
5f487a34 7171
fc8d9e44
RB
7172 if (tree t = and_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b,
7173 outer_cond_bb))
5f487a34
LJH
7174 return t;
7175
7176 if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_AND_EXPR, code1,
7177 op1a, op1b, code2, op2a,
fc8d9e44 7178 op2b, outer_cond_bb))
5f487a34
LJH
7179 return t;
7180
7181 return NULL_TREE;
e89065a1
SL
7182}
7183
7184/* Helper function for or_comparisons_1: try to simplify the OR of the
7185 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
7186 If INVERT is true, invert the value of VAR before doing the OR.
7187 Return NULL_EXPR if we can't simplify this to a single expression. */
7188
7189static tree
5f487a34 7190or_var_with_comparison (tree type, tree var, bool invert,
fc8d9e44
RB
7191 enum tree_code code2, tree op2a, tree op2b,
7192 basic_block outer_cond_bb)
e89065a1
SL
7193{
7194 tree t;
355fe088 7195 gimple *stmt = SSA_NAME_DEF_STMT (var);
e89065a1
SL
7196
7197 /* We can only deal with variables whose definitions are assignments. */
7198 if (!is_gimple_assign (stmt))
7199 return NULL_TREE;
7200
7201 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
7202 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
7203 Then we only have to consider the simpler non-inverted cases. */
7204 if (invert)
5f487a34 7205 t = and_var_with_comparison_1 (type, stmt,
e89065a1 7206 invert_tree_comparison (code2, false),
fc8d9e44 7207 op2a, op2b, outer_cond_bb);
e89065a1 7208 else
fc8d9e44
RB
7209 t = or_var_with_comparison_1 (type, stmt, code2, op2a, op2b,
7210 outer_cond_bb);
e89065a1
SL
7211 return canonicalize_bool (t, invert);
7212}
7213
7214/* Try to simplify the OR of the ssa variable defined by the assignment
7215 STMT with the comparison specified by (OP2A CODE2 OP2B).
7216 Return NULL_EXPR if we can't simplify this to a single expression. */
7217
7218static tree
5f487a34 7219or_var_with_comparison_1 (tree type, gimple *stmt,
fc8d9e44
RB
7220 enum tree_code code2, tree op2a, tree op2b,
7221 basic_block outer_cond_bb)
e89065a1
SL
7222{
7223 tree var = gimple_assign_lhs (stmt);
7224 tree true_test_var = NULL_TREE;
7225 tree false_test_var = NULL_TREE;
7226 enum tree_code innercode = gimple_assign_rhs_code (stmt);
7227
7228 /* Check for identities like (var OR (var != 0)) => true . */
7229 if (TREE_CODE (op2a) == SSA_NAME
7230 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
7231 {
7232 if ((code2 == NE_EXPR && integer_zerop (op2b))
7233 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
7234 {
7235 true_test_var = op2a;
7236 if (var == true_test_var)
7237 return var;
7238 }
7239 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
7240 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
7241 {
7242 false_test_var = op2a;
7243 if (var == false_test_var)
7244 return boolean_true_node;
7245 }
7246 }
7247
7248 /* If the definition is a comparison, recurse on it. */
7249 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
7250 {
5f487a34 7251 tree t = or_comparisons_1 (type, innercode,
e89065a1
SL
7252 gimple_assign_rhs1 (stmt),
7253 gimple_assign_rhs2 (stmt),
fc8d9e44 7254 code2, op2a, op2b, outer_cond_bb);
e89065a1
SL
7255 if (t)
7256 return t;
7257 }
7258
7259 /* If the definition is an AND or OR expression, we may be able to
7260 simplify by reassociating. */
eb9820c0
KT
7261 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
7262 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
e89065a1
SL
7263 {
7264 tree inner1 = gimple_assign_rhs1 (stmt);
7265 tree inner2 = gimple_assign_rhs2 (stmt);
355fe088 7266 gimple *s;
e89065a1
SL
7267 tree t;
7268 tree partial = NULL_TREE;
eb9820c0 7269 bool is_or = (innercode == BIT_IOR_EXPR);
e89065a1
SL
7270
7271 /* Check for boolean identities that don't require recursive examination
7272 of inner1/inner2:
7273 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
7274 inner1 OR (inner1 AND inner2) => inner1
7275 !inner1 OR (inner1 OR inner2) => true
7276 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
7277 */
7278 if (inner1 == true_test_var)
7279 return (is_or ? var : inner1);
7280 else if (inner2 == true_test_var)
7281 return (is_or ? var : inner2);
7282 else if (inner1 == false_test_var)
7283 return (is_or
7284 ? boolean_true_node
5f487a34 7285 : or_var_with_comparison (type, inner2, false, code2, op2a,
fc8d9e44 7286 op2b, outer_cond_bb));
e89065a1
SL
7287 else if (inner2 == false_test_var)
7288 return (is_or
7289 ? boolean_true_node
5f487a34 7290 : or_var_with_comparison (type, inner1, false, code2, op2a,
fc8d9e44 7291 op2b, outer_cond_bb));
e89065a1
SL
7292
7293 /* Next, redistribute/reassociate the OR across the inner tests.
7294 Compute the first partial result, (inner1 OR (op2a code op2b)) */
7295 if (TREE_CODE (inner1) == SSA_NAME
7296 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
7297 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5f487a34 7298 && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
e89065a1
SL
7299 gimple_assign_rhs1 (s),
7300 gimple_assign_rhs2 (s),
fc8d9e44
RB
7301 code2, op2a, op2b,
7302 outer_cond_bb)))
e89065a1
SL
7303 {
7304 /* Handle the OR case, where we are reassociating:
7305 (inner1 OR inner2) OR (op2a code2 op2b)
7306 => (t OR inner2)
7307 If the partial result t is a constant, we win. Otherwise
7308 continue on to try reassociating with the other inner test. */
8236c8eb 7309 if (is_or)
e89065a1
SL
7310 {
7311 if (integer_onep (t))
7312 return boolean_true_node;
7313 else if (integer_zerop (t))
7314 return inner2;
7315 }
7316
7317 /* Handle the AND case, where we are redistributing:
7318 (inner1 AND inner2) OR (op2a code2 op2b)
7319 => (t AND (inner2 OR (op2a code op2b))) */
8236c8eb
JJ
7320 else if (integer_zerop (t))
7321 return boolean_false_node;
7322
7323 /* Save partial result for later. */
7324 partial = t;
e89065a1
SL
7325 }
7326
7327 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
7328 if (TREE_CODE (inner2) == SSA_NAME
7329 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
7330 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5f487a34 7331 && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
e89065a1
SL
7332 gimple_assign_rhs1 (s),
7333 gimple_assign_rhs2 (s),
fc8d9e44
RB
7334 code2, op2a, op2b,
7335 outer_cond_bb)))
e89065a1
SL
7336 {
7337 /* Handle the OR case, where we are reassociating:
7338 (inner1 OR inner2) OR (op2a code2 op2b)
8236c8eb
JJ
7339 => (inner1 OR t)
7340 => (t OR partial) */
7341 if (is_or)
e89065a1
SL
7342 {
7343 if (integer_zerop (t))
7344 return inner1;
7345 else if (integer_onep (t))
7346 return boolean_true_node;
8236c8eb
JJ
7347 /* If both are the same, we can apply the identity
7348 (x OR x) == x. */
7349 else if (partial && same_bool_result_p (t, partial))
7350 return t;
e89065a1
SL
7351 }
7352
7353 /* Handle the AND case, where we are redistributing:
7354 (inner1 AND inner2) OR (op2a code2 op2b)
7355 => (t AND (inner1 OR (op2a code2 op2b)))
7356 => (t AND partial) */
7357 else
7358 {
7359 if (integer_zerop (t))
7360 return boolean_false_node;
7361 else if (partial)
7362 {
7363 /* We already got a simplification for the other
7364 operand to the redistributed AND expression. The
7365 interesting case is when at least one is true.
7366 Or, if both are the same, we can apply the identity
8236c8eb 7367 (x AND x) == x. */
e89065a1
SL
7368 if (integer_onep (partial))
7369 return t;
7370 else if (integer_onep (t))
7371 return partial;
7372 else if (same_bool_result_p (t, partial))
8236c8eb 7373 return t;
e89065a1
SL
7374 }
7375 }
7376 }
7377 }
7378 return NULL_TREE;
7379}
7380
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b,
		  basic_block outer_cond_bb)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* INVERT is true when the first comparison tests NAME for false,
	 i.e. (NAME == 0) or (NAME != 1).  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return or_var_with_comparison (type, op1a, invert, code2, op2a,
					 op2b, outer_cond_bb);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ORed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
			{
			  if (!result)
			    result = boolean_true_node;
			  else if (!integer_onep (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = or_var_with_comparison (type, arg, invert, code2,
						     op2a, op2b, outer_cond_bb);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
7506
7507/* Try to simplify the OR of two comparisons, specified by
7508 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
7509 If this can be simplified to a single expression (without requiring
7510 introducing more SSA variables to hold intermediate values),
7511 return the resulting tree. Otherwise return NULL_TREE.
7512 If the result expression is non-null, it has boolean type. */
7513
7514tree
5f487a34
LJH
7515maybe_fold_or_comparisons (tree type,
7516 enum tree_code code1, tree op1a, tree op1b,
fc8d9e44
RB
7517 enum tree_code code2, tree op2a, tree op2b,
7518 basic_block outer_cond_bb)
e89065a1 7519{
fc8d9e44
RB
7520 if (tree t = or_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b,
7521 outer_cond_bb))
e89065a1 7522 return t;
cfef45c8 7523
fc8d9e44
RB
7524 if (tree t = or_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b,
7525 outer_cond_bb))
5f487a34
LJH
7526 return t;
7527
7528 if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_IOR_EXPR, code1,
7529 op1a, op1b, code2, op2a,
fc8d9e44 7530 op2b, outer_cond_bb))
5f487a34
LJH
7531 return t;
7532
7533 return NULL_TREE;
7534}
cfef45c8
RG
7535
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.

   Either NULL_TREE, a simplified but non-constant or a constant
   is returned.

   ??? This should go into a gimple-fold-inline.h file to be eventually
   privatized with the single valueize function used in the various TUs
   to avoid the indirect function call overhead.  */

tree
gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
				tree (*gvalueize) (tree))
{
  gimple_match_op res_op;
  /* ??? The SSA propagators do not correctly deal with following SSA use-def
     edges if there are intermediate VARYING defs.  For this reason
     do not follow SSA edges here even though SCCVN can technically
     just deal fine with that.  */
  if (gimple_simplify (stmt, &res_op, NULL, gvalueize, valueize))
    {
      tree res = NULL_TREE;
      if (gimple_simplified_result_is_gimple_val (&res_op))
	res = res_op.ops[0];
      else if (mprts_hook)
	res = mprts_hook (&res_op);
      if (res)
	{
	  if (dump_file && dump_flags & TDF_DETAILS)
	    {
	      fprintf (dump_file, "Match-and-simplified ");
	      print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, res);
	      fprintf (dump_file, "\n");
	    }
	  return res;
	}
    }

  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        enum tree_code subcode = gimple_assign_rhs_code (stmt);

        switch (get_gimple_rhs_class (subcode))
          {
          case GIMPLE_SINGLE_RHS:
            {
              tree rhs = gimple_assign_rhs1 (stmt);
              enum tree_code_class kind = TREE_CODE_CLASS (subcode);

              if (TREE_CODE (rhs) == SSA_NAME)
                {
                  /* If the RHS is an SSA_NAME, return its known constant value,
                     if any.  */
                  return (*valueize) (rhs);
                }
              /* Handle propagating invariant addresses into address
                 operations.  */
              else if (TREE_CODE (rhs) == ADDR_EXPR
                       && !is_gimple_min_invariant (rhs))
                {
                  poly_int64 offset = 0;
                  tree base;
                  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
                                                          &offset,
                                                          valueize);
                  if (base
                      && (CONSTANT_CLASS_P (base)
                          || decl_address_invariant_p (base)))
                    return build_invariant_address (TREE_TYPE (rhs),
                                                    base, offset);
                }
              /* A fully-populated vector CONSTRUCTOR folds to a VECTOR_CST
                 when all valueized elements are constants.  */
              else if (TREE_CODE (rhs) == CONSTRUCTOR
                       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
                       && known_eq (CONSTRUCTOR_NELTS (rhs),
                                    TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
                {
                  unsigned i, nelts;
                  tree val;

                  nelts = CONSTRUCTOR_NELTS (rhs);
                  tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
                  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
                    {
                      val = (*valueize) (val);
                      if (TREE_CODE (val) == INTEGER_CST
                          || TREE_CODE (val) == REAL_CST
                          || TREE_CODE (val) == FIXED_CST)
                        vec.quick_push (val);
                      else
                        return NULL_TREE;
                    }

                  return vec.build ();
                }
              if (subcode == OBJ_TYPE_REF)
                {
                  tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
                  /* If callee is constant, we can fold away the wrapper.  */
                  if (is_gimple_min_invariant (val))
                    return val;
                }

              if (kind == tcc_reference)
                {
                  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
                       || TREE_CODE (rhs) == REALPART_EXPR
                       || TREE_CODE (rhs) == IMAGPART_EXPR)
                      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
                      return fold_unary_loc (EXPR_LOCATION (rhs),
                                             TREE_CODE (rhs),
                                             TREE_TYPE (rhs), val);
                    }
                  else if (TREE_CODE (rhs) == BIT_FIELD_REF
                           && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
                      return fold_ternary_loc (EXPR_LOCATION (rhs),
                                               TREE_CODE (rhs),
                                               TREE_TYPE (rhs), val,
                                               TREE_OPERAND (rhs, 1),
                                               TREE_OPERAND (rhs, 2));
                    }
                  else if (TREE_CODE (rhs) == MEM_REF
                           && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
                      if (TREE_CODE (val) == ADDR_EXPR
                          && is_gimple_min_invariant (val))
                        {
                          tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
                                                  unshare_expr (val),
                                                  TREE_OPERAND (rhs, 1));
                          if (tem)
                            rhs = tem;
                        }
                    }
                  return fold_const_aggregate_ref_1 (rhs, valueize);
                }
              else if (kind == tcc_declaration)
                return get_symbol_constant_value (rhs);
              return rhs;
            }

          case GIMPLE_UNARY_RHS:
            return NULL_TREE;

          case GIMPLE_BINARY_RHS:
            /* Translate &x + CST into an invariant form suitable for
               further propagation.  */
            if (subcode == POINTER_PLUS_EXPR)
              {
                tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
                tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
                if (TREE_CODE (op0) == ADDR_EXPR
                    && TREE_CODE (op1) == INTEGER_CST)
                  {
                    tree off = fold_convert (ptr_type_node, op1);
                    return build1_loc
			(loc, ADDR_EXPR, TREE_TYPE (op0),
			 fold_build2 (MEM_REF,
				      TREE_TYPE (TREE_TYPE (op0)),
				      unshare_expr (op0), off));
                  }
              }
            /* Canonicalize bool != 0 and bool == 0 appearing after
               valueization.  While gimple_simplify handles this
               it can get confused by the ~X == 1 -> X == 0 transform
               which we cant reduce to a SSA name or a constant
               (and we have no way to tell gimple_simplify to not
               consider those transforms in the first place).  */
            else if (subcode == EQ_EXPR
                     || subcode == NE_EXPR)
              {
                tree lhs = gimple_assign_lhs (stmt);
                tree op0 = gimple_assign_rhs1 (stmt);
                if (useless_type_conversion_p (TREE_TYPE (lhs),
                                               TREE_TYPE (op0)))
                  {
                    tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
                    op0 = (*valueize) (op0);
                    /* Put a possible constant into op1 so the tests below
                       only need to look at one operand.  */
                    if (TREE_CODE (op0) == INTEGER_CST)
                      std::swap (op0, op1);
                    if (TREE_CODE (op1) == INTEGER_CST
                        && ((subcode == NE_EXPR && integer_zerop (op1))
                            || (subcode == EQ_EXPR && integer_onep (op1))))
                      return op0;
                  }
              }
            return NULL_TREE;

          case GIMPLE_TERNARY_RHS:
            {
              /* Handle ternary operators that can appear in GIMPLE form.  */
              tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
              tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
              tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
              return fold_ternary_loc (loc, subcode,
                                       TREE_TYPE (gimple_assign_lhs (stmt)),
                                       op0, op1, op2);
            }

          default:
            gcc_unreachable ();
          }
      }

    case GIMPLE_CALL:
      {
	tree fn;
	gcall *call_stmt = as_a <gcall *> (stmt);

	if (gimple_call_internal_p (stmt))
	  {
	    enum tree_code subcode = ERROR_MARK;
	    /* Map the overflow-checking internal functions onto the
	       corresponding tree codes so they can be folded below.  */
	    switch (gimple_call_internal_fn (stmt))
	      {
	      case IFN_UBSAN_CHECK_ADD:
		subcode = PLUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_SUB:
		subcode = MINUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_MUL:
		subcode = MULT_EXPR;
		break;
	      case IFN_BUILTIN_EXPECT:
		  {
		    tree arg0 = gimple_call_arg (stmt, 0);
		    tree op0 = (*valueize) (arg0);
		    if (TREE_CODE (op0) == INTEGER_CST)
		      return op0;
		    return NULL_TREE;
		  }
	      default:
		return NULL_TREE;
	      }
	    tree arg0 = gimple_call_arg (stmt, 0);
	    tree arg1 = gimple_call_arg (stmt, 1);
	    tree op0 = (*valueize) (arg0);
	    tree op1 = (*valueize) (arg1);

	    if (TREE_CODE (op0) != INTEGER_CST
		|| TREE_CODE (op1) != INTEGER_CST)
	      {
		switch (subcode)
		  {
		  case MULT_EXPR:
		    /* x * 0 = 0 * x = 0 without overflow.  */
		    if (integer_zerop (op0) || integer_zerop (op1))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  case MINUS_EXPR:
		    /* y - y = 0 without overflow.  */
		    if (operand_equal_p (op0, op1, 0))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  default:
		    break;
		  }
	      }
	    tree res
	      = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
	    /* Only accept results that provably did not overflow.  */
	    if (res
		&& TREE_CODE (res) == INTEGER_CST
		&& !TREE_OVERFLOW (res))
	      return res;
	    return NULL_TREE;
	  }

	fn = (*valueize) (gimple_call_fn (stmt));
	if (TREE_CODE (fn) == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
	    && fndecl_built_in_p (TREE_OPERAND (fn, 0))
	    && gimple_builtin_call_types_compatible_p (stmt,
						       TREE_OPERAND (fn, 0)))
	  {
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree retval;
	    unsigned i;
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      args[i] = (*valueize) (gimple_call_arg (stmt, i));
	    retval = fold_builtin_call_array (loc,
					      gimple_call_return_type (call_stmt),
					      fn, gimple_call_num_args (stmt), args);
	    if (retval)
	      {
		/* fold_call_expr wraps the result inside a NOP_EXPR.  */
		STRIP_NOPS (retval);
		retval = fold_convert (gimple_call_return_type (call_stmt),
				       retval);
	      }
	    return retval;
	  }
	return NULL_TREE;
      }

    default:
      return NULL_TREE;
    }
}
7842
7843/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
7844 Returns NULL_TREE if folding to a constant is not possible, otherwise
7845 returns a constant according to is_gimple_min_invariant. */
7846
7847tree
355fe088 7848gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
cfef45c8
RG
7849{
7850 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
7851 if (res && is_gimple_min_invariant (res))
7852 return res;
7853 return NULL_TREE;
7854}
7855
7856
7857/* The following set of functions are supposed to fold references using
7858 their constant initializers. */
7859
cfef45c8
RG
/* See if we can find constructor defining value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */
static tree
get_base_constructor (tree base, poly_int64 *bit_offset,
		      tree (*valueize)(tree))
{
  poly_int64 bit_offset2, size, max_size;
  bool reverse;

  if (TREE_CODE (base) == MEM_REF)
    {
      /* Fold the MEM_REF offset into *BIT_OFFSET; give up if it does
	 not fit a signed HOST_WIDE_INT.  */
      poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
      if (!boff.to_shwi (bit_offset))
	return NULL_TREE;

      if (valueize
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }
  else if (valueize
	   && TREE_CODE (base) == SSA_NAME)
    base = valueize (base);

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
	tree init = ctor_for_folding (base);

	/* Our semantic is exact opposite of ctor_for_folding;
	   NULL means unknown, while error_mark_node is 0.  */
	if (init == error_mark_node)
	  return NULL_TREE;
	if (!init)
	  return error_mark_node;
	return init;
      }

    case VIEW_CONVERT_EXPR:
      return get_base_constructor (TREE_OPERAND (base, 0),
				   bit_offset, valueize);

    case ARRAY_REF:
    case COMPONENT_REF:
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
				      &reverse);
      /* Only a constant-size access at a known offset can be resolved.  */
      if (!known_size_p (max_size) || maybe_ne (size, max_size))
	return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case CONSTRUCTOR:
      return base;

    default:
      if (CONSTANT_CLASS_P (base))
	return base;

      return NULL_TREE;
    }
}
7935
db5d7063
EB
/* CTOR is a CONSTRUCTOR of an array or vector type.  Fold a reference of SIZE
   bits to the memory at bit OFFSET.  If non-null, TYPE is the expected type of
   the reference; otherwise the type of the referenced element is used instead.
   When SIZE is zero, attempt to fold a reference to the entire element OFFSET
   refers to.  Increment *SUBOFF by the bit offset of the accessed element.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size,
			   tree from_decl,
			   unsigned HOST_WIDE_INT *suboff)
{
  offset_int low_bound;
  offset_int elt_size;
  offset_int access_index;
  tree domain_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects make no sense.  */
      if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
	return NULL_TREE;
      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = 0;
  /* Static constructors for variably sized objects make no sense.  */
  if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
    return NULL_TREE;
  elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* When TYPE is non-null, verify that it specifies a constant-sized
     access of a multiple of the array element size.  Avoid division
     by zero below when ELT_SIZE is zero, such as with the result of
     an initializer for a zero-length array or an empty struct.  */
  if (elt_size == 0
      || (type
	  && (!TYPE_SIZE_UNIT (type)
	      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST)))
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
				 elt_size);
  access_index += low_bound;

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  unsigned HOST_WIDE_INT elt_sz = elt_size.to_uhwi ();
  if (size > elt_sz * BITS_PER_UNIT)
    {
      /* The access spans multiple array elements; fold it by
	 native-encoding the constituent elements into a byte buffer
	 and re-interpreting that as TYPE.  */
      /* native_encode_expr constraints.  */
      if (size > MAX_BITSIZE_MODE_ANY_MODE
	  || size % BITS_PER_UNIT != 0
	  || inner_offset % BITS_PER_UNIT != 0
	  || elt_sz > MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT)
	return NULL_TREE;

      unsigned ctor_idx;
      tree val = get_array_ctor_element_at_index (ctor, access_index,
						  &ctor_idx);
      if (!val && ctor_idx >= CONSTRUCTOR_NELTS (ctor))
	return build_zero_cst (type);

      /* native-encode adjacent ctor elements.  */
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      unsigned bufoff = 0;
      offset_int index = 0;
      offset_int max_index = access_index;
      constructor_elt *elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
      if (!val)
	val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
      else if (!CONSTANT_CLASS_P (val))
	return NULL_TREE;
      /* [INDEX, MAX_INDEX] is the array-index range covered by ELT;
	 a RANGE_EXPR index covers several consecutive elements.  */
      if (!elt->index)
	;
      else if (TREE_CODE (elt->index) == RANGE_EXPR)
	{
	  index = wi::to_offset (TREE_OPERAND (elt->index, 0));
	  max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
	}
      else
	index = max_index = wi::to_offset (elt->index);
      index = wi::umax (index, access_index);
      do
	{
	  /* Clamp the element encoding to the remaining buffer space.  */
	  if (bufoff + elt_sz > sizeof (buf))
	    elt_sz = sizeof (buf) - bufoff;
	  int len = native_encode_expr (val, buf + bufoff, elt_sz,
					inner_offset / BITS_PER_UNIT);
	  if (len != (int) elt_sz - inner_offset / BITS_PER_UNIT)
	    return NULL_TREE;
	  inner_offset = 0;
	  bufoff += len;

	  access_index += 1;
	  if (wi::cmpu (access_index, index) == 0)
	    val = elt->value;
	  else if (wi::cmpu (access_index, max_index) > 0)
	    {
	      /* Advance to the next constructor element; positions
		 not covered by any element are implicitly zero.  */
	      ctor_idx++;
	      if (ctor_idx >= CONSTRUCTOR_NELTS (ctor))
		{
		  val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
		  ++max_index;
		}
	      else
		{
		  elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
		  index = 0;
		  max_index = access_index;
		  if (!elt->index)
		    ;
		  else if (TREE_CODE (elt->index) == RANGE_EXPR)
		    {
		      index = wi::to_offset (TREE_OPERAND (elt->index, 0));
		      max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
		    }
		  else
		    index = max_index = wi::to_offset (elt->index);
		  index = wi::umax (index, access_index);
		  if (wi::cmpu (access_index, index) == 0)
		    val = elt->value;
		  else
		    val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
		}
	    }
	}
      while (bufoff < size / BITS_PER_UNIT);
      *suboff += size;
      return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
    }

  if (tree val = get_array_ctor_element_at_index (ctor, access_index))
    {
      if (!size && TREE_CODE (val) != CONSTRUCTOR)
	{
	  /* For the final reference to the entire accessed element
	     (SIZE is zero), reset INNER_OFFSET, disregard TYPE (which
	     may be null) in favor of the type of the element, and set
	     SIZE to the size of the accessed element.  */
	  inner_offset = 0;
	  type = TREE_TYPE (val);
	  size = elt_sz * BITS_PER_UNIT;
	}
      else if (size && access_index < CONSTRUCTOR_NELTS (ctor) - 1
	       && TREE_CODE (val) == CONSTRUCTOR
	       && (elt_sz * BITS_PER_UNIT - inner_offset) < size)
	/* If this isn't the last element in the CTOR and a CTOR itself
	   and it does not cover the whole object we are requesting give up
	   since we're not set up for combining from multiple CTORs.  */
	return NULL_TREE;

      *suboff += access_index.to_uhwi () * elt_sz * BITS_PER_UNIT;
      return fold_ctor_reference (type, val, inner_offset, size, from_decl,
				  suboff);
    }

  /* Memory not explicitly mentioned in constructor is 0 (or
     the reference is out of range).  */
  return type ? build_zero_cst (type) : NULL_TREE;
}
8104
db5d7063
EB
/* CTOR is a CONSTRUCTOR of a record or union type.  Fold a reference of SIZE
   bits to the memory at bit OFFSET.  If non-null, TYPE is the expected type of
   the reference; otherwise the type of the referenced member is used instead.
   When SIZE is zero, attempt to fold a reference to the entire member OFFSET
   refers to.  Increment *SUBOFF by the bit offset of the accessed member.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl,
			      unsigned HOST_WIDE_INT *suboff)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);

      if (!field_size)
	{
	  /* Determine the size of the flexible array member from
	     the size of the initializer provided for it.  */
	  field_size = TYPE_SIZE (TREE_TYPE (cval));
	}

      /* Variable sized objects in static constructors makes no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      offset_int bitoffset
	= (wi::to_offset (field_offset)
	   + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
      /* Compute bit offset where the field ends.  */
      offset_int bitoffset_end;
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + wi::to_offset (field_size);
      else
	bitoffset_end = 0;

      /* Compute the bit offset of the end of the desired access.
	 As a special case, if the size of the desired access is
	 zero, assume the access is to the entire field (and let
	 the caller make any necessary adjustments by storing
	 the actual bounds of the field in FIELDBOUNDS).  */
      offset_int access_end = offset_int (offset);
      if (size)
	access_end += size;
      else
	access_end = bitoffset_end;

      /* Is there any overlap between the desired access at
	 [OFFSET, OFFSET+SIZE) and the offset of the field within
	 the object at [BITOFFSET, BITOFFSET_END)?  */
      if (wi::cmps (access_end, bitoffset) > 0
	  && (field_size == NULL_TREE
	      || wi::lts_p (offset, bitoffset_end)))
	{
	  *suboff += bitoffset.to_uhwi ();

	  if (!size && TREE_CODE (cval) != CONSTRUCTOR)
	    {
	      /* For the final reference to the entire accessed member
		 (SIZE is zero), reset OFFSET, disregard TYPE (which may
		 be null) in favor of the type of the member, and set
		 SIZE to the size of the accessed member.  */
	      offset = bitoffset.to_uhwi ();
	      type = TREE_TYPE (cval);
	      size = (bitoffset_end - bitoffset).to_uhwi ();
	    }

	  /* We do have overlap.  Now see if the field is large enough
	     to cover the access.  Give up for accesses that extend
	     beyond the end of the object or that span multiple fields.  */
	  if (wi::cmps (access_end, bitoffset_end) > 0)
	    return NULL_TREE;
	  if (offset < bitoffset)
	    return NULL_TREE;

	  offset_int inner_offset = offset_int (offset) - bitoffset;

	  /* Integral bit-fields are left-justified on big-endian targets, so
	     we must arrange for native_encode_int to start at their MSB.  */
	  if (DECL_BIT_FIELD (cfield) && INTEGRAL_TYPE_P (TREE_TYPE (cfield)))
	    {
	      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
		return NULL_TREE;
	      const unsigned int encoding_size
		= GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (cfield)));
	      if (BYTES_BIG_ENDIAN)
		inner_offset += encoding_size - wi::to_offset (field_size);
	    }

	  /* Recurse into the matching field's initializer.  */
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl, suboff);
	}
    }

  /* Memory not covered by any field initializer is zero (or the
     reference is unresolvable when no TYPE was supplied).  */
  if (!type)
    return NULL_TREE;

  return build_zero_cst (type);
}
8217
db5d7063 8218/* CTOR is a value initializing memory. Fold a reference of TYPE and
14b7950f 8219 bit size POLY_SIZE to the memory at bit POLY_OFFSET. When POLY_SIZE
35b4d3a6
MS
8220 is zero, attempt to fold a reference to the entire subobject
8221 which OFFSET refers to. This is used when folding accesses to
8222 string members of aggregates. When non-null, set *SUBOFF to
8223 the bit offset of the accessed subobject. */
cfef45c8 8224
8403c2cf 8225tree
35b4d3a6
MS
8226fold_ctor_reference (tree type, tree ctor, const poly_uint64 &poly_offset,
8227 const poly_uint64 &poly_size, tree from_decl,
8228 unsigned HOST_WIDE_INT *suboff /* = NULL */)
cfef45c8
RG
8229{
8230 tree ret;
8231
8232 /* We found the field with exact match. */
35b4d3a6
MS
8233 if (type
8234 && useless_type_conversion_p (type, TREE_TYPE (ctor))
30acf282 8235 && known_eq (poly_offset, 0U))
9d60be38 8236 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
cfef45c8 8237
30acf282
RS
8238 /* The remaining optimizations need a constant size and offset. */
8239 unsigned HOST_WIDE_INT size, offset;
8240 if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
8241 return NULL_TREE;
8242
cfef45c8
RG
8243 /* We are at the end of walk, see if we can view convert the
8244 result. */
8245 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
8246 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
7c09e7e2
RB
8247 && known_eq (wi::to_poly_widest (TYPE_SIZE (type)), size)
8248 && known_eq (wi::to_poly_widest (TYPE_SIZE (TREE_TYPE (ctor))), size))
cfef45c8 8249 {
9d60be38 8250 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
cfef45c8 8251 if (ret)
672d9f8e
RB
8252 {
8253 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
8254 if (ret)
8255 STRIP_USELESS_TYPE_CONVERSION (ret);
8256 }
cfef45c8
RG
8257 return ret;
8258 }
db5d7063
EB
8259
8260 /* For constants and byte-aligned/sized reads, try to go through
b2505143
RB
8261 native_encode/interpret. */
8262 if (CONSTANT_CLASS_P (ctor)
8263 && BITS_PER_UNIT == 8
8264 && offset % BITS_PER_UNIT == 0
ea69031c 8265 && offset / BITS_PER_UNIT <= INT_MAX
b2505143 8266 && size % BITS_PER_UNIT == 0
ea69031c
JJ
8267 && size <= MAX_BITSIZE_MODE_ANY_MODE
8268 && can_native_interpret_type_p (type))
b2505143
RB
8269 {
8270 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1ff0a84c
JJ
8271 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
8272 offset / BITS_PER_UNIT);
8273 if (len > 0)
8274 return native_interpret_expr (type, buf, len);
b2505143 8275 }
db5d7063
EB
8276
8277 /* For constructors, try first a recursive local processing, but in any case
8278 this requires the native storage order. */
8279 if (TREE_CODE (ctor) == CONSTRUCTOR
8280 && !(AGGREGATE_TYPE_P (TREE_TYPE (ctor))
8281 && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (ctor))))
cfef45c8 8282 {
35b4d3a6
MS
8283 unsigned HOST_WIDE_INT dummy = 0;
8284 if (!suboff)
8285 suboff = &dummy;
cfef45c8 8286
ea69031c 8287 tree ret;
eb8f1123
RG
8288 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
8289 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
ea69031c
JJ
8290 ret = fold_array_ctor_reference (type, ctor, offset, size,
8291 from_decl, suboff);
8292 else
8293 ret = fold_nonarray_ctor_reference (type, ctor, offset, size,
8294 from_decl, suboff);
8295
db5d7063
EB
8296 /* Otherwise fall back to native_encode_initializer. This may be done
8297 only from the outermost fold_ctor_reference call (because it itself
8298 recurses into CONSTRUCTORs and doesn't update suboff). */
ea69031c
JJ
8299 if (ret == NULL_TREE
8300 && suboff == &dummy
8301 && BITS_PER_UNIT == 8
8302 && offset % BITS_PER_UNIT == 0
8303 && offset / BITS_PER_UNIT <= INT_MAX
8304 && size % BITS_PER_UNIT == 0
8305 && size <= MAX_BITSIZE_MODE_ANY_MODE
8306 && can_native_interpret_type_p (type))
8307 {
8308 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
8309 int len = native_encode_initializer (ctor, buf, size / BITS_PER_UNIT,
8310 offset / BITS_PER_UNIT);
8311 if (len > 0)
8312 return native_interpret_expr (type, buf, len);
8313 }
35b4d3a6 8314
ea69031c 8315 return ret;
cfef45c8
RG
8316 }
8317
8318 return NULL_TREE;
8319}
8320
8321/* Return the tree representing the element referenced by T if T is an
8322 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
8323 names using VALUEIZE. Return NULL_TREE otherwise. */
8324
8325tree
8326fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
8327{
8328 tree ctor, idx, base;
588db50c 8329 poly_int64 offset, size, max_size;
cfef45c8 8330 tree tem;
ee45a32d 8331 bool reverse;
cfef45c8 8332
f8a7df45
RG
8333 if (TREE_THIS_VOLATILE (t))
8334 return NULL_TREE;
8335
3a65ee74 8336 if (DECL_P (t))
cfef45c8
RG
8337 return get_symbol_constant_value (t);
8338
8339 tem = fold_read_from_constant_string (t);
8340 if (tem)
8341 return tem;
8342
8343 switch (TREE_CODE (t))
8344 {
8345 case ARRAY_REF:
8346 case ARRAY_RANGE_REF:
8347 /* Constant indexes are handled well by get_base_constructor.
8348 Only special case variable offsets.
8349 FIXME: This code can't handle nested references with variable indexes
8350 (they will be handled only by iteration of ccp). Perhaps we can bring
8351 get_ref_base_and_extent here and make it use a valueize callback. */
8352 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
8353 && valueize
8354 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
588db50c 8355 && poly_int_tree_p (idx))
cfef45c8
RG
8356 {
8357 tree low_bound, unit_size;
8358
8359 /* If the resulting bit-offset is constant, track it. */
8360 if ((low_bound = array_ref_low_bound (t),
588db50c 8361 poly_int_tree_p (low_bound))
cfef45c8 8362 && (unit_size = array_ref_element_size (t),
807e902e 8363 tree_fits_uhwi_p (unit_size)))
cfef45c8 8364 {
588db50c
RS
8365 poly_offset_int woffset
8366 = wi::sext (wi::to_poly_offset (idx)
8367 - wi::to_poly_offset (low_bound),
e287a2a1 8368 TYPE_PRECISION (sizetype));
a9e6359a
RB
8369 woffset *= tree_to_uhwi (unit_size);
8370 woffset *= BITS_PER_UNIT;
588db50c 8371 if (woffset.to_shwi (&offset))
807e902e 8372 {
807e902e
KZ
8373 base = TREE_OPERAND (t, 0);
8374 ctor = get_base_constructor (base, &offset, valueize);
8375 /* Empty constructor. Always fold to 0. */
8376 if (ctor == error_mark_node)
8377 return build_zero_cst (TREE_TYPE (t));
8378 /* Out of bound array access. Value is undefined,
8379 but don't fold. */
588db50c 8380 if (maybe_lt (offset, 0))
807e902e 8381 return NULL_TREE;
67914693 8382 /* We cannot determine ctor. */
807e902e
KZ
8383 if (!ctor)
8384 return NULL_TREE;
8385 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
8386 tree_to_uhwi (unit_size)
8387 * BITS_PER_UNIT,
8388 base);
8389 }
cfef45c8
RG
8390 }
8391 }
8392 /* Fallthru. */
8393
8394 case COMPONENT_REF:
8395 case BIT_FIELD_REF:
8396 case TARGET_MEM_REF:
8397 case MEM_REF:
ee45a32d 8398 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
cfef45c8
RG
8399 ctor = get_base_constructor (base, &offset, valueize);
8400
8401 /* Empty constructor. Always fold to 0. */
8402 if (ctor == error_mark_node)
8403 return build_zero_cst (TREE_TYPE (t));
8404 /* We do not know precise address. */
588db50c 8405 if (!known_size_p (max_size) || maybe_ne (max_size, size))
cfef45c8 8406 return NULL_TREE;
67914693 8407 /* We cannot determine ctor. */
cfef45c8
RG
8408 if (!ctor)
8409 return NULL_TREE;
8410
8411 /* Out of bound array access. Value is undefined, but don't fold. */
588db50c 8412 if (maybe_lt (offset, 0))
cfef45c8
RG
8413 return NULL_TREE;
8414
e4f1cbc3
JJ
8415 tem = fold_ctor_reference (TREE_TYPE (t), ctor, offset, size, base);
8416 if (tem)
8417 return tem;
8418
8419 /* For bit field reads try to read the representative and
8420 adjust. */
8421 if (TREE_CODE (t) == COMPONENT_REF
8422 && DECL_BIT_FIELD (TREE_OPERAND (t, 1))
8423 && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)))
8424 {
8425 HOST_WIDE_INT csize, coffset;
8426 tree field = TREE_OPERAND (t, 1);
8427 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
8428 if (INTEGRAL_TYPE_P (TREE_TYPE (repr))
8429 && size.is_constant (&csize)
8430 && offset.is_constant (&coffset)
8431 && (coffset % BITS_PER_UNIT != 0
8432 || csize % BITS_PER_UNIT != 0)
8433 && !reverse
8434 && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN)
8435 {
8436 poly_int64 bitoffset;
8437 poly_uint64 field_offset, repr_offset;
8438 if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
8439 && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
8440 bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
8441 else
8442 bitoffset = 0;
8443 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
8444 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
8445 HOST_WIDE_INT bitoff;
8446 int diff = (TYPE_PRECISION (TREE_TYPE (repr))
8447 - TYPE_PRECISION (TREE_TYPE (field)));
8448 if (bitoffset.is_constant (&bitoff)
8449 && bitoff >= 0
8450 && bitoff <= diff)
8451 {
8452 offset -= bitoff;
8453 size = tree_to_uhwi (DECL_SIZE (repr));
8454
8455 tem = fold_ctor_reference (TREE_TYPE (repr), ctor, offset,
8456 size, base);
8457 if (tem && TREE_CODE (tem) == INTEGER_CST)
8458 {
8459 if (!BYTES_BIG_ENDIAN)
8460 tem = wide_int_to_tree (TREE_TYPE (field),
8461 wi::lrshift (wi::to_wide (tem),
8462 bitoff));
8463 else
8464 tem = wide_int_to_tree (TREE_TYPE (field),
8465 wi::lrshift (wi::to_wide (tem),
8466 diff - bitoff));
8467 return tem;
8468 }
8469 }
8470 }
8471 }
8472 break;
cfef45c8
RG
8473
8474 case REALPART_EXPR:
8475 case IMAGPART_EXPR:
8476 {
8477 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
8478 if (c && TREE_CODE (c) == COMPLEX_CST)
8479 return fold_build1_loc (EXPR_LOCATION (t),
ea69031c 8480 TREE_CODE (t), TREE_TYPE (t), c);
cfef45c8
RG
8481 break;
8482 }
8483
8484 default:
8485 break;
8486 }
8487
8488 return NULL_TREE;
8489}
8490
8491tree
8492fold_const_aggregate_ref (tree t)
8493{
8494 return fold_const_aggregate_ref_1 (t, NULL);
8495}
06bc3ec7 8496
85942f45 8497/* Lookup virtual method with index TOKEN in a virtual table V
ec77d61f
JH
8498 at OFFSET.
8499 Set CAN_REFER if non-NULL to false if method
8500 is not referable or if the virtual table is ill-formed (such as rewriten
8501 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
81fa35bd
MJ
8502
8503tree
85942f45
JH
8504gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
8505 tree v,
ec77d61f
JH
8506 unsigned HOST_WIDE_INT offset,
8507 bool *can_refer)
81fa35bd 8508{
85942f45
JH
8509 tree vtable = v, init, fn;
8510 unsigned HOST_WIDE_INT size;
8c311b50
JH
8511 unsigned HOST_WIDE_INT elt_size, access_index;
8512 tree domain_type;
81fa35bd 8513
ec77d61f
JH
8514 if (can_refer)
8515 *can_refer = true;
8516
9de2f554 8517 /* First of all double check we have virtual table. */
8813a647 8518 if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
ec77d61f 8519 {
ec77d61f
JH
8520 /* Pass down that we lost track of the target. */
8521 if (can_refer)
8522 *can_refer = false;
8523 return NULL_TREE;
8524 }
9de2f554 8525
2aa3da06
JH
8526 init = ctor_for_folding (v);
8527
9de2f554 8528 /* The virtual tables should always be born with constructors
2aa3da06
JH
8529 and we always should assume that they are avaialble for
8530 folding. At the moment we do not stream them in all cases,
8531 but it should never happen that ctor seem unreachable. */
8532 gcc_assert (init);
8533 if (init == error_mark_node)
8534 {
ec77d61f
JH
8535 /* Pass down that we lost track of the target. */
8536 if (can_refer)
8537 *can_refer = false;
2aa3da06
JH
8538 return NULL_TREE;
8539 }
81fa35bd 8540 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
ae7e9ddd 8541 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
85942f45 8542 offset *= BITS_PER_UNIT;
81fa35bd 8543 offset += token * size;
9de2f554 8544
8c311b50
JH
8545 /* Lookup the value in the constructor that is assumed to be array.
8546 This is equivalent to
8547 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
8548 offset, size, NULL);
8549 but in a constant time. We expect that frontend produced a simple
8550 array without indexed initializers. */
8551
8552 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
8553 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
8554 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
8555 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
8556
8557 access_index = offset / BITS_PER_UNIT / elt_size;
8558 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
8559
bf8d8309
MP
8560 /* The C++ FE can now produce indexed fields, and we check if the indexes
8561 match. */
8c311b50
JH
8562 if (access_index < CONSTRUCTOR_NELTS (init))
8563 {
8564 fn = CONSTRUCTOR_ELT (init, access_index)->value;
bf8d8309
MP
8565 tree idx = CONSTRUCTOR_ELT (init, access_index)->index;
8566 gcc_checking_assert (!idx || tree_to_uhwi (idx) == access_index);
8c311b50
JH
8567 STRIP_NOPS (fn);
8568 }
8569 else
8570 fn = NULL;
9de2f554
JH
8571
8572 /* For type inconsistent program we may end up looking up virtual method
8573 in virtual table that does not contain TOKEN entries. We may overrun
8574 the virtual table and pick up a constant or RTTI info pointer.
8575 In any case the call is undefined. */
8576 if (!fn
8577 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
8578 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
d68d3664 8579 fn = builtin_decl_unreachable ();
9de2f554
JH
8580 else
8581 {
8582 fn = TREE_OPERAND (fn, 0);
8583
8584 /* When cgraph node is missing and function is not public, we cannot
8585 devirtualize. This can happen in WHOPR when the actual method
8586 ends up in other partition, because we found devirtualization
8587 possibility too late. */
8588 if (!can_refer_decl_in_current_unit_p (fn, vtable))
ec77d61f
JH
8589 {
8590 if (can_refer)
8591 {
8592 *can_refer = false;
8593 return fn;
8594 }
8595 return NULL_TREE;
8596 }
9de2f554 8597 }
81fa35bd 8598
7501ca28
RG
8599 /* Make sure we create a cgraph node for functions we'll reference.
8600 They can be non-existent if the reference comes from an entry
8601 of an external vtable for example. */
d52f5295 8602 cgraph_node::get_create (fn);
7501ca28 8603
81fa35bd
MJ
8604 return fn;
8605}
8606
85942f45
JH
8607/* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
8608 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
8609 KNOWN_BINFO carries the binfo describing the true type of
ec77d61f
JH
8610 OBJ_TYPE_REF_OBJECT(REF).
8611 Set CAN_REFER if non-NULL to false if method
8612 is not referable or if the virtual table is ill-formed (such as rewriten
8613 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
85942f45
JH
8614
8615tree
ec77d61f
JH
8616gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
8617 bool *can_refer)
85942f45
JH
8618{
8619 unsigned HOST_WIDE_INT offset;
8620 tree v;
8621
8622 v = BINFO_VTABLE (known_binfo);
8623 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
8624 if (!v)
8625 return NULL_TREE;
8626
8627 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
ec77d61f
JH
8628 {
8629 if (can_refer)
8630 *can_refer = false;
8631 return NULL_TREE;
8632 }
8633 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
85942f45
JH
8634}
8635
737f500a
RB
8636/* Given a pointer value T, return a simplified version of an
8637 indirection through T, or NULL_TREE if no simplification is
b184c8f1
AM
8638 possible. Note that the resulting type may be different from
8639 the type pointed to in the sense that it is still compatible
8640 from the langhooks point of view. */
8641
8642tree
8643gimple_fold_indirect_ref (tree t)
8644{
8645 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
8646 tree sub = t;
8647 tree subtype;
8648
8649 STRIP_NOPS (sub);
8650 subtype = TREE_TYPE (sub);
737f500a
RB
8651 if (!POINTER_TYPE_P (subtype)
8652 || TYPE_REF_CAN_ALIAS_ALL (ptype))
b184c8f1
AM
8653 return NULL_TREE;
8654
8655 if (TREE_CODE (sub) == ADDR_EXPR)
8656 {
8657 tree op = TREE_OPERAND (sub, 0);
8658 tree optype = TREE_TYPE (op);
8659 /* *&p => p */
8660 if (useless_type_conversion_p (type, optype))
8661 return op;
8662
8663 /* *(foo *)&fooarray => fooarray[0] */
8664 if (TREE_CODE (optype) == ARRAY_TYPE
8665 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
8666 && useless_type_conversion_p (type, TREE_TYPE (optype)))
8667 {
8668 tree type_domain = TYPE_DOMAIN (optype);
8669 tree min_val = size_zero_node;
8670 if (type_domain && TYPE_MIN_VALUE (type_domain))
8671 min_val = TYPE_MIN_VALUE (type_domain);
8672 if (TREE_CODE (min_val) == INTEGER_CST)
8673 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
8674 }
8675 /* *(foo *)&complexfoo => __real__ complexfoo */
8676 else if (TREE_CODE (optype) == COMPLEX_TYPE
8677 && useless_type_conversion_p (type, TREE_TYPE (optype)))
8678 return fold_build1 (REALPART_EXPR, type, op);
8679 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
8680 else if (TREE_CODE (optype) == VECTOR_TYPE
8681 && useless_type_conversion_p (type, TREE_TYPE (optype)))
8682 {
8683 tree part_width = TYPE_SIZE (type);
8684 tree index = bitsize_int (0);
8685 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
8686 }
8687 }
8688
8689 /* *(p + CST) -> ... */
8690 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
8691 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
8692 {
8693 tree addr = TREE_OPERAND (sub, 0);
8694 tree off = TREE_OPERAND (sub, 1);
8695 tree addrtype;
8696
8697 STRIP_NOPS (addr);
8698 addrtype = TREE_TYPE (addr);
8699
8700 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
8701 if (TREE_CODE (addr) == ADDR_EXPR
8702 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
8703 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
cc269bb6 8704 && tree_fits_uhwi_p (off))
b184c8f1 8705 {
ae7e9ddd 8706 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
b184c8f1
AM
8707 tree part_width = TYPE_SIZE (type);
8708 unsigned HOST_WIDE_INT part_widthi
9439e9a1 8709 = tree_to_shwi (part_width) / BITS_PER_UNIT;
b184c8f1
AM
8710 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
8711 tree index = bitsize_int (indexi);
928686b1
RS
8712 if (known_lt (offset / part_widthi,
8713 TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
b184c8f1
AM
8714 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
8715 part_width, index);
8716 }
8717
8718 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
8719 if (TREE_CODE (addr) == ADDR_EXPR
8720 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
8721 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
8722 {
8723 tree size = TYPE_SIZE_UNIT (type);
8724 if (tree_int_cst_equal (size, off))
8725 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
8726 }
8727
8728 /* *(p + CST) -> MEM_REF <p, CST>. */
8729 if (TREE_CODE (addr) != ADDR_EXPR
8730 || DECL_P (TREE_OPERAND (addr, 0)))
8731 return fold_build2 (MEM_REF, type,
8732 addr,
8e6cdc90 8733 wide_int_to_tree (ptype, wi::to_wide (off)));
b184c8f1
AM
8734 }
8735
8736 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
8737 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
8738 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
8739 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
8740 {
8741 tree type_domain;
8742 tree min_val = size_zero_node;
8743 tree osub = sub;
8744 sub = gimple_fold_indirect_ref (sub);
8745 if (! sub)
8746 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
8747 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
8748 if (type_domain && TYPE_MIN_VALUE (type_domain))
8749 min_val = TYPE_MIN_VALUE (type_domain);
8750 if (TREE_CODE (min_val) == INTEGER_CST)
8751 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
8752 }
8753
8754 return NULL_TREE;
8755}
19e51b40
JJ
8756
8757/* Return true if CODE is an operation that when operating on signed
8758 integer types involves undefined behavior on overflow and the
8759 operation can be expressed with unsigned arithmetic. */
8760
8761bool
8762arith_code_with_undefined_signed_overflow (tree_code code)
8763{
8764 switch (code)
8765 {
8e2c037d 8766 case ABS_EXPR:
19e51b40
JJ
8767 case PLUS_EXPR:
8768 case MINUS_EXPR:
8769 case MULT_EXPR:
8770 case NEGATE_EXPR:
8771 case POINTER_PLUS_EXPR:
8772 return true;
8773 default:
8774 return false;
8775 }
8776}
8777
8778/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
8779 operation that can be transformed to unsigned arithmetic by converting
8780 its operand, carrying out the operation in the corresponding unsigned
8781 type and converting the result back to the original type.
8782
c05f7482
RB
8783 If IN_PLACE is true, *GSI points to STMT, adjust the stmt in place and
8784 return NULL.
82c8ff79
JJ
8785 Otherwise returns a sequence of statements that replace STMT and also
8786 contain a modified form of STMT itself. */
19e51b40 8787
c05f7482
RB
8788static gimple_seq
8789rewrite_to_defined_overflow (gimple_stmt_iterator *gsi, gimple *stmt,
8790 bool in_place)
19e51b40
JJ
8791{
8792 if (dump_file && (dump_flags & TDF_DETAILS))
8793 {
8794 fprintf (dump_file, "rewriting stmt with undefined signed "
8795 "overflow ");
8796 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
8797 }
8798
8799 tree lhs = gimple_assign_lhs (stmt);
8800 tree type = unsigned_type_for (TREE_TYPE (lhs));
8801 gimple_seq stmts = NULL;
8e2c037d
RB
8802 if (gimple_assign_rhs_code (stmt) == ABS_EXPR)
8803 gimple_assign_set_rhs_code (stmt, ABSU_EXPR);
8804 else
8805 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
8806 {
8807 tree op = gimple_op (stmt, i);
8808 op = gimple_convert (&stmts, type, op);
8809 gimple_set_op (stmt, i, op);
8810 }
19e51b40
JJ
8811 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
8812 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
8813 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
f74c4b2c 8814 gimple_set_modified (stmt, true);
82c8ff79
JJ
8815 if (in_place)
8816 {
82c8ff79 8817 if (stmts)
c05f7482 8818 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
82c8ff79
JJ
8819 stmts = NULL;
8820 }
8821 else
8822 gimple_seq_add_stmt (&stmts, stmt);
355fe088 8823 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
82c8ff79
JJ
8824 if (in_place)
8825 {
c05f7482 8826 gsi_insert_after (gsi, cvt, GSI_SAME_STMT);
82c8ff79
JJ
8827 update_stmt (stmt);
8828 }
8829 else
8830 gimple_seq_add_stmt (&stmts, cvt);
19e51b40
JJ
8831
8832 return stmts;
8833}
d4f5cd5e 8834
c05f7482
RB
8835void
8836rewrite_to_defined_overflow (gimple_stmt_iterator *gsi)
8837{
8838 rewrite_to_defined_overflow (gsi, gsi_stmt (*gsi), true);
8839}
8840
8841gimple_seq
8842rewrite_to_defined_overflow (gimple *stmt)
8843{
8844 return rewrite_to_defined_overflow (nullptr, stmt, false);
8845}
3d2cf79f 8846
c26de36d
RB
8847/* The valueization hook we use for the gimple_build API simplification.
8848 This makes us match fold_buildN behavior by only combining with
8849 statements in the sequence(s) we are currently building. */
8850
8851static tree
8852gimple_build_valueize (tree op)
8853{
8854 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
8855 return op;
8856 return NULL_TREE;
8857}
8858
6f5b0603
RB
8859/* Helper for gimple_build to perform the final insertion of stmts on SEQ. */
8860
8861static inline void
8862gimple_build_insert_seq (gimple_stmt_iterator *gsi,
8863 bool before, gsi_iterator_update update,
8864 gimple_seq seq)
8865{
8866 if (before)
8867 {
8868 if (gsi->bb)
8869 gsi_insert_seq_before (gsi, seq, update);
8870 else
8871 gsi_insert_seq_before_without_update (gsi, seq, update);
8872 }
8873 else
8874 {
8875 if (gsi->bb)
8876 gsi_insert_seq_after (gsi, seq, update);
8877 else
8878 gsi_insert_seq_after_without_update (gsi, seq, update);
8879 }
8880}
8881
3d2cf79f 8882/* Build the expression CODE OP0 of type TYPE with location LOC,
c26de36d 8883 simplifying it first if possible. Returns the built
ba206889
RB
8884 expression value and inserts statements possibly defining it
8885 before GSI if BEFORE is true or after GSI if false and advance
8886 the iterator accordingly.
8887 If gsi refers to a basic block simplifying is allowed to look
8888 at all SSA defs while when it does not it is restricted to
8889 SSA defs that are not associated with a basic block yet,
8890 indicating they belong to the currently building sequence. */
3d2cf79f
RB
8891
8892tree
ba206889
RB
8893gimple_build (gimple_stmt_iterator *gsi,
8894 bool before, gsi_iterator_update update,
8895 location_t loc, enum tree_code code, tree type, tree op0)
3d2cf79f 8896{
ba206889
RB
8897 gimple_seq seq = NULL;
8898 tree res
8899 = gimple_simplify (code, type, op0, &seq,
8900 gsi->bb ? follow_all_ssa_edges : gimple_build_valueize);
3d2cf79f
RB
8901 if (!res)
8902 {
a15ebbcd 8903 res = create_tmp_reg_or_ssa_name (type);
355fe088 8904 gimple *stmt;
3d2cf79f
RB
8905 if (code == REALPART_EXPR
8906 || code == IMAGPART_EXPR
8907 || code == VIEW_CONVERT_EXPR)
0d0e4a03 8908 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
3d2cf79f 8909 else
0d0e4a03 8910 stmt = gimple_build_assign (res, code, op0);
3d2cf79f 8911 gimple_set_location (stmt, loc);
ba206889
RB
8912 gimple_seq_add_stmt_without_update (&seq, stmt);
8913 }
6f5b0603 8914 gimple_build_insert_seq (gsi, before, update, seq);
3d2cf79f
RB
8915 return res;
8916}
8917
8918/* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
c26de36d 8919 simplifying it first if possible. Returns the built
6f5b0603
RB
8920 expression value inserting any new statements at GSI honoring BEFORE
8921 and UPDATE. */
3d2cf79f
RB
8922
8923tree
ba206889
RB
8924gimple_build (gimple_stmt_iterator *gsi,
8925 bool before, gsi_iterator_update update,
8926 location_t loc, enum tree_code code, tree type,
8927 tree op0, tree op1)
3d2cf79f 8928{
ba206889
RB
8929 gimple_seq seq = NULL;
8930 tree res
8931 = gimple_simplify (code, type, op0, op1, &seq,
8932 gsi->bb ? follow_all_ssa_edges : gimple_build_valueize);
3d2cf79f
RB
8933 if (!res)
8934 {
a15ebbcd 8935 res = create_tmp_reg_or_ssa_name (type);
355fe088 8936 gimple *stmt = gimple_build_assign (res, code, op0, op1);
3d2cf79f 8937 gimple_set_location (stmt, loc);
ba206889
RB
8938 gimple_seq_add_stmt_without_update (&seq, stmt);
8939 }
6f5b0603 8940 gimple_build_insert_seq (gsi, before, update, seq);
3d2cf79f
RB
8941 return res;
8942}
8943
8944/* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
c26de36d 8945 simplifying it first if possible. Returns the built
6f5b0603
RB
8946 expression value inserting any new statements at GSI honoring BEFORE
8947 and UPDATE. */
3d2cf79f
RB
8948
8949tree
ba206889
RB
8950gimple_build (gimple_stmt_iterator *gsi,
8951 bool before, gsi_iterator_update update,
8952 location_t loc, enum tree_code code, tree type,
8953 tree op0, tree op1, tree op2)
3d2cf79f 8954{
ba206889
RB
8955
8956 gimple_seq seq = NULL;
8957 tree res
8958 = gimple_simplify (code, type, op0, op1, op2, &seq,
8959 gsi->bb ? follow_all_ssa_edges : gimple_build_valueize);
3d2cf79f
RB
8960 if (!res)
8961 {
a15ebbcd 8962 res = create_tmp_reg_or_ssa_name (type);
355fe088 8963 gimple *stmt;
3d2cf79f 8964 if (code == BIT_FIELD_REF)
0d0e4a03
JJ
8965 stmt = gimple_build_assign (res, code,
8966 build3 (code, type, op0, op1, op2));
3d2cf79f 8967 else
0d0e4a03 8968 stmt = gimple_build_assign (res, code, op0, op1, op2);
3d2cf79f 8969 gimple_set_location (stmt, loc);
ba206889
RB
8970 gimple_seq_add_stmt_without_update (&seq, stmt);
8971 }
6f5b0603 8972 gimple_build_insert_seq (gsi, before, update, seq);
3d2cf79f
RB
8973 return res;
8974}
8975
93a73251
MM
8976/* Build the call FN () with a result of type TYPE (or no result if TYPE is
8977 void) with a location LOC. Returns the built expression value (or NULL_TREE
6f5b0603
RB
8978 if TYPE is void) inserting any new statements at GSI honoring BEFORE
8979 and UPDATE. */
93a73251
MM
8980
8981tree
6f5b0603
RB
8982gimple_build (gimple_stmt_iterator *gsi,
8983 bool before, gsi_iterator_update update,
8984 location_t loc, combined_fn fn, tree type)
93a73251
MM
8985{
8986 tree res = NULL_TREE;
6f5b0603 8987 gimple_seq seq = NULL;
93a73251
MM
8988 gcall *stmt;
8989 if (internal_fn_p (fn))
8990 stmt = gimple_build_call_internal (as_internal_fn (fn), 0);
8991 else
8992 {
8993 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
8994 stmt = gimple_build_call (decl, 0);
8995 }
8996 if (!VOID_TYPE_P (type))
8997 {
8998 res = create_tmp_reg_or_ssa_name (type);
8999 gimple_call_set_lhs (stmt, res);
9000 }
9001 gimple_set_location (stmt, loc);
6f5b0603
RB
9002 gimple_seq_add_stmt_without_update (&seq, stmt);
9003 gimple_build_insert_seq (gsi, before, update, seq);
93a73251
MM
9004 return res;
9005}
9006
3d2cf79f
RB
9007/* Build the call FN (ARG0) with a result of type TYPE
9008 (or no result if TYPE is void) with location LOC,
c26de36d 9009 simplifying it first if possible. Returns the built
6f5b0603
RB
9010 expression value (or NULL_TREE if TYPE is void) inserting any new
9011 statements at GSI honoring BEFORE and UPDATE. */
3d2cf79f
RB
9012
9013tree
6f5b0603
RB
9014gimple_build (gimple_stmt_iterator *gsi,
9015 bool before, gsi_iterator_update update,
9016 location_t loc, combined_fn fn,
eb69361d 9017 tree type, tree arg0)
3d2cf79f 9018{
6f5b0603
RB
9019 gimple_seq seq = NULL;
9020 tree res = gimple_simplify (fn, type, arg0, &seq, gimple_build_valueize);
3d2cf79f
RB
9021 if (!res)
9022 {
eb69361d
RS
9023 gcall *stmt;
9024 if (internal_fn_p (fn))
9025 stmt = gimple_build_call_internal (as_internal_fn (fn), 1, arg0);
9026 else
9027 {
9028 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
9029 stmt = gimple_build_call (decl, 1, arg0);
9030 }
3d2cf79f
RB
9031 if (!VOID_TYPE_P (type))
9032 {
a15ebbcd 9033 res = create_tmp_reg_or_ssa_name (type);
3d2cf79f
RB
9034 gimple_call_set_lhs (stmt, res);
9035 }
9036 gimple_set_location (stmt, loc);
6f5b0603 9037 gimple_seq_add_stmt_without_update (&seq, stmt);
3d2cf79f 9038 }
6f5b0603 9039 gimple_build_insert_seq (gsi, before, update, seq);
3d2cf79f
RB
9040 return res;
9041}
9042
/* Build the call FN (ARG0, ARG1) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) inserting any new
   statements at GSI honoring BEFORE and UPDATE.  */

tree
gimple_build (gimple_stmt_iterator *gsi,
	      bool before, gsi_iterator_update update,
	      location_t loc, combined_fn fn,
	      tree type, tree arg0, tree arg1)
{
  gimple_seq seq = NULL;
  /* Attempt match.pd simplification of FN (ARG0, ARG1) first; helper
     statements created by the simplifier accumulate in SEQ.  */
  tree res = gimple_simplify (fn, type, arg0, arg1, &seq,
			      gimple_build_valueize);
  if (!res)
    {
      /* Could not simplify - emit the call, internal or builtin.  */
      gcall *stmt;
      if (internal_fn_p (fn))
	stmt = gimple_build_call_internal (as_internal_fn (fn), 2, arg0, arg1);
      else
	{
	  tree decl = builtin_decl_implicit (as_builtin_fn (fn));
	  stmt = gimple_build_call (decl, 2, arg0, arg1);
	}
      if (!VOID_TYPE_P (type))
	{
	  /* Only non-void calls produce a value; otherwise RES stays
	     NULL_TREE.  */
	  res = create_tmp_reg_or_ssa_name (type);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (&seq, stmt);
    }
  /* Insert the accumulated statements at GSI honoring BEFORE/UPDATE.  */
  gimple_build_insert_seq (gsi, before, update, seq);
  return res;
}
9079
/* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) inserting any new
   statements at GSI honoring BEFORE and UPDATE.  */

tree
gimple_build (gimple_stmt_iterator *gsi,
	      bool before, gsi_iterator_update update,
	      location_t loc, combined_fn fn,
	      tree type, tree arg0, tree arg1, tree arg2)
{
  gimple_seq seq = NULL;
  /* Try folding FN (ARG0, ARG1, ARG2) before emitting a real call;
     auxiliary statements end up in SEQ.  */
  tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
			      &seq, gimple_build_valueize);
  if (!res)
    {
      /* Simplification failed - build the call statement, choosing
	 internal-function or builtin form based on FN.  */
      gcall *stmt;
      if (internal_fn_p (fn))
	stmt = gimple_build_call_internal (as_internal_fn (fn),
					   3, arg0, arg1, arg2);
      else
	{
	  tree decl = builtin_decl_implicit (as_builtin_fn (fn));
	  stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
	}
      if (!VOID_TYPE_P (type))
	{
	  /* Non-void calls get an lhs which becomes the returned value.  */
	  res = create_tmp_reg_or_ssa_name (type);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (&seq, stmt);
    }
  /* Insert everything at GSI as requested by BEFORE and UPDATE.  */
  gimple_build_insert_seq (gsi, before, update, seq);
  return res;
}
9117
30213ae9
RS
9118/* Build CODE (OP0) with a result of type TYPE (or no result if TYPE is
9119 void) with location LOC, simplifying it first if possible. Returns the
6f5b0603
RB
9120 built expression value (or NULL_TREE if TYPE is void) inserting any new
9121 statements at GSI honoring BEFORE and UPDATE. */
30213ae9
RS
9122
9123tree
6f5b0603
RB
9124gimple_build (gimple_stmt_iterator *gsi,
9125 bool before, gsi_iterator_update update,
9126 location_t loc, code_helper code, tree type, tree op0)
30213ae9
RS
9127{
9128 if (code.is_tree_code ())
6f5b0603
RB
9129 return gimple_build (gsi, before, update, loc, tree_code (code), type, op0);
9130 return gimple_build (gsi, before, update, loc, combined_fn (code), type, op0);
30213ae9
RS
9131}
9132
9133/* Build CODE (OP0, OP1) with a result of type TYPE (or no result if TYPE is
9134 void) with location LOC, simplifying it first if possible. Returns the
6f5b0603
RB
9135 built expression value (or NULL_TREE if TYPE is void) inserting any new
9136 statements at GSI honoring BEFORE and UPDATE. */
30213ae9
RS
9137
9138tree
6f5b0603
RB
9139gimple_build (gimple_stmt_iterator *gsi,
9140 bool before, gsi_iterator_update update,
9141 location_t loc, code_helper code, tree type, tree op0, tree op1)
30213ae9
RS
9142{
9143 if (code.is_tree_code ())
6f5b0603
RB
9144 return gimple_build (gsi, before, update,
9145 loc, tree_code (code), type, op0, op1);
9146 return gimple_build (gsi, before, update,
9147 loc, combined_fn (code), type, op0, op1);
30213ae9
RS
9148}
9149
9150/* Build CODE (OP0, OP1, OP2) with a result of type TYPE (or no result if TYPE
9151 is void) with location LOC, simplifying it first if possible. Returns the
6f5b0603
RB
9152 built expression value (or NULL_TREE if TYPE is void) inserting any new
9153 statements at GSI honoring BEFORE and UPDATE. */
30213ae9
RS
9154
9155tree
6f5b0603
RB
9156gimple_build (gimple_stmt_iterator *gsi,
9157 bool before, gsi_iterator_update update,
9158 location_t loc, code_helper code,
30213ae9
RS
9159 tree type, tree op0, tree op1, tree op2)
9160{
9161 if (code.is_tree_code ())
6f5b0603
RB
9162 return gimple_build (gsi, before, update,
9163 loc, tree_code (code), type, op0, op1, op2);
9164 return gimple_build (gsi, before, update,
9165 loc, combined_fn (code), type, op0, op1, op2);
30213ae9
RS
9166}
9167
3d2cf79f
RB
9168/* Build the conversion (TYPE) OP with a result of type TYPE
9169 with location LOC if such conversion is neccesary in GIMPLE,
9170 simplifying it first.
6f5b0603
RB
9171 Returns the built expression inserting any new statements
9172 at GSI honoring BEFORE and UPDATE. */
d4f5cd5e
RB
9173
9174tree
6f5b0603
RB
9175gimple_convert (gimple_stmt_iterator *gsi,
9176 bool before, gsi_iterator_update update,
9177 location_t loc, tree type, tree op)
d4f5cd5e
RB
9178{
9179 if (useless_type_conversion_p (type, TREE_TYPE (op)))
9180 return op;
6f5b0603 9181 return gimple_build (gsi, before, update, loc, NOP_EXPR, type, op);
d4f5cd5e 9182}
68e57f04 9183
74e3c262
RB
9184/* Build the conversion (ptrofftype) OP with a result of a type
9185 compatible with ptrofftype with location LOC if such conversion
9186 is neccesary in GIMPLE, simplifying it first.
6f5b0603
RB
9187 Returns the built expression value inserting any new statements
9188 at GSI honoring BEFORE and UPDATE. */
74e3c262
RB
9189
9190tree
6f5b0603
RB
9191gimple_convert_to_ptrofftype (gimple_stmt_iterator *gsi,
9192 bool before, gsi_iterator_update update,
9193 location_t loc, tree op)
74e3c262
RB
9194{
9195 if (ptrofftype_p (TREE_TYPE (op)))
9196 return op;
6f5b0603 9197 return gimple_convert (gsi, before, update, loc, sizetype, op);
74e3c262
RB
9198}
9199
/* Build a vector of type TYPE in which each element has the value OP.
   Return a gimple value for the result, inserting any new statements
   at GSI honoring BEFORE and UPDATE.  */

tree
gimple_build_vector_from_val (gimple_stmt_iterator *gsi,
			      bool before, gsi_iterator_update update,
			      location_t loc, tree type, tree op)
{
  /* A variable-length vector filled with a non-constant cannot be
     expressed as a VECTOR_CST or CONSTRUCTOR, so use the dedicated
     VEC_DUPLICATE_EXPR representation.  */
  if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
      && !CONSTANT_CLASS_P (op))
    return gimple_build (gsi, before, update,
			 loc, VEC_DUPLICATE_EXPR, type, op);

  /* Otherwise try building a constant/CONSTRUCTOR vector directly.  */
  tree res, vec = build_vector_from_val (type, op);
  if (is_gimple_val (vec))
    return vec;
  /* VEC is not a valid gimple value (e.g. a CONSTRUCTOR with
     non-constant elements) - assign it to a fresh temporary.  */
  if (gimple_in_ssa_p (cfun))
    res = make_ssa_name (type);
  else
    res = create_tmp_reg (type);
  gimple_seq seq = NULL;
  gimple *stmt = gimple_build_assign (res, vec);
  gimple_set_location (stmt, loc);
  gimple_seq_add_stmt_without_update (&seq, stmt);
  gimple_build_insert_seq (gsi, before, update, seq);
  return res;
}
9228
/* Build a vector from BUILDER, handling the case in which some elements
   are non-constant.  Return a gimple value for the result, inserting
   any new instructions to GSI honoring BEFORE and UPDATE.

   BUILDER must not have a stepped encoding on entry.  This is because
   the function is not geared up to handle the arithmetic that would
   be needed in the variable case, and any code building a vector that
   is known to be constant should use BUILDER->build () directly.  */

tree
gimple_build_vector (gimple_stmt_iterator *gsi,
		     bool before, gsi_iterator_update update,
		     location_t loc, tree_vector_builder *builder)
{
  /* Stepped encodings (3 elts per pattern) are not supported here.  */
  gcc_assert (builder->nelts_per_pattern () <= 2);
  unsigned int encoded_nelts = builder->encoded_nelts ();
  /* It suffices to scan the encoded elements: every other element
     repeats one of them.  */
  for (unsigned int i = 0; i < encoded_nelts; ++i)
    if (!CONSTANT_CLASS_P ((*builder)[i]))
      {
	/* At least one element is non-constant, so a VECTOR_CST is out;
	   build a CONSTRUCTOR over all NELTS elements instead.  Note the
	   to_constant call requires a fixed-length vector here.  */
	gimple_seq seq = NULL;
	tree type = builder->type ();
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
	vec<constructor_elt, va_gc> *v;
	vec_alloc (v, nelts);
	/* Reuses I deliberately; the outer loop exits via return below.  */
	for (i = 0; i < nelts; ++i)
	  CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));

	tree res;
	if (gimple_in_ssa_p (cfun))
	  res = make_ssa_name (type);
	else
	  res = create_tmp_reg (type);
	gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
	gimple_set_location (stmt, loc);
	gimple_seq_add_stmt_without_update (&seq, stmt);
	gimple_build_insert_seq (gsi, before, update, seq);
	return res;
      }
  /* All encoded elements constant: emit a VECTOR_CST directly.  */
  return builder->build ();
}
9269
93a73251
MM
9270/* Emit gimple statements into &stmts that take a value given in OLD_SIZE
9271 and generate a value guaranteed to be rounded upwards to ALIGN.
9272
9273 Return the tree node representing this size, it is of TREE_TYPE TYPE. */
9274
9275tree
6f5b0603
RB
9276gimple_build_round_up (gimple_stmt_iterator *gsi,
9277 bool before, gsi_iterator_update update,
9278 location_t loc, tree type,
93a73251
MM
9279 tree old_size, unsigned HOST_WIDE_INT align)
9280{
9281 unsigned HOST_WIDE_INT tg_mask = align - 1;
9282 /* tree new_size = (old_size + tg_mask) & ~tg_mask; */
9283 gcc_assert (INTEGRAL_TYPE_P (type));
9284 tree tree_mask = build_int_cst (type, tg_mask);
6f5b0603
RB
9285 tree oversize = gimple_build (gsi, before, update,
9286 loc, PLUS_EXPR, type, old_size, tree_mask);
93a73251
MM
9287
9288 tree mask = build_int_cst (type, -align);
6f5b0603
RB
9289 return gimple_build (gsi, before, update,
9290 loc, BIT_AND_EXPR, type, oversize, mask);
93a73251
MM
9291}
9292
68e57f04
RS
/* Return true if the result of assignment STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				   int depth)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  tree type = TREE_TYPE (gimple_assign_lhs (stmt));
  /* Delegate to the fold-const.cc helper matching the rhs class.  */
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
					     type,
					     gimple_assign_rhs1 (stmt),
					     strict_overflow_p, depth);
    case GIMPLE_BINARY_RHS:
      return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
					      type,
					      gimple_assign_rhs1 (stmt),
					      gimple_assign_rhs2 (stmt),
					      strict_overflow_p, depth);
    case GIMPLE_TERNARY_RHS:
      /* No ternary helper exists; conservatively answer "unknown".  */
      return false;
    case GIMPLE_SINGLE_RHS:
      return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
					      strict_overflow_p, depth);
    case GIMPLE_INVALID_RHS:
      break;
    }
  gcc_unreachable ();
}
9327
9328/* Return true if return value of call STMT is known to be non-negative.
9329 If the return value is based on the assumption that signed overflow is
9330 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
9331 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
9332
9333static bool
9334gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
9335 int depth)
9336{
9337 tree arg0 = gimple_call_num_args (stmt) > 0 ?
9338 gimple_call_arg (stmt, 0) : NULL_TREE;
9339 tree arg1 = gimple_call_num_args (stmt) > 1 ?
9340 gimple_call_arg (stmt, 1) : NULL_TREE;
ce777eae
RB
9341 tree lhs = gimple_call_lhs (stmt);
9342 return (lhs
9343 && tree_call_nonnegative_warnv_p (TREE_TYPE (lhs),
9344 gimple_call_combined_fn (stmt),
9345 arg0, arg1,
9346 strict_overflow_p, depth));
68e57f04
RS
9347}
9348
4534c203
RB
9349/* Return true if return value of call STMT is known to be non-negative.
9350 If the return value is based on the assumption that signed overflow is
9351 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
9352 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
9353
9354static bool
9355gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
9356 int depth)
9357{
9358 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
9359 {
9360 tree arg = gimple_phi_arg_def (stmt, i);
9361 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
9362 return false;
9363 }
9364 return true;
9365}
9366
68e57f04
RS
/* Return true if STMT is known to compute a non-negative value.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

bool
gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				 int depth)
{
  /* For floating-point results, first consult the global range info:
     a known sign bit gives a definitive answer.  */
  tree type = gimple_range_type (stmt);
  if (type && frange::supports_p (type))
    {
      frange r;
      bool sign;
      if (get_global_range_query ()->range_of_stmt (r, stmt)
	  && r.signbit_p (sign))
	/* Non-negative exactly when the sign bit is known clear.  */
	return !sign;
    }
  /* Otherwise dispatch on the statement kind.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
						depth);
    case GIMPLE_CALL:
      return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
					      depth);
    case GIMPLE_PHI:
      return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
					     depth);
    default:
      return false;
    }
}
67dbe582
RS
9400
/* Return true if the floating-point value computed by assignment STMT
   is known to have an integer value.  We also allow +Inf, -Inf and NaN
   to be considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  /* Hand off to the fold-const.cc helper matching the rhs class.  */
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
					  gimple_assign_rhs1 (stmt), depth);
    case GIMPLE_BINARY_RHS:
      return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
					   gimple_assign_rhs1 (stmt),
					   gimple_assign_rhs2 (stmt), depth);
    case GIMPLE_TERNARY_RHS:
      /* No ternary helper; conservatively answer "not known".  */
      return false;
    case GIMPLE_SINGLE_RHS:
      return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
    case GIMPLE_INVALID_RHS:
      break;
    }
  gcc_unreachable ();
}
9429
9430/* Return true if the floating-point value computed by call STMT is known
9431 to have an integer value. We also allow +Inf, -Inf and NaN to be
5a00b0aa 9432 considered integer values. Return false for signaling NaN.
67dbe582
RS
9433
9434 DEPTH is the current nesting depth of the query. */
9435
9436static bool
9437gimple_call_integer_valued_real_p (gimple *stmt, int depth)
9438{
9439 tree arg0 = (gimple_call_num_args (stmt) > 0
9440 ? gimple_call_arg (stmt, 0)
9441 : NULL_TREE);
9442 tree arg1 = (gimple_call_num_args (stmt) > 1
9443 ? gimple_call_arg (stmt, 1)
9444 : NULL_TREE);
1d9da71f 9445 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
67dbe582
RS
9446 arg0, arg1, depth);
9447}
9448
9449/* Return true if the floating-point result of phi STMT is known to have
9450 an integer value. We also allow +Inf, -Inf and NaN to be considered
5a00b0aa 9451 integer values. Return false for signaling NaN.
67dbe582
RS
9452
9453 DEPTH is the current nesting depth of the query. */
9454
9455static bool
9456gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
9457{
9458 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
9459 {
9460 tree arg = gimple_phi_arg_def (stmt, i);
9461 if (!integer_valued_real_single_p (arg, depth + 1))
9462 return false;
9463 }
9464 return true;
9465}
9466
9467/* Return true if the floating-point value computed by STMT is known
9468 to have an integer value. We also allow +Inf, -Inf and NaN to be
5a00b0aa 9469 considered integer values. Return false for signaling NaN.
67dbe582
RS
9470
9471 DEPTH is the current nesting depth of the query. */
9472
9473bool
9474gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
9475{
9476 switch (gimple_code (stmt))
9477 {
9478 case GIMPLE_ASSIGN:
9479 return gimple_assign_integer_valued_real_p (stmt, depth);
9480 case GIMPLE_CALL:
9481 return gimple_call_integer_valued_real_p (stmt, depth);
9482 case GIMPLE_PHI:
9483 return gimple_phi_integer_valued_real_p (stmt, depth);
9484 default:
9485 return false;
9486 }
9487}