]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/gimple-fold.c
PR 100168: Fix call test on power10.
[thirdparty/gcc.git] / gcc / gimple-fold.c
CommitLineData
cbdd87d4 1/* Statement simplification on GIMPLE.
99dee823 2 Copyright (C) 2010-2021 Free Software Foundation, Inc.
cbdd87d4
RG
3 Split out from tree-ssa-ccp.c.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it
8under the terms of the GNU General Public License as published by the
9Free Software Foundation; either version 3, or (at your option) any
10later version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT
13ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
c7131fb2 24#include "backend.h"
957060b5
AM
25#include "target.h"
26#include "rtl.h"
cbdd87d4 27#include "tree.h"
c7131fb2 28#include "gimple.h"
957060b5 29#include "predict.h"
c7131fb2 30#include "ssa.h"
957060b5
AM
31#include "cgraph.h"
32#include "gimple-pretty-print.h"
cc8bea0a 33#include "gimple-ssa-warn-restrict.h"
c7131fb2 34#include "fold-const.h"
36566b39
PK
35#include "stmt.h"
36#include "expr.h"
37#include "stor-layout.h"
7ee2468b 38#include "dumpfile.h"
2fb9a547 39#include "gimple-fold.h"
45b0be94 40#include "gimplify.h"
5be5c238 41#include "gimple-iterator.h"
442b4905
AM
42#include "tree-into-ssa.h"
43#include "tree-dfa.h"
025d57f0 44#include "tree-object-size.h"
7a300452 45#include "tree-ssa.h"
cbdd87d4 46#include "tree-ssa-propagate.h"
450ad0cd 47#include "ipa-utils.h"
4484a35a 48#include "tree-ssa-address.h"
862d0b35 49#include "langhooks.h"
19e51b40 50#include "gimplify-me.h"
2b5f0895 51#include "dbgcnt.h"
9b2b7279 52#include "builtins.h"
e0ee10ed
RB
53#include "tree-eh.h"
54#include "gimple-match.h"
48126138 55#include "gomp-constants.h"
f869c12f 56#include "optabs-query.h"
629b3d75 57#include "omp-general.h"
abd3a68c 58#include "tree-cfg.h"
a918bfbf 59#include "fold-const-call.h"
314e6352
ML
60#include "stringpool.h"
61#include "attribs.h"
45b2222a 62#include "asan.h"
025d57f0
MS
63#include "diagnostic-core.h"
64#include "intl.h"
6a33d0ff 65#include "calls.h"
5ebaa477 66#include "tree-vector-builder.h"
5d0d5d68 67#include "tree-ssa-strlen.h"
e7868dc6 68#include "varasm.h"
cbdd87d4 69
598f7235
MS
70enum strlen_range_kind {
71 /* Compute the exact constant string length. */
72 SRK_STRLEN,
73 /* Compute the maximum constant string length. */
74 SRK_STRLENMAX,
75 /* Compute a range of string lengths bounded by object sizes. When
76 the length of a string cannot be determined, consider as the upper
77 bound the size of the enclosing object the string may be a member
78 or element of. Also determine the size of the largest character
79 array the string may refer to. */
80 SRK_LENRANGE,
598f7235
MS
81 /* Determine the integer value of the argument (not string length). */
82 SRK_INT_VALUE
83};
84
03c4a945
MS
85static bool
86get_range_strlen (tree, bitmap *, strlen_range_kind, c_strlen_data *, unsigned);
fb471a13 87
b3b9f3d0 88/* Return true when DECL can be referenced from current unit.
c44c2088
JH
89 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
90 We can get declarations that are not possible to reference for various
91 reasons:
1389294c 92
1389294c
JH
93 1) When analyzing C++ virtual tables.
94 C++ virtual tables do have known constructors even
95 when they are keyed to other compilation unit.
96 Those tables can contain pointers to methods and vars
97 in other units. Those methods have both STATIC and EXTERNAL
98 set.
99 2) In WHOPR mode devirtualization might lead to reference
100 to method that was partitioned elsehwere.
101 In this case we have static VAR_DECL or FUNCTION_DECL
102 that has no corresponding callgraph/varpool node
b3b9f3d0
JH
103 declaring the body.
104 3) COMDAT functions referred by external vtables that
3e89949e 105 we devirtualize only during final compilation stage.
b3b9f3d0
JH
106 At this time we already decided that we will not output
107 the function body and thus we can't reference the symbol
108 directly. */
109
1389294c 110static bool
c44c2088 111can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
1389294c 112{
2c8326a5 113 varpool_node *vnode;
1389294c 114 struct cgraph_node *node;
5e20cdc9 115 symtab_node *snode;
c44c2088 116
00de328a 117 if (DECL_ABSTRACT_P (decl))
1632a686
JH
118 return false;
119
120 /* We are concerned only about static/external vars and functions. */
121 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
8813a647 122 || !VAR_OR_FUNCTION_DECL_P (decl))
1632a686
JH
123 return true;
124
d4babd37
JM
125 /* Static objects can be referred only if they are defined and not optimized
126 out yet. */
127 if (!TREE_PUBLIC (decl))
1632a686 128 {
d4babd37
JM
129 if (DECL_EXTERNAL (decl))
130 return false;
3aaf0529
JH
131 /* Before we start optimizing unreachable code we can be sure all
132 static objects are defined. */
3dafb85c 133 if (symtab->function_flags_ready)
3aaf0529 134 return true;
d52f5295 135 snode = symtab_node::get (decl);
3aaf0529 136 if (!snode || !snode->definition)
1632a686 137 return false;
7de90a6c 138 node = dyn_cast <cgraph_node *> (snode);
a62bfab5 139 return !node || !node->inlined_to;
1632a686
JH
140 }
141
6da8be89 142 /* We will later output the initializer, so we can refer to it.
c44c2088 143 So we are concerned only when DECL comes from initializer of
3aaf0529 144 external var or var that has been optimized out. */
c44c2088 145 if (!from_decl
8813a647 146 || !VAR_P (from_decl)
3aaf0529 147 || (!DECL_EXTERNAL (from_decl)
9041d2e6 148 && (vnode = varpool_node::get (from_decl)) != NULL
3aaf0529 149 && vnode->definition)
6da8be89 150 || (flag_ltrans
9041d2e6 151 && (vnode = varpool_node::get (from_decl)) != NULL
6adda80b 152 && vnode->in_other_partition))
c44c2088 153 return true;
c44c2088
JH
154 /* We are folding reference from external vtable. The vtable may reffer
155 to a symbol keyed to other compilation unit. The other compilation
156 unit may be in separate DSO and the symbol may be hidden. */
157 if (DECL_VISIBILITY_SPECIFIED (decl)
158 && DECL_EXTERNAL (decl)
a33a931b 159 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
d52f5295 160 && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
c44c2088 161 return false;
b3b9f3d0
JH
162 /* When function is public, we always can introduce new reference.
163 Exception are the COMDAT functions where introducing a direct
164 reference imply need to include function body in the curren tunit. */
165 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
166 return true;
3aaf0529
JH
167 /* We have COMDAT. We are going to check if we still have definition
168 or if the definition is going to be output in other partition.
169 Bypass this when gimplifying; all needed functions will be produced.
c44c2088
JH
170
171 As observed in PR20991 for already optimized out comdat virtual functions
073a8998 172 it may be tempting to not necessarily give up because the copy will be
c44c2088
JH
173 output elsewhere when corresponding vtable is output.
174 This is however not possible - ABI specify that COMDATs are output in
175 units where they are used and when the other unit was compiled with LTO
176 it is possible that vtable was kept public while the function itself
177 was privatized. */
3dafb85c 178 if (!symtab->function_flags_ready)
b3b9f3d0 179 return true;
c44c2088 180
d52f5295 181 snode = symtab_node::get (decl);
3aaf0529
JH
182 if (!snode
183 || ((!snode->definition || DECL_EXTERNAL (decl))
184 && (!snode->in_other_partition
185 || (!snode->forced_by_abi && !snode->force_output))))
186 return false;
187 node = dyn_cast <cgraph_node *> (snode);
a62bfab5 188 return !node || !node->inlined_to;
1389294c
JH
189}
190
a15ebbcd
ML
191/* Create a temporary for TYPE for a statement STMT. If the current function
192 is in SSA form, a SSA name is created. Otherwise a temporary register
193 is made. */
194
edc19e03
WS
195tree
196create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
a15ebbcd
ML
197{
198 if (gimple_in_ssa_p (cfun))
199 return make_ssa_name (type, stmt);
200 else
201 return create_tmp_reg (type);
202}
203
0038d4e0 204/* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
c44c2088
JH
205 acceptable form for is_gimple_min_invariant.
206 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
17f39a39
JH
207
208tree
c44c2088 209canonicalize_constructor_val (tree cval, tree from_decl)
17f39a39 210{
37f808c4
RB
211 if (CONSTANT_CLASS_P (cval))
212 return cval;
213
50619002
EB
214 tree orig_cval = cval;
215 STRIP_NOPS (cval);
315f5f1b
RG
216 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
217 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
17f39a39 218 {
315f5f1b
RG
219 tree ptr = TREE_OPERAND (cval, 0);
220 if (is_gimple_min_invariant (ptr))
221 cval = build1_loc (EXPR_LOCATION (cval),
222 ADDR_EXPR, TREE_TYPE (ptr),
223 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
224 ptr,
225 fold_convert (ptr_type_node,
226 TREE_OPERAND (cval, 1))));
17f39a39
JH
227 }
228 if (TREE_CODE (cval) == ADDR_EXPR)
229 {
5a27a197
RG
230 tree base = NULL_TREE;
231 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
ca5f4331
MM
232 {
233 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
234 if (base)
235 TREE_OPERAND (cval, 0) = base;
236 }
5a27a197
RG
237 else
238 base = get_base_address (TREE_OPERAND (cval, 0));
7501ca28
RG
239 if (!base)
240 return NULL_TREE;
b3b9f3d0 241
8813a647 242 if (VAR_OR_FUNCTION_DECL_P (base)
c44c2088 243 && !can_refer_decl_in_current_unit_p (base, from_decl))
1389294c 244 return NULL_TREE;
13f92e8d
JJ
245 if (TREE_TYPE (base) == error_mark_node)
246 return NULL_TREE;
8813a647 247 if (VAR_P (base))
a076632e
RB
248 /* ??? We should be able to assert that TREE_ADDRESSABLE is set,
249 but since the use can be in a debug stmt we can't. */
250 ;
7501ca28
RG
251 else if (TREE_CODE (base) == FUNCTION_DECL)
252 {
253 /* Make sure we create a cgraph node for functions we'll reference.
254 They can be non-existent if the reference comes from an entry
255 of an external vtable for example. */
d52f5295 256 cgraph_node::get_create (base);
7501ca28 257 }
0038d4e0 258 /* Fixup types in global initializers. */
73aef89e
RG
259 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
260 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
50619002
EB
261
262 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
263 cval = fold_convert (TREE_TYPE (orig_cval), cval);
264 return cval;
17f39a39 265 }
37f808c4
RB
266 /* In CONSTRUCTORs we may see unfolded constants like (int (*) ()) 0. */
267 if (TREE_CODE (cval) == INTEGER_CST)
268 {
269 if (TREE_OVERFLOW_P (cval))
270 cval = drop_tree_overflow (cval);
271 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
272 cval = fold_convert (TREE_TYPE (orig_cval), cval);
273 return cval;
274 }
50619002 275 return orig_cval;
17f39a39 276}
cbdd87d4
RG
277
278/* If SYM is a constant variable with known value, return the value.
279 NULL_TREE is returned otherwise. */
280
281tree
282get_symbol_constant_value (tree sym)
283{
6a6dac52
JH
284 tree val = ctor_for_folding (sym);
285 if (val != error_mark_node)
cbdd87d4 286 {
cbdd87d4
RG
287 if (val)
288 {
9d60be38 289 val = canonicalize_constructor_val (unshare_expr (val), sym);
1389294c 290 if (val && is_gimple_min_invariant (val))
17f39a39 291 return val;
1389294c
JH
292 else
293 return NULL_TREE;
cbdd87d4
RG
294 }
295 /* Variables declared 'const' without an initializer
296 have zero as the initializer if they may not be
297 overridden at link or run time. */
298 if (!val
b8a8c472 299 && is_gimple_reg_type (TREE_TYPE (sym)))
e8160c9a 300 return build_zero_cst (TREE_TYPE (sym));
cbdd87d4
RG
301 }
302
303 return NULL_TREE;
304}
305
306
cbdd87d4 307
0bf8cd9d
RB
308/* Subroutine of fold_stmt. We perform constant folding of the
309 memory reference tree EXPR. */
cbdd87d4
RG
310
311static tree
0bf8cd9d 312maybe_fold_reference (tree expr)
cbdd87d4 313{
2301a394 314 tree result = NULL_TREE;
cbdd87d4 315
f0eddb90
RG
316 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
317 || TREE_CODE (expr) == REALPART_EXPR
318 || TREE_CODE (expr) == IMAGPART_EXPR)
319 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
2301a394 320 result = fold_unary_loc (EXPR_LOCATION (expr),
f0eddb90
RG
321 TREE_CODE (expr),
322 TREE_TYPE (expr),
2301a394
RB
323 TREE_OPERAND (expr, 0));
324 else if (TREE_CODE (expr) == BIT_FIELD_REF
325 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
326 result = fold_ternary_loc (EXPR_LOCATION (expr),
327 TREE_CODE (expr),
328 TREE_TYPE (expr),
329 TREE_OPERAND (expr, 0),
330 TREE_OPERAND (expr, 1),
331 TREE_OPERAND (expr, 2));
332 else
333 result = fold_const_aggregate_ref (expr);
f0eddb90 334
2301a394 335 if (result && is_gimple_min_invariant (result))
f0eddb90 336 return result;
cbdd87d4 337
cbdd87d4
RG
338 return NULL_TREE;
339}
340
52a5515e
RB
341/* Return true if EXPR is an acceptable right-hand-side for a
342 GIMPLE assignment. We validate the entire tree, not just
343 the root node, thus catching expressions that embed complex
344 operands that are not permitted in GIMPLE. This function
345 is needed because the folding routines in fold-const.c
346 may return such expressions in some cases, e.g., an array
347 access with an embedded index addition. It may make more
348 sense to have folding routines that are sensitive to the
349 constraints on GIMPLE operands, rather than abandoning any
350 any attempt to fold if the usual folding turns out to be too
351 aggressive. */
352
353bool
354valid_gimple_rhs_p (tree expr)
355{
356 enum tree_code code = TREE_CODE (expr);
357
358 switch (TREE_CODE_CLASS (code))
359 {
360 case tcc_declaration:
361 if (!is_gimple_variable (expr))
362 return false;
363 break;
364
365 case tcc_constant:
366 /* All constants are ok. */
367 break;
368
369 case tcc_comparison:
370 /* GENERIC allows comparisons with non-boolean types, reject
371 those for GIMPLE. Let vector-typed comparisons pass - rules
372 for GENERIC and GIMPLE are the same here. */
373 if (!(INTEGRAL_TYPE_P (TREE_TYPE (expr))
374 && (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE
375 || TYPE_PRECISION (TREE_TYPE (expr)) == 1))
376 && ! VECTOR_TYPE_P (TREE_TYPE (expr)))
377 return false;
378
379 /* Fallthru. */
380 case tcc_binary:
381 if (!is_gimple_val (TREE_OPERAND (expr, 0))
382 || !is_gimple_val (TREE_OPERAND (expr, 1)))
383 return false;
384 break;
385
386 case tcc_unary:
387 if (!is_gimple_val (TREE_OPERAND (expr, 0)))
388 return false;
389 break;
390
391 case tcc_expression:
392 switch (code)
393 {
394 case ADDR_EXPR:
395 {
396 tree t;
397 if (is_gimple_min_invariant (expr))
398 return true;
399 t = TREE_OPERAND (expr, 0);
400 while (handled_component_p (t))
401 {
402 /* ??? More checks needed, see the GIMPLE verifier. */
403 if ((TREE_CODE (t) == ARRAY_REF
404 || TREE_CODE (t) == ARRAY_RANGE_REF)
405 && !is_gimple_val (TREE_OPERAND (t, 1)))
406 return false;
407 t = TREE_OPERAND (t, 0);
408 }
409 if (!is_gimple_id (t))
410 return false;
411 }
412 break;
413
414 default:
415 if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
416 {
417 if ((code == COND_EXPR
418 ? !is_gimple_condexpr (TREE_OPERAND (expr, 0))
419 : !is_gimple_val (TREE_OPERAND (expr, 0)))
420 || !is_gimple_val (TREE_OPERAND (expr, 1))
421 || !is_gimple_val (TREE_OPERAND (expr, 2)))
422 return false;
423 break;
424 }
425 return false;
426 }
427 break;
428
429 case tcc_vl_exp:
430 return false;
431
432 case tcc_exceptional:
433 if (code == CONSTRUCTOR)
434 {
435 unsigned i;
436 tree elt;
437 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (expr), i, elt)
438 if (!is_gimple_val (elt))
439 return false;
440 return true;
441 }
442 if (code != SSA_NAME)
443 return false;
444 break;
445
446 case tcc_reference:
447 if (code == BIT_FIELD_REF)
448 return is_gimple_val (TREE_OPERAND (expr, 0));
449 return false;
450
451 default:
452 return false;
453 }
454
455 return true;
456}
457
cbdd87d4
RG
458
459/* Attempt to fold an assignment statement pointed-to by SI. Returns a
460 replacement rhs for the statement or NULL_TREE if no simplification
461 could be made. It is assumed that the operands have been previously
462 folded. */
463
464static tree
465fold_gimple_assign (gimple_stmt_iterator *si)
466{
355fe088 467 gimple *stmt = gsi_stmt (*si);
cbdd87d4
RG
468 enum tree_code subcode = gimple_assign_rhs_code (stmt);
469 location_t loc = gimple_location (stmt);
470
471 tree result = NULL_TREE;
472
473 switch (get_gimple_rhs_class (subcode))
474 {
475 case GIMPLE_SINGLE_RHS:
476 {
477 tree rhs = gimple_assign_rhs1 (stmt);
478
8c00ba08
JW
479 if (TREE_CLOBBER_P (rhs))
480 return NULL_TREE;
481
4e71066d 482 if (REFERENCE_CLASS_P (rhs))
0bf8cd9d 483 return maybe_fold_reference (rhs);
cbdd87d4 484
bdf37f7a
JH
485 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
486 {
487 tree val = OBJ_TYPE_REF_EXPR (rhs);
488 if (is_gimple_min_invariant (val))
489 return val;
f8a39967 490 else if (flag_devirtualize && virtual_method_call_p (rhs))
bdf37f7a
JH
491 {
492 bool final;
493 vec <cgraph_node *>targets
f8a39967 494 = possible_polymorphic_call_targets (rhs, stmt, &final);
2b5f0895 495 if (final && targets.length () <= 1 && dbg_cnt (devirt))
bdf37f7a 496 {
2b5f0895
XDL
497 if (dump_enabled_p ())
498 {
4f5b9c80 499 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
2b5f0895
XDL
500 "resolving virtual function address "
501 "reference to function %s\n",
502 targets.length () == 1
503 ? targets[0]->name ()
3ef276e4 504 : "NULL");
2b5f0895 505 }
3ef276e4
RB
506 if (targets.length () == 1)
507 {
508 val = fold_convert (TREE_TYPE (val),
509 build_fold_addr_expr_loc
510 (loc, targets[0]->decl));
511 STRIP_USELESS_TYPE_CONVERSION (val);
512 }
513 else
67914693
SL
514 /* We cannot use __builtin_unreachable here because it
515 cannot have address taken. */
3ef276e4 516 val = build_int_cst (TREE_TYPE (val), 0);
bdf37f7a
JH
517 return val;
518 }
519 }
bdf37f7a 520 }
7524f419 521
cbdd87d4
RG
522 else if (TREE_CODE (rhs) == ADDR_EXPR)
523 {
70f34814 524 tree ref = TREE_OPERAND (rhs, 0);
0bf8cd9d
RB
525 if (TREE_CODE (ref) == MEM_REF
526 && integer_zerop (TREE_OPERAND (ref, 1)))
7524f419 527 {
0bf8cd9d
RB
528 result = TREE_OPERAND (ref, 0);
529 if (!useless_type_conversion_p (TREE_TYPE (rhs),
530 TREE_TYPE (result)))
531 result = build1 (NOP_EXPR, TREE_TYPE (rhs), result);
532 return result;
7524f419 533 }
cbdd87d4
RG
534 }
535
536 else if (TREE_CODE (rhs) == CONSTRUCTOR
7524f419 537 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
cbdd87d4
RG
538 {
539 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
540 unsigned i;
541 tree val;
542
543 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
7524f419 544 if (! CONSTANT_CLASS_P (val))
cbdd87d4
RG
545 return NULL_TREE;
546
547 return build_vector_from_ctor (TREE_TYPE (rhs),
548 CONSTRUCTOR_ELTS (rhs));
549 }
550
ca8e8301
RB
551 else if (DECL_P (rhs)
552 && is_gimple_reg_type (TREE_TYPE (rhs)))
9d60be38 553 return get_symbol_constant_value (rhs);
cbdd87d4
RG
554 }
555 break;
556
557 case GIMPLE_UNARY_RHS:
cbdd87d4
RG
558 break;
559
560 case GIMPLE_BINARY_RHS:
cbdd87d4
RG
561 break;
562
0354c0c7 563 case GIMPLE_TERNARY_RHS:
5c099d40
RB
564 result = fold_ternary_loc (loc, subcode,
565 TREE_TYPE (gimple_assign_lhs (stmt)),
566 gimple_assign_rhs1 (stmt),
567 gimple_assign_rhs2 (stmt),
568 gimple_assign_rhs3 (stmt));
0354c0c7
BS
569
570 if (result)
571 {
572 STRIP_USELESS_TYPE_CONVERSION (result);
573 if (valid_gimple_rhs_p (result))
574 return result;
0354c0c7
BS
575 }
576 break;
577
cbdd87d4
RG
578 case GIMPLE_INVALID_RHS:
579 gcc_unreachable ();
580 }
581
582 return NULL_TREE;
583}
584
fef5a0d9
RB
585
586/* Replace a statement at *SI_P with a sequence of statements in STMTS,
587 adjusting the replacement stmts location and virtual operands.
588 If the statement has a lhs the last stmt in the sequence is expected
589 to assign to that lhs. */
590
591static void
592gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
593{
355fe088 594 gimple *stmt = gsi_stmt (*si_p);
fef5a0d9
RB
595
596 if (gimple_has_location (stmt))
597 annotate_all_with_location (stmts, gimple_location (stmt));
598
599 /* First iterate over the replacement statements backward, assigning
600 virtual operands to their defining statements. */
355fe088 601 gimple *laststore = NULL;
fef5a0d9
RB
602 for (gimple_stmt_iterator i = gsi_last (stmts);
603 !gsi_end_p (i); gsi_prev (&i))
604 {
355fe088 605 gimple *new_stmt = gsi_stmt (i);
fef5a0d9
RB
606 if ((gimple_assign_single_p (new_stmt)
607 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
608 || (is_gimple_call (new_stmt)
609 && (gimple_call_flags (new_stmt)
610 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
611 {
612 tree vdef;
613 if (!laststore)
614 vdef = gimple_vdef (stmt);
615 else
616 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
617 gimple_set_vdef (new_stmt, vdef);
618 if (vdef && TREE_CODE (vdef) == SSA_NAME)
619 SSA_NAME_DEF_STMT (vdef) = new_stmt;
620 laststore = new_stmt;
621 }
622 }
623
624 /* Second iterate over the statements forward, assigning virtual
625 operands to their uses. */
626 tree reaching_vuse = gimple_vuse (stmt);
627 for (gimple_stmt_iterator i = gsi_start (stmts);
628 !gsi_end_p (i); gsi_next (&i))
629 {
355fe088 630 gimple *new_stmt = gsi_stmt (i);
fef5a0d9
RB
631 /* If the new statement possibly has a VUSE, update it with exact SSA
632 name we know will reach this one. */
633 if (gimple_has_mem_ops (new_stmt))
634 gimple_set_vuse (new_stmt, reaching_vuse);
635 gimple_set_modified (new_stmt, true);
636 if (gimple_vdef (new_stmt))
637 reaching_vuse = gimple_vdef (new_stmt);
638 }
639
640 /* If the new sequence does not do a store release the virtual
641 definition of the original statement. */
642 if (reaching_vuse
643 && reaching_vuse == gimple_vuse (stmt))
644 {
645 tree vdef = gimple_vdef (stmt);
646 if (vdef
647 && TREE_CODE (vdef) == SSA_NAME)
648 {
649 unlink_stmt_vdef (stmt);
650 release_ssa_name (vdef);
651 }
652 }
653
654 /* Finally replace the original statement with the sequence. */
655 gsi_replace_with_seq (si_p, stmts, false);
656}
657
52a5515e
RB
658/* Helper function for update_gimple_call and
659 gimplify_and_update_call_from_tree. A GIMPLE_CALL STMT is being replaced
660 with GIMPLE_CALL NEW_STMT. */
661
662static void
663finish_update_gimple_call (gimple_stmt_iterator *si_p, gimple *new_stmt,
664 gimple *stmt)
665{
666 tree lhs = gimple_call_lhs (stmt);
667 gimple_call_set_lhs (new_stmt, lhs);
668 if (lhs && TREE_CODE (lhs) == SSA_NAME)
669 SSA_NAME_DEF_STMT (lhs) = new_stmt;
670 gimple_move_vops (new_stmt, stmt);
671 gimple_set_location (new_stmt, gimple_location (stmt));
672 if (gimple_block (new_stmt) == NULL_TREE)
673 gimple_set_block (new_stmt, gimple_block (stmt));
674 gsi_replace (si_p, new_stmt, false);
675}
676
677/* Update a GIMPLE_CALL statement at iterator *SI_P to call to FN
678 with number of arguments NARGS, where the arguments in GIMPLE form
679 follow NARGS argument. */
680
681bool
682update_gimple_call (gimple_stmt_iterator *si_p, tree fn, int nargs, ...)
683{
684 va_list ap;
685 gcall *new_stmt, *stmt = as_a <gcall *> (gsi_stmt (*si_p));
686
687 gcc_assert (is_gimple_call (stmt));
688 va_start (ap, nargs);
689 new_stmt = gimple_build_call_valist (fn, nargs, ap);
690 finish_update_gimple_call (si_p, new_stmt, stmt);
691 va_end (ap);
692 return true;
693}
694
695/* Return true if EXPR is a CALL_EXPR suitable for representation
696 as a single GIMPLE_CALL statement. If the arguments require
697 further gimplification, return false. */
698
699static bool
700valid_gimple_call_p (tree expr)
701{
702 unsigned i, nargs;
703
704 if (TREE_CODE (expr) != CALL_EXPR)
705 return false;
706
707 nargs = call_expr_nargs (expr);
708 for (i = 0; i < nargs; i++)
709 {
710 tree arg = CALL_EXPR_ARG (expr, i);
711 if (is_gimple_reg_type (TREE_TYPE (arg)))
712 {
713 if (!is_gimple_val (arg))
714 return false;
715 }
716 else
717 if (!is_gimple_lvalue (arg))
718 return false;
719 }
720
721 return true;
722}
723
cbdd87d4
RG
724/* Convert EXPR into a GIMPLE value suitable for substitution on the
725 RHS of an assignment. Insert the necessary statements before
726 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
727 is replaced. If the call is expected to produces a result, then it
728 is replaced by an assignment of the new RHS to the result variable.
729 If the result is to be ignored, then the call is replaced by a
fe2ef088
MM
730 GIMPLE_NOP. A proper VDEF chain is retained by making the first
731 VUSE and the last VDEF of the whole sequence be the same as the replaced
732 statement and using new SSA names for stores in between. */
cbdd87d4
RG
733
734void
735gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
736{
737 tree lhs;
355fe088 738 gimple *stmt, *new_stmt;
cbdd87d4 739 gimple_stmt_iterator i;
355a7673 740 gimple_seq stmts = NULL;
cbdd87d4
RG
741
742 stmt = gsi_stmt (*si_p);
743
744 gcc_assert (is_gimple_call (stmt));
745
52a5515e
RB
746 if (valid_gimple_call_p (expr))
747 {
748 /* The call has simplified to another call. */
749 tree fn = CALL_EXPR_FN (expr);
750 unsigned i;
751 unsigned nargs = call_expr_nargs (expr);
752 vec<tree> args = vNULL;
753 gcall *new_stmt;
754
755 if (nargs > 0)
756 {
757 args.create (nargs);
758 args.safe_grow_cleared (nargs, true);
759
760 for (i = 0; i < nargs; i++)
761 args[i] = CALL_EXPR_ARG (expr, i);
762 }
763
764 new_stmt = gimple_build_call_vec (fn, args);
765 finish_update_gimple_call (si_p, new_stmt, stmt);
766 args.release ();
767 return;
768 }
cbdd87d4 769
e256dfce 770 lhs = gimple_call_lhs (stmt);
cbdd87d4 771 if (lhs == NULL_TREE)
6e572326 772 {
52a5515e 773 push_gimplify_context (gimple_in_ssa_p (cfun));
6e572326 774 gimplify_and_add (expr, &stmts);
52a5515e
RB
775 pop_gimplify_context (NULL);
776
6e572326
RG
777 /* We can end up with folding a memcpy of an empty class assignment
778 which gets optimized away by C++ gimplification. */
779 if (gimple_seq_empty_p (stmts))
780 {
781 if (gimple_in_ssa_p (cfun))
782 {
783 unlink_stmt_vdef (stmt);
784 release_defs (stmt);
785 }
f6b4dc28 786 gsi_replace (si_p, gimple_build_nop (), false);
6e572326
RG
787 return;
788 }
789 }
cbdd87d4 790 else
e256dfce 791 {
381cdae4 792 tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
e256dfce
RG
793 new_stmt = gimple_build_assign (lhs, tmp);
794 i = gsi_last (stmts);
795 gsi_insert_after_without_update (&i, new_stmt,
796 GSI_CONTINUE_LINKING);
797 }
cbdd87d4 798
fef5a0d9
RB
799 gsi_replace_with_seq_vops (si_p, stmts);
800}
cbdd87d4 801
fef5a0d9
RB
802
803/* Replace the call at *GSI with the gimple value VAL. */
804
e3174bdf 805void
fef5a0d9
RB
806replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
807{
355fe088 808 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9 809 tree lhs = gimple_call_lhs (stmt);
355fe088 810 gimple *repl;
fef5a0d9 811 if (lhs)
e256dfce 812 {
fef5a0d9
RB
813 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
814 val = fold_convert (TREE_TYPE (lhs), val);
815 repl = gimple_build_assign (lhs, val);
816 }
817 else
818 repl = gimple_build_nop ();
819 tree vdef = gimple_vdef (stmt);
820 if (vdef && TREE_CODE (vdef) == SSA_NAME)
821 {
822 unlink_stmt_vdef (stmt);
823 release_ssa_name (vdef);
824 }
f6b4dc28 825 gsi_replace (gsi, repl, false);
fef5a0d9
RB
826}
827
828/* Replace the call at *GSI with the new call REPL and fold that
829 again. */
830
831static void
355fe088 832replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
fef5a0d9 833{
355fe088 834 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9
RB
835 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
836 gimple_set_location (repl, gimple_location (stmt));
779724a5 837 gimple_move_vops (repl, stmt);
f6b4dc28 838 gsi_replace (gsi, repl, false);
fef5a0d9
RB
839 fold_stmt (gsi);
840}
841
842/* Return true if VAR is a VAR_DECL or a component thereof. */
843
844static bool
845var_decl_component_p (tree var)
846{
847 tree inner = var;
848 while (handled_component_p (inner))
849 inner = TREE_OPERAND (inner, 0);
47cac108
RB
850 return (DECL_P (inner)
851 || (TREE_CODE (inner) == MEM_REF
852 && TREE_CODE (TREE_OPERAND (inner, 0)) == ADDR_EXPR));
fef5a0d9
RB
853}
854
c89af696
AH
855/* Return TRUE if the SIZE argument, representing the size of an
856 object, is in a range of values of which exactly zero is valid. */
6512c0f1
MS
857
858static bool
859size_must_be_zero_p (tree size)
860{
861 if (integer_zerop (size))
862 return true;
863
3f27391f 864 if (TREE_CODE (size) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (size)))
6512c0f1
MS
865 return false;
866
6512c0f1
MS
867 tree type = TREE_TYPE (size);
868 int prec = TYPE_PRECISION (type);
869
6512c0f1
MS
870 /* Compute the value of SSIZE_MAX, the largest positive value that
871 can be stored in ssize_t, the signed counterpart of size_t. */
872 wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
5d462877 873 value_range valid_range (build_int_cst (type, 0),
028d81b1
AH
874 wide_int_to_tree (type, ssize_max));
875 value_range vr;
45f4e2b0
AH
876 if (cfun)
877 get_range_query (cfun)->range_of_expr (vr, size);
878 else
879 get_global_range_query ()->range_of_expr (vr, size);
880 if (vr.undefined_p ())
881 vr.set_varying (TREE_TYPE (size));
c89af696
AH
882 vr.intersect (&valid_range);
883 return vr.zero_p ();
6512c0f1
MS
884}
885
cc8bea0a
MS
886/* Fold function call to builtin mem{{,p}cpy,move}. Try to detect and
887 diagnose (otherwise undefined) overlapping copies without preventing
888 folding. When folded, GCC guarantees that overlapping memcpy has
   the same semantics as memmove.  Call to the library memcpy need not
   provide the same guarantee.  Return false if no simplification can
   be made.  */

static bool
gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
			       tree dest, tree src, enum built_in_function code)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree len = gimple_call_arg (stmt, 2);
  location_t loc = gimple_location (stmt);

  /* If the LEN parameter is a constant zero or in range where
     the only valid value is zero, return DEST.  */
  if (size_must_be_zero_p (len))
    {
      gimple *repl;
      if (gimple_call_lhs (stmt))
	repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
      else
	repl = gimple_build_nop ();
      tree vdef = gimple_vdef (stmt);
      if (vdef && TREE_CODE (vdef) == SSA_NAME)
	{
	  /* The call no longer stores anything; drop its virtual def.  */
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
      gsi_replace (gsi, repl, false);
      return true;
    }

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    {
      /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
	 It's safe and may even be emitted by GCC itself (see bug
	 32667).  */
      unlink_stmt_vdef (stmt);
      if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	release_ssa_name (gimple_vdef (stmt));
      if (!lhs)
	{
	  gsi_replace (gsi, gimple_build_nop (), false);
	  return true;
	}
      /* LHS is non-null here; DONE emits LHS = DEST (+ LEN for mempcpy).  */
      goto done;
    }
  else
    {
      /* We cannot (easily) change the type of the copy if it is a storage
	 order barrier, i.e. is equivalent to a VIEW_CONVERT_EXPR that can
	 modify the storage order of objects (see storage_order_barrier_p).  */
      tree srctype
	= POINTER_TYPE_P (TREE_TYPE (src))
	  ? TREE_TYPE (TREE_TYPE (src)) : NULL_TREE;
      tree desttype
	= POINTER_TYPE_P (TREE_TYPE (dest))
	  ? TREE_TYPE (TREE_TYPE (dest)) : NULL_TREE;
      tree destvar, srcvar, srcoff;
      unsigned int src_align, dest_align;
      unsigned HOST_WIDE_INT tmp_len;
      const char *tmp_str;

      /* Build accesses at offset zero with a ref-all character type.  */
      tree off0
	= build_int_cst (build_pointer_type_for_mode (char_type_node,
						      ptr_mode, true), 0);

      /* If we can perform the copy efficiently with first doing all loads
	 and then all stores inline it that way.  Currently efficiently
	 means that we can load all the memory into a single integer
	 register which is what MOVE_MAX gives us.  */
      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (tree_fits_uhwi_p (len)
	  && compare_tree_int (len, MOVE_MAX) <= 0
	  /* FIXME: Don't transform copies from strings with known length.
	     Until GCC 9 this prevented a case in gcc.dg/strlenopt-8.c
	     from being handled, and the case was XFAILed for that reason.
	     Now that it is handled and the XFAIL removed, as soon as other
	     strlenopt tests that rely on it for passing are adjusted, this
	     hack can be removed.  */
	  && !c_strlen (src, 1)
	  && !((tmp_str = getbyterep (src, &tmp_len)) != NULL
	       && memchr (tmp_str, 0, tmp_len) == NULL)
	  && !(srctype
	       && AGGREGATE_TYPE_P (srctype)
	       && TYPE_REVERSE_STORAGE_ORDER (srctype))
	  && !(desttype
	       && AGGREGATE_TYPE_P (desttype)
	       && TYPE_REVERSE_STORAGE_ORDER (desttype)))
	{
	  unsigned ilen = tree_to_uhwi (len);
	  if (pow2p_hwi (ilen))
	    {
	      /* Detect out-of-bounds accesses without issuing warnings.
		 Avoid folding out-of-bounds copies but to avoid false
		 positives for unreachable code defer warning until after
		 DCE has worked its magic.
		 -Wrestrict is still diagnosed.  */
	      if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
							 dest, src, len, len,
							 false, false))
		if (warning != OPT_Wrestrict)
		  return false;

	      scalar_int_mode mode;
	      tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
	      if (type
		  && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
		  && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
		  /* If the destination pointer is not aligned we must be able
		     to emit an unaligned store.  */
		  && (dest_align >= GET_MODE_ALIGNMENT (mode)
		      || !targetm.slow_unaligned_access (mode, dest_align)
		      || (optab_handler (movmisalign_optab, mode)
			  != CODE_FOR_nothing)))
		{
		  tree srctype = type;
		  tree desttype = type;
		  if (src_align < GET_MODE_ALIGNMENT (mode))
		    srctype = build_aligned_type (type, src_align);
		  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
		  tree tem = fold_const_aggregate_ref (srcmem);
		  if (tem)
		    srcmem = tem;
		  /* If the source load would be slow and cannot use a
		     misaligned-move pattern, give up on inlining it.  */
		  else if (src_align < GET_MODE_ALIGNMENT (mode)
			   && targetm.slow_unaligned_access (mode, src_align)
			   && (optab_handler (movmisalign_optab, mode)
			       == CODE_FOR_nothing))
		    srcmem = NULL_TREE;
		  if (srcmem)
		    {
		      gimple *new_stmt;
		      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
			{
			  /* Load the source into a temporary register first
			     so all loads happen before the store.  */
			  new_stmt = gimple_build_assign (NULL_TREE, srcmem);
			  srcmem
			    = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
							  new_stmt);
			  gimple_assign_set_lhs (new_stmt, srcmem);
			  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      if (dest_align < GET_MODE_ALIGNMENT (mode))
			desttype = build_aligned_type (type, dest_align);
		      new_stmt
			= gimple_build_assign (fold_build2 (MEM_REF, desttype,
							    dest, off0),
					       srcmem);
		      gimple_move_vops (new_stmt, stmt);
		      if (!lhs)
			{
			  gsi_replace (gsi, new_stmt, false);
			  return true;
			}
		      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
		      goto done;
		    }
		}
	    }
	}

      if (code == BUILT_IN_MEMMOVE)
	{
	  /* Both DEST and SRC must be pointer types.
	     ??? This is what old code did.  Is the testing for pointer types
	     really mandatory?

	     If either SRC is readonly or length is 1, we can use memcpy.  */
	  if (!dest_align || !src_align)
	    return false;
	  if (readonly_data_expr (src)
	      || (tree_fits_uhwi_p (len)
		  && (MIN (src_align, dest_align) / BITS_PER_UNIT
		      >= tree_to_uhwi (len))))
	    {
	      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If *src and *dest can't overlap, optimize into memcpy as well.  */
	  if (TREE_CODE (src) == ADDR_EXPR
	      && TREE_CODE (dest) == ADDR_EXPR)
	    {
	      tree src_base, dest_base, fn;
	      poly_int64 src_offset = 0, dest_offset = 0;
	      poly_uint64 maxsize;

	      srcvar = TREE_OPERAND (src, 0);
	      src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
	      if (src_base == NULL)
		src_base = srcvar;
	      destvar = TREE_OPERAND (dest, 0);
	      dest_base = get_addr_base_and_unit_offset (destvar,
							 &dest_offset);
	      if (dest_base == NULL)
		dest_base = destvar;
	      if (!poly_int_tree_p (len, &maxsize))
		maxsize = -1;
	      if (SSA_VAR_P (src_base)
		  && SSA_VAR_P (dest_base))
		{
		  if (operand_equal_p (src_base, dest_base, 0)
		      && ranges_maybe_overlap_p (src_offset, maxsize,
						 dest_offset, maxsize))
		    return false;
		}
	      else if (TREE_CODE (src_base) == MEM_REF
		       && TREE_CODE (dest_base) == MEM_REF)
		{
		  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
					 TREE_OPERAND (dest_base, 0), 0))
		    return false;
		  poly_offset_int full_src_offset
		    = mem_ref_offset (src_base) + src_offset;
		  poly_offset_int full_dest_offset
		    = mem_ref_offset (dest_base) + dest_offset;
		  if (ranges_maybe_overlap_p (full_src_offset, maxsize,
					      full_dest_offset, maxsize))
		    return false;
		}
	      else
		return false;

	      fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If the destination and source do not alias optimize into
	     memcpy as well.  */
	  if ((is_gimple_min_invariant (dest)
	       || TREE_CODE (dest) == SSA_NAME)
	      && (is_gimple_min_invariant (src)
		  || TREE_CODE (src) == SSA_NAME))
	    {
	      ao_ref destr, srcr;
	      ao_ref_init_from_ptr_and_size (&destr, dest, len);
	      ao_ref_init_from_ptr_and_size (&srcr, src, len);
	      if (!refs_may_alias_p_1 (&destr, &srcr, false))
		{
		  tree fn;
		  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
		  if (!fn)
		    return false;
		  gimple_call_set_fndecl (stmt, fn);
		  gimple_call_set_arg (stmt, 0, dest);
		  gimple_call_set_arg (stmt, 1, src);
		  fold_stmt (gsi);
		  return true;
		}
	    }

	  return false;
	}

      if (!tree_fits_shwi_p (len))
	return false;
      if (!srctype
	  || (AGGREGATE_TYPE_P (srctype)
	      && TYPE_REVERSE_STORAGE_ORDER (srctype)))
	return false;
      if (!desttype
	  || (AGGREGATE_TYPE_P (desttype)
	      && TYPE_REVERSE_STORAGE_ORDER (desttype)))
	return false;
      /* In the following try to find a type that is most natural to be
	 used for the memcpy source and destination and that allows
	 the most optimization when memcpy is turned into a plain assignment
	 using that type.  In theory we could always use a char[len] type
	 but that only gains us that the destination and source possibly
	 no longer will have their address taken.  */
      if (TREE_CODE (srctype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	srctype = TREE_TYPE (srctype);
      if (TREE_CODE (desttype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	desttype = TREE_TYPE (desttype);
      if (TREE_ADDRESSABLE (srctype)
	  || TREE_ADDRESSABLE (desttype))
	return false;

      /* Make sure we are not copying using a floating-point mode or
	 a type whose size possibly does not match its precision.  */
      if (FLOAT_MODE_P (TYPE_MODE (desttype))
	  || TREE_CODE (desttype) == BOOLEAN_TYPE
	  || TREE_CODE (desttype) == ENUMERAL_TYPE)
	desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
      if (FLOAT_MODE_P (TYPE_MODE (srctype))
	  || TREE_CODE (srctype) == BOOLEAN_TYPE
	  || TREE_CODE (srctype) == ENUMERAL_TYPE)
	srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
      /* bitwise_type_for_mode can fail; fall back to the other side's
	 type, and give up if neither survived.  */
      if (!srctype)
	srctype = desttype;
      if (!desttype)
	desttype = srctype;
      if (!srctype)
	return false;

      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);

      /* Choose between src and destination type for the access based
	 on alignment, whether the access constitutes a register access
	 and whether it may actually expose a declaration for SSA rewrite
	 or SRA decomposition.  Also try to expose a string constant, we
	 might be able to concatenate several of them later into a single
	 string store.  */
      destvar = NULL_TREE;
      srcvar = NULL_TREE;
      if (TREE_CODE (dest) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (dest, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len)
	  && dest_align >= TYPE_ALIGN (desttype)
	  && (is_gimple_reg_type (desttype)
	      || src_align >= TYPE_ALIGN (desttype)))
	destvar = fold_build2 (MEM_REF, desttype, dest, off0);
      else if (TREE_CODE (src) == ADDR_EXPR
	       && var_decl_component_p (TREE_OPERAND (src, 0))
	       && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
	       && src_align >= TYPE_ALIGN (srctype)
	       && (is_gimple_reg_type (srctype)
		   || dest_align >= TYPE_ALIGN (srctype)))
	srcvar = fold_build2 (MEM_REF, srctype, src, off0);
      /* FIXME: Don't transform copies from strings with known original length.
	 As soon as strlenopt tests that rely on it for passing are adjusted,
	 this hack can be removed.  */
      else if (gimple_call_alloca_for_var_p (stmt)
	       && (srcvar = string_constant (src, &srcoff, NULL, NULL))
	       && integer_zerop (srcoff)
	       && tree_int_cst_equal (TYPE_SIZE_UNIT (TREE_TYPE (srcvar)), len)
	       && dest_align >= TYPE_ALIGN (TREE_TYPE (srcvar)))
	srctype = TREE_TYPE (srcvar);
      else
	return false;

      /* Now that we chose an access type express the other side in
	 terms of it if the target allows that with respect to alignment
	 constraints.  */
      if (srcvar == NULL_TREE)
	{
	  if (src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, desttype, src, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return false;
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	    }
	}
      else if (destvar == NULL_TREE)
	{
	  if (dest_align >= TYPE_ALIGN (srctype))
	    destvar = fold_build2 (MEM_REF, srctype, dest, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return false;
	      desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
					     dest_align);
	      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
	    }
	}

      /* Same as above, detect out-of-bounds accesses without issuing
	 warnings.  Avoid folding out-of-bounds copies but to avoid
	 false positives for unreachable code defer warning until
	 after DCE has worked its magic.
	 -Wrestrict is still diagnosed.  */
      if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
						 dest, src, len, len,
						 false, false))
	if (warning != OPT_Wrestrict)
	  return false;

      gimple *new_stmt;
      if (is_gimple_reg_type (TREE_TYPE (srcvar)))
	{
	  tree tem = fold_const_aggregate_ref (srcvar);
	  if (tem)
	    srcvar = tem;
	  if (! is_gimple_min_invariant (srcvar))
	    {
	      new_stmt = gimple_build_assign (NULL_TREE, srcvar);
	      srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
						   new_stmt);
	      gimple_assign_set_lhs (new_stmt, srcvar);
	      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
	    }
	  new_stmt = gimple_build_assign (destvar, srcvar);
	  goto set_vop_and_replace;
	}

      /* We get an aggregate copy.  If the source is a STRING_CST, then
	 directly use its type to perform the copy.  */
      if (TREE_CODE (srcvar) == STRING_CST)
	desttype = srctype;

      /* Or else, use an unsigned char[] type to perform the copy in order
	 to preserve padding and to avoid any issues with TREE_ADDRESSABLE
	 types or float modes behavior on copying.  */
      else
	{
	  desttype = build_array_type_nelts (unsigned_char_type_node,
					     tree_to_uhwi (len));
	  srctype = desttype;
	  if (src_align > TYPE_ALIGN (srctype))
	    srctype = build_aligned_type (srctype, src_align);
	  srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	}

      if (dest_align > TYPE_ALIGN (desttype))
	desttype = build_aligned_type (desttype, dest_align);
      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
      new_stmt = gimple_build_assign (destvar, srcvar);

set_vop_and_replace:
      gimple_move_vops (new_stmt, stmt);
      if (!lhs)
	{
	  gsi_replace (gsi, new_stmt, false);
	  return true;
	}
      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
    }

done:
  /* Every path reaching DONE has a non-null LHS; emit LHS = DEST for
     memcpy/memmove, or LHS = DEST + LEN for mempcpy.  */
  gimple_seq stmts = NULL;
  if (code == BUILT_IN_MEMCPY || code == BUILT_IN_MEMMOVE)
    len = NULL_TREE;
  else if (code == BUILT_IN_MEMPCPY)
    {
      len = gimple_convert_to_ptrofftype (&stmts, loc, len);
      dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
			   TREE_TYPE (dest), dest, len);
    }
  else
    gcc_unreachable ();

  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gimple *repl = gimple_build_assign (lhs, dest);
  gsi_replace (gsi, repl, false);
  return true;
}
1350
b3d8d88e
MS
1351/* Transform a call to built-in bcmp(a, b, len) at *GSI into one
1352 to built-in memcmp (a, b, len). */
1353
1354static bool
1355gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
1356{
1357 tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);
1358
1359 if (!fn)
1360 return false;
1361
1362 /* Transform bcmp (a, b, len) into memcmp (a, b, len). */
1363
1364 gimple *stmt = gsi_stmt (*gsi);
1365 tree a = gimple_call_arg (stmt, 0);
1366 tree b = gimple_call_arg (stmt, 1);
1367 tree len = gimple_call_arg (stmt, 2);
1368
1369 gimple *repl = gimple_build_call (fn, 3, a, b, len);
1370 replace_call_with_call_and_fold (gsi, repl);
1371
1372 return true;
1373}
1374
1375/* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
1376 to built-in memmove (dest, src, len). */
1377
1378static bool
1379gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
1380{
1381 tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);
1382
1383 if (!fn)
1384 return false;
1385
1386 /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
1387 it's quivalent to memmove (not memcpy). Transform bcopy (src, dest,
1388 len) into memmove (dest, src, len). */
1389
1390 gimple *stmt = gsi_stmt (*gsi);
1391 tree src = gimple_call_arg (stmt, 0);
1392 tree dest = gimple_call_arg (stmt, 1);
1393 tree len = gimple_call_arg (stmt, 2);
1394
1395 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1396 gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
1397 replace_call_with_call_and_fold (gsi, repl);
1398
1399 return true;
1400}
1401
1402/* Transform a call to built-in bzero (dest, len) at *GSI into one
1403 to built-in memset (dest, 0, len). */
1404
1405static bool
1406gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
1407{
1408 tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
1409
1410 if (!fn)
1411 return false;
1412
1413 /* Transform bzero (dest, len) into memset (dest, 0, len). */
1414
1415 gimple *stmt = gsi_stmt (*gsi);
1416 tree dest = gimple_call_arg (stmt, 0);
1417 tree len = gimple_call_arg (stmt, 1);
1418
1419 gimple_seq seq = NULL;
1420 gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
1421 gimple_seq_add_stmt_without_update (&seq, repl);
1422 gsi_replace_with_seq_vops (gsi, seq);
1423 fold_stmt (gsi);
1424
1425 return true;
1426}
1427
fef5a0d9
RB
1428/* Fold function call to builtin memset or bzero at *GSI setting the
1429 memory of size LEN to VAL. Return whether a simplification was made. */
1430
1431static bool
1432gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1433{
355fe088 1434 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9
RB
1435 tree etype;
1436 unsigned HOST_WIDE_INT length, cval;
1437
1438 /* If the LEN parameter is zero, return DEST. */
1439 if (integer_zerop (len))
1440 {
1441 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1442 return true;
1443 }
1444
1445 if (! tree_fits_uhwi_p (len))
1446 return false;
1447
1448 if (TREE_CODE (c) != INTEGER_CST)
1449 return false;
1450
1451 tree dest = gimple_call_arg (stmt, 0);
1452 tree var = dest;
1453 if (TREE_CODE (var) != ADDR_EXPR)
1454 return false;
1455
1456 var = TREE_OPERAND (var, 0);
1457 if (TREE_THIS_VOLATILE (var))
1458 return false;
1459
1460 etype = TREE_TYPE (var);
1461 if (TREE_CODE (etype) == ARRAY_TYPE)
1462 etype = TREE_TYPE (etype);
1463
1464 if (!INTEGRAL_TYPE_P (etype)
1465 && !POINTER_TYPE_P (etype))
1466 return NULL_TREE;
1467
1468 if (! var_decl_component_p (var))
1469 return NULL_TREE;
1470
1471 length = tree_to_uhwi (len);
7a504f33 1472 if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
1ba9acb1
RB
1473 || (GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (etype))
1474 != GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (etype)))
fef5a0d9
RB
1475 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1476 return NULL_TREE;
1477
1478 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1479 return NULL_TREE;
1480
1ba9acb1
RB
1481 if (!type_has_mode_precision_p (etype))
1482 etype = lang_hooks.types.type_for_mode (SCALAR_INT_TYPE_MODE (etype),
1483 TYPE_UNSIGNED (etype));
1484
fef5a0d9
RB
1485 if (integer_zerop (c))
1486 cval = 0;
1487 else
1488 {
1489 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1490 return NULL_TREE;
1491
1492 cval = TREE_INT_CST_LOW (c);
1493 cval &= 0xff;
1494 cval |= cval << 8;
1495 cval |= cval << 16;
1496 cval |= (cval << 31) << 1;
1497 }
1498
1499 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
355fe088 1500 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
779724a5 1501 gimple_move_vops (store, stmt);
fef5a0d9
RB
1502 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1503 if (gimple_call_lhs (stmt))
1504 {
355fe088 1505 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
f6b4dc28 1506 gsi_replace (gsi, asgn, false);
fef5a0d9
RB
1507 }
1508 else
1509 {
1510 gimple_stmt_iterator gsi2 = *gsi;
1511 gsi_prev (gsi);
1512 gsi_remove (&gsi2, true);
1513 }
1514
1515 return true;
1516}
1517
/* Helper of get_range_strlen for ARG that is not an SSA_NAME.  Compute
   the length of the string ARG refers to (or, for RKIND == SRK_INT_VALUE,
   treat ARG itself as the maximum integer value) and merge the result
   into *PDATA (minlen/maxlen/maxbound/decl).  ELTSIZE is the character
   size (1, 2 or 4).  Return true on success.  */

static bool
get_range_strlen_tree (tree arg, bitmap *visited, strlen_range_kind rkind,
		       c_strlen_data *pdata, unsigned eltsize)
{
  gcc_assert (TREE_CODE (arg) != SSA_NAME);

  /* The length computed by this invocation of the function.  */
  tree val = NULL_TREE;

  /* True if VAL is an optimistic (tight) bound determined from
     the size of the character array in which the string may be
     stored.  In that case, the computed VAL is used to set
     PDATA->MAXBOUND.  */
  bool tight_bound = false;

  /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
    {
      tree op = TREE_OPERAND (arg, 0);
      if (integer_zerop (TREE_OPERAND (op, 1)))
	{
	  /* &(*p)[0] is equivalent to p; recurse on the pointer.  */
	  tree aop0 = TREE_OPERAND (op, 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_range_strlen (TREE_OPERAND (aop0, 0), visited, rkind,
				     pdata, eltsize);
	}
      else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF
	       && rkind == SRK_LENRANGE)
	{
	  /* Fail if an array is the last member of a struct object
	     since it could be treated as a (fake) flexible array
	     member.  */
	  tree idx = TREE_OPERAND (op, 1);

	  arg = TREE_OPERAND (op, 0);
	  tree optype = TREE_TYPE (arg);
	  if (tree dom = TYPE_DOMAIN (optype))
	    if (tree bound = TYPE_MAX_VALUE (dom))
	      if (TREE_CODE (bound) == INTEGER_CST
		  && TREE_CODE (idx) == INTEGER_CST
		  && tree_int_cst_lt (bound, idx))
		return false;
	}
    }

  if (rkind == SRK_INT_VALUE)
    {
      /* We are computing the maximum value (not string length).  */
      val = arg;
      if (TREE_CODE (val) != INTEGER_CST
	  || tree_int_cst_sgn (val) < 0)
	return false;
    }
  else
    {
      c_strlen_data lendata = { };
      val = c_strlen (arg, 1, &lendata, eltsize);

      if (!val && lendata.decl)
	{
	  /* ARG refers to an unterminated const character array.
	     DATA.DECL with size DATA.LEN.  */
	  val = lendata.minlen;
	  pdata->decl = lendata.decl;
	}
    }

  /* Set if VAL represents the maximum length based on array size (set
     when exact length cannot be determined).  */
  bool maxbound = false;

  if (!val && rkind == SRK_LENRANGE)
    {
      if (TREE_CODE (arg) == ADDR_EXPR)
	return get_range_strlen (TREE_OPERAND (arg, 0), visited, rkind,
				 pdata, eltsize);

      if (TREE_CODE (arg) == ARRAY_REF)
	{
	  tree optype = TREE_TYPE (TREE_OPERAND (arg, 0));

	  /* Determine the "innermost" array type.  */
	  while (TREE_CODE (optype) == ARRAY_TYPE
		 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
	    optype = TREE_TYPE (optype);

	  /* Avoid arrays of pointers.  */
	  tree eltype = TREE_TYPE (optype);
	  if (TREE_CODE (optype) != ARRAY_TYPE
	      || !INTEGRAL_TYPE_P (eltype))
	    return false;

	  /* Fail when the array bound is unknown or zero.  */
	  val = TYPE_SIZE_UNIT (optype);
	  if (!val
	      || TREE_CODE (val) != INTEGER_CST
	      || integer_zerop (val))
	    return false;

	  /* The longest string an N-byte array can hold has N - 1
	     characters before the terminating nul.  */
	  val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
			     integer_one_node);

	  /* Set the minimum size to zero since the string in
	     the array could have zero length.  */
	  pdata->minlen = ssize_int (0);

	  tight_bound = true;
	}
      else if (TREE_CODE (arg) == COMPONENT_REF
	       && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
		   == ARRAY_TYPE))
	{
	  /* Use the type of the member array to determine the upper
	     bound on the length of the array.  This may be overly
	     optimistic if the array itself isn't NUL-terminated and
	     the caller relies on the subsequent member to contain
	     the NUL but that would only be considered valid if
	     the array were the last member of a struct.  */

	  tree fld = TREE_OPERAND (arg, 1);

	  tree optype = TREE_TYPE (fld);

	  /* Determine the "innermost" array type.  */
	  while (TREE_CODE (optype) == ARRAY_TYPE
		 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
	    optype = TREE_TYPE (optype);

	  /* Fail when the array bound is unknown or zero.  */
	  val = TYPE_SIZE_UNIT (optype);
	  if (!val
	      || TREE_CODE (val) != INTEGER_CST
	      || integer_zerop (val))
	    return false;
	  val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
			     integer_one_node);

	  /* Set the minimum size to zero since the string in
	     the array could have zero length.  */
	  pdata->minlen = ssize_int (0);

	  /* The array size determined above is an optimistic bound
	     on the length.  If the array isn't nul-terminated the
	     length computed by the library function would be greater.
	     Even though using strlen to cross the subobject boundary
	     is undefined, avoid drawing conclusions from the member
	     type about the length here.  */
	  tight_bound = true;
	}
      else if (TREE_CODE (arg) == MEM_REF
	       && TREE_CODE (TREE_TYPE (arg)) == ARRAY_TYPE
	       && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == INTEGER_TYPE
	       && TREE_CODE (TREE_OPERAND (arg, 0)) == ADDR_EXPR)
	{
	  /* Handle a MEM_REF into a DECL accessing an array of integers,
	     being conservative about references to extern structures with
	     flexible array members that can be initialized to arbitrary
	     numbers of elements as an extension (static structs are okay).
	     FIXME: Make this less conservative -- see
	     component_ref_size in tree.c.  */
	  tree ref = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if ((TREE_CODE (ref) == PARM_DECL || VAR_P (ref))
	      && (decl_binds_to_current_def_p (ref)
		  || !array_at_struct_end_p (arg)))
	    {
	      /* Fail if the offset is out of bounds.  Such accesses
		 should be diagnosed at some point.  */
	      val = DECL_SIZE_UNIT (ref);
	      if (!val
		  || TREE_CODE (val) != INTEGER_CST
		  || integer_zerop (val))
		return false;

	      poly_offset_int psiz = wi::to_offset (val);
	      poly_offset_int poff = mem_ref_offset (arg);
	      if (known_le (psiz, poff))
		return false;

	      pdata->minlen = ssize_int (0);

	      /* Subtract the offset and one for the terminating nul.  */
	      psiz -= poff;
	      psiz -= 1;
	      val = wide_int_to_tree (TREE_TYPE (val), psiz);
	      /* Since VAL reflects the size of a declared object
		 rather the type of the access it is not a tight bound.  */
	    }
	}
      else if (TREE_CODE (arg) == PARM_DECL || VAR_P (arg))
	{
	  /* Avoid handling pointers to arrays.  GCC might misuse
	     a pointer to an array of one bound to point to an array
	     object of a greater bound.  */
	  tree argtype = TREE_TYPE (arg);
	  if (TREE_CODE (argtype) == ARRAY_TYPE)
	    {
	      val = TYPE_SIZE_UNIT (argtype);
	      if (!val
		  || TREE_CODE (val) != INTEGER_CST
		  || integer_zerop (val))
		return false;
	      val = wide_int_to_tree (TREE_TYPE (val),
				      wi::sub (wi::to_wide (val), 1));

	      /* Set the minimum size to zero since the string in
		 the array could have zero length.  */
	      pdata->minlen = ssize_int (0);
	    }
	}
      /* In all the SRK_LENRANGE fallbacks above VAL, if set, was derived
	 from an object/type size rather than an actual string length.  */
      maxbound = true;
    }

  if (!val)
    return false;

  /* Adjust the lower bound on the string length as necessary.  */
  if (!pdata->minlen
      || (rkind != SRK_STRLEN
	  && TREE_CODE (pdata->minlen) == INTEGER_CST
	  && TREE_CODE (val) == INTEGER_CST
	  && tree_int_cst_lt (val, pdata->minlen)))
    pdata->minlen = val;

  if (pdata->maxbound && TREE_CODE (pdata->maxbound) == INTEGER_CST)
    {
      /* Adjust the tighter (more optimistic) string length bound
	 if necessary and proceed to adjust the more conservative
	 bound.  */
      if (TREE_CODE (val) == INTEGER_CST)
	{
	  if (tree_int_cst_lt (pdata->maxbound, val))
	    pdata->maxbound = val;
	}
      else
	pdata->maxbound = val;
    }
  else if (pdata->maxbound || maxbound)
    /* Set PDATA->MAXBOUND only if it either isn't INTEGER_CST or
       if VAL corresponds to the maximum length determined based
       on the type of the object.  */
    pdata->maxbound = val;

  if (tight_bound)
    {
      /* VAL computed above represents an optimistically tight bound
	 on the length of the string based on the referenced object's
	 or subobject's type.  Determine the conservative upper bound
	 based on the enclosing object's size if possible.  */
      if (rkind == SRK_LENRANGE)
	{
	  poly_int64 offset;
	  tree base = get_addr_base_and_unit_offset (arg, &offset);
	  if (!base)
	    {
	      /* When the call above fails due to a non-constant offset
		 assume the offset is zero and use the size of the whole
		 enclosing object instead.  */
	      base = get_base_address (arg);
	      offset = 0;
	    }
	  /* If the base object is a pointer no upper bound on the length
	     can be determined.  Otherwise the maximum length is equal to
	     the size of the enclosing object minus the offset of
	     the referenced subobject minus 1 (for the terminating nul).  */
	  tree type = TREE_TYPE (base);
	  if (TREE_CODE (type) == POINTER_TYPE
	      || (TREE_CODE (base) != PARM_DECL && !VAR_P (base))
	      || !(val = DECL_SIZE_UNIT (base)))
	    val = build_all_ones_cst (size_type_node);
	  else
	    {
	      val = DECL_SIZE_UNIT (base);
	      val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
				 size_int (offset + 1));
	    }
	}
      else
	return false;
    }

  if (pdata->maxlen)
    {
      /* Adjust the more conservative bound if possible/necessary
	 and fail otherwise.  */
      if (rkind != SRK_STRLEN)
	{
	  if (TREE_CODE (pdata->maxlen) != INTEGER_CST
	      || TREE_CODE (val) != INTEGER_CST)
	    return false;

	  if (tree_int_cst_lt (pdata->maxlen, val))
	    pdata->maxlen = val;
	  return true;
	}
      else if (simple_cst_equal (val, pdata->maxlen) != 1)
	{
	  /* Fail if the length of this ARG is different from that
	     previously determined from another ARG.  */
	  return false;
	}
    }

  pdata->maxlen = val;
  /* For SRK_LENRANGE an all-ones VAL (i.e. no known upper bound) still
     counts as success; for the other kinds it does not.  */
  return rkind == SRK_LENRANGE || !integer_all_onesp (val);
}
1827
5d6655eb
MS
1828/* For an ARG referencing one or more strings, try to obtain the range
1829 of their lengths, or the size of the largest array ARG referes to if
1830 the range of lengths cannot be determined, and store all in *PDATA.
1831 For an integer ARG (when RKIND == SRK_INT_VALUE), try to determine
1832 the maximum constant value.
1833 If ARG is an SSA_NAME, follow its use-def chains. When RKIND ==
1834 SRK_STRLEN, then if PDATA->MAXLEN is not equal to the determined
1835 length or if we are unable to determine the length, return false.
fb471a13 1836 VISITED is a bitmap of visited variables.
598f7235
MS
1837 RKIND determines the kind of value or range to obtain (see
1838 strlen_range_kind).
1839 Set PDATA->DECL if ARG refers to an unterminated constant array.
1840 On input, set ELTSIZE to 1 for normal single byte character strings,
1841 and either 2 or 4 for wide characer strings (the size of wchar_t).
1842 Return true if *PDATA was successfully populated and false otherwise. */
fb471a13
MS
1843
1844static bool
03c4a945
MS
1845get_range_strlen (tree arg, bitmap *visited,
1846 strlen_range_kind rkind,
1847 c_strlen_data *pdata, unsigned eltsize)
fb471a13
MS
1848{
1849
1850 if (TREE_CODE (arg) != SSA_NAME)
03c4a945 1851 return get_range_strlen_tree (arg, visited, rkind, pdata, eltsize);
fb471a13 1852
fef5a0d9
RB
1853 /* If ARG is registered for SSA update we cannot look at its defining
1854 statement. */
1855 if (name_registered_for_update_p (arg))
1856 return false;
1857
1858 /* If we were already here, break the infinite cycle. */
dcb7fae2
RB
1859 if (!*visited)
1860 *visited = BITMAP_ALLOC (NULL);
1861 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
fef5a0d9
RB
1862 return true;
1863
fb471a13
MS
1864 tree var = arg;
1865 gimple *def_stmt = SSA_NAME_DEF_STMT (var);
1866
fef5a0d9
RB
1867 switch (gimple_code (def_stmt))
1868 {
1869 case GIMPLE_ASSIGN:
598f7235
MS
1870 /* The RHS of the statement defining VAR must either have a
1871 constant length or come from another SSA_NAME with a constant
1872 length. */
fef5a0d9
RB
1873 if (gimple_assign_single_p (def_stmt)
1874 || gimple_assign_unary_nop_p (def_stmt))
1875 {
598f7235 1876 tree rhs = gimple_assign_rhs1 (def_stmt);
03c4a945 1877 return get_range_strlen (rhs, visited, rkind, pdata, eltsize);
fef5a0d9
RB
1878 }
1879 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1880 {
c8602fe6
JJ
1881 tree ops[2] = { gimple_assign_rhs2 (def_stmt),
1882 gimple_assign_rhs3 (def_stmt) };
1883
1884 for (unsigned int i = 0; i < 2; i++)
03c4a945 1885 if (!get_range_strlen (ops[i], visited, rkind, pdata, eltsize))
c8602fe6 1886 {
84de9426 1887 if (rkind != SRK_LENRANGE)
c8602fe6 1888 return false;
80c2bad6
MS
1889 /* Set the upper bound to the maximum to prevent
1890 it from being adjusted in the next iteration but
1891 leave MINLEN and the more conservative MAXBOUND
1892 determined so far alone (or leave them null if
1893 they haven't been set yet). That the MINLEN is
1894 in fact zero can be determined from MAXLEN being
1895 unbounded but the discovered minimum is used for
1896 diagnostics. */
730832cd 1897 pdata->maxlen = build_all_ones_cst (size_type_node);
c8602fe6
JJ
1898 }
1899 return true;
cc8bea0a 1900 }
fef5a0d9
RB
1901 return false;
1902
1903 case GIMPLE_PHI:
598f7235
MS
1904 /* Unless RKIND == SRK_LENRANGE, all arguments of the PHI node
1905 must have a constant length. */
c8602fe6 1906 for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
fef5a0d9
RB
1907 {
1908 tree arg = gimple_phi_arg (def_stmt, i)->def;
1909
1910 /* If this PHI has itself as an argument, we cannot
1911 determine the string length of this argument. However,
1912 if we can find a constant string length for the other
1913 PHI args then we can still be sure that this is a
1914 constant string length. So be optimistic and just
1915 continue with the next argument. */
1916 if (arg == gimple_phi_result (def_stmt))
1917 continue;
1918
03c4a945 1919 if (!get_range_strlen (arg, visited, rkind, pdata, eltsize))
88d0c3f0 1920 {
84de9426 1921 if (rkind != SRK_LENRANGE)
88d0c3f0 1922 return false;
80c2bad6
MS
1923 /* Set the upper bound to the maximum to prevent
1924 it from being adjusted in the next iteration but
1925 leave MINLEN and the more conservative MAXBOUND
1926 determined so far alone (or leave them null if
1927 they haven't been set yet). That the MINLEN is
1928 in fact zero can be determined from MAXLEN being
1929 unbounded but the discovered minimum is used for
1930 diagnostics. */
730832cd 1931 pdata->maxlen = build_all_ones_cst (size_type_node);
88d0c3f0 1932 }
fef5a0d9 1933 }
fef5a0d9
RB
1934 return true;
1935
1936 default:
1937 return false;
1938 }
1939}
5d6655eb 1940
97623b52
MS
1941/* Try to obtain the range of the lengths of the string(s) referenced
1942 by ARG, or the size of the largest array ARG refers to if the range
a7160771
MS
1943 of lengths cannot be determined, and store all in *PDATA which must
1944 be zero-initialized on input except PDATA->MAXBOUND may be set to
1945 a non-null tree node other than INTEGER_CST to request to have it
1946 set to the length of the longest string in a PHI. ELTSIZE is
1947 the expected size of the string element in bytes: 1 for char and
97623b52
MS
1948 some power of 2 for wide characters.
1949 Return true if the range [PDATA->MINLEN, PDATA->MAXLEN] is suitable
1950 for optimization. Returning false means that a nonzero PDATA->MINLEN
1951 doesn't reflect the true lower bound of the range when PDATA->MAXLEN
1952 is -1 (in that case, the actual range is indeterminate, i.e.,
1953 [0, PTRDIFF_MAX - 2]. */
88d0c3f0 1954
3f343040 1955bool
84de9426 1956get_range_strlen (tree arg, c_strlen_data *pdata, unsigned eltsize)
88d0c3f0
MS
1957{
1958 bitmap visited = NULL;
a7160771 1959 tree maxbound = pdata->maxbound;
88d0c3f0 1960
84de9426 1961 if (!get_range_strlen (arg, &visited, SRK_LENRANGE, pdata, eltsize))
730832cd 1962 {
5d6655eb
MS
1963 /* On failure extend the length range to an impossible maximum
1964 (a valid MAXLEN must be less than PTRDIFF_MAX - 1). Other
1965 members can stay unchanged regardless. */
1966 pdata->minlen = ssize_int (0);
1967 pdata->maxlen = build_all_ones_cst (size_type_node);
730832cd 1968 }
5d6655eb
MS
1969 else if (!pdata->minlen)
1970 pdata->minlen = ssize_int (0);
1971
a7160771
MS
1972 /* If it's unchanged from it initial non-null value, set the conservative
1973 MAXBOUND to SIZE_MAX. Otherwise leave it null (if it is null). */
1974 if (maxbound && pdata->maxbound == maxbound)
1975 pdata->maxbound = build_all_ones_cst (size_type_node);
88d0c3f0
MS
1976
1977 if (visited)
1978 BITMAP_FREE (visited);
3f343040 1979
03c4a945 1980 return !integer_all_onesp (pdata->maxlen);
88d0c3f0
MS
1981}
1982
5d6655eb
MS
1983/* Return the maximum value for ARG given RKIND (see strlen_range_kind).
1984 For ARG of pointer types, NONSTR indicates if the caller is prepared
1985 to handle unterminated strings. For integer ARG and when RKIND ==
1986 SRK_INT_VALUE, NONSTR must be null.
e08341bb 1987
5d6655eb
MS
1988 If an unterminated array is discovered and our caller handles
1989 unterminated arrays, then bubble up the offending DECL and
e08341bb
MS
1990 return the maximum size. Otherwise return NULL. */
1991
598f7235
MS
1992static tree
1993get_maxval_strlen (tree arg, strlen_range_kind rkind, tree *nonstr = NULL)
dcb7fae2 1994{
598f7235
MS
1995 /* A non-null NONSTR is meaningless when determining the maximum
1996 value of an integer ARG. */
1997 gcc_assert (rkind != SRK_INT_VALUE || nonstr == NULL);
1998 /* ARG must have an integral type when RKIND says so. */
1999 gcc_assert (rkind != SRK_INT_VALUE || INTEGRAL_TYPE_P (TREE_TYPE (arg)));
2000
dcb7fae2 2001 bitmap visited = NULL;
3f343040 2002
5d6655eb
MS
2003 /* Reset DATA.MAXLEN if the call fails or when DATA.MAXLEN
2004 is unbounded. */
730832cd 2005 c_strlen_data lendata = { };
03c4a945 2006 if (!get_range_strlen (arg, &visited, rkind, &lendata, /* eltsize = */1))
730832cd 2007 lendata.maxlen = NULL_TREE;
5d6655eb
MS
2008 else if (lendata.maxlen && integer_all_onesp (lendata.maxlen))
2009 lendata.maxlen = NULL_TREE;
2010
dcb7fae2
RB
2011 if (visited)
2012 BITMAP_FREE (visited);
2013
e08341bb
MS
2014 if (nonstr)
2015 {
2016 /* For callers prepared to handle unterminated arrays set
2017 *NONSTR to point to the declaration of the array and return
2018 the maximum length/size. */
730832cd
MS
2019 *nonstr = lendata.decl;
2020 return lendata.maxlen;
e08341bb
MS
2021 }
2022
2023 /* Fail if the constant array isn't nul-terminated. */
730832cd 2024 return lendata.decl ? NULL_TREE : lendata.maxlen;
dcb7fae2
RB
2025}
2026
fef5a0d9
RB
2027
2028/* Fold function call to builtin strcpy with arguments DEST and SRC.
2029 If LEN is not NULL, it represents the length of the string to be
2030 copied. Return NULL_TREE if no simplification can be made. */
2031
2032static bool
2033gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
dcb7fae2 2034 tree dest, tree src)
fef5a0d9 2035{
cc8bea0a
MS
2036 gimple *stmt = gsi_stmt (*gsi);
2037 location_t loc = gimple_location (stmt);
fef5a0d9
RB
2038 tree fn;
2039
2040 /* If SRC and DEST are the same (and not volatile), return DEST. */
2041 if (operand_equal_p (src, dest, 0))
2042 {
8cd95cec
MS
2043 /* Issue -Wrestrict unless the pointers are null (those do
2044 not point to objects and so do not indicate an overlap;
2045 such calls could be the result of sanitization and jump
2046 threading). */
e9e2bad7 2047 if (!integer_zerop (dest) && !warning_suppressed_p (stmt, OPT_Wrestrict))
e9b9fa4c
MS
2048 {
2049 tree func = gimple_call_fndecl (stmt);
cc8bea0a 2050
e9b9fa4c
MS
2051 warning_at (loc, OPT_Wrestrict,
2052 "%qD source argument is the same as destination",
2053 func);
2054 }
cc8bea0a 2055
fef5a0d9
RB
2056 replace_call_with_value (gsi, dest);
2057 return true;
2058 }
2059
2060 if (optimize_function_for_size_p (cfun))
2061 return false;
2062
2063 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2064 if (!fn)
2065 return false;
2066
e08341bb
MS
2067 /* Set to non-null if ARG refers to an unterminated array. */
2068 tree nonstr = NULL;
598f7235 2069 tree len = get_maxval_strlen (src, SRK_STRLEN, &nonstr);
e08341bb
MS
2070
2071 if (nonstr)
2072 {
2073 /* Avoid folding calls with unterminated arrays. */
e9e2bad7 2074 if (!warning_suppressed_p (stmt, OPT_Wstringop_overread))
d14c547a 2075 warn_string_no_nul (loc, NULL_TREE, "strcpy", src, nonstr);
e9e2bad7 2076 suppress_warning (stmt, OPT_Wstringop_overread);
e08341bb
MS
2077 return false;
2078 }
2079
fef5a0d9 2080 if (!len)
dcb7fae2 2081 return false;
fef5a0d9
RB
2082
2083 len = fold_convert_loc (loc, size_type_node, len);
2084 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
2085 len = force_gimple_operand_gsi (gsi, len, true,
2086 NULL_TREE, true, GSI_SAME_STMT);
355fe088 2087 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
fef5a0d9
RB
2088 replace_call_with_call_and_fold (gsi, repl);
2089 return true;
2090}
2091
2092/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
2093 If SLEN is not NULL, it represents the length of the source string.
2094 Return NULL_TREE if no simplification can be made. */
2095
2096static bool
dcb7fae2
RB
2097gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
2098 tree dest, tree src, tree len)
fef5a0d9 2099{
025d57f0
MS
2100 gimple *stmt = gsi_stmt (*gsi);
2101 location_t loc = gimple_location (stmt);
6a33d0ff 2102 bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;
fef5a0d9
RB
2103
2104 /* If the LEN parameter is zero, return DEST. */
2105 if (integer_zerop (len))
2106 {
53b28abf 2107 /* Avoid warning if the destination refers to an array/pointer
6a33d0ff
MS
2108 decorate with attribute nonstring. */
2109 if (!nonstring)
2110 {
2111 tree fndecl = gimple_call_fndecl (stmt);
6a33d0ff
MS
2112
2113 /* Warn about the lack of nul termination: the result is not
2114 a (nul-terminated) string. */
598f7235 2115 tree slen = get_maxval_strlen (src, SRK_STRLEN);
6a33d0ff
MS
2116 if (slen && !integer_zerop (slen))
2117 warning_at (loc, OPT_Wstringop_truncation,
6d3bab5d 2118 "%qD destination unchanged after copying no bytes "
6a33d0ff 2119 "from a string of length %E",
6d3bab5d 2120 fndecl, slen);
6a33d0ff
MS
2121 else
2122 warning_at (loc, OPT_Wstringop_truncation,
6d3bab5d
MS
2123 "%qD destination unchanged after copying no bytes",
2124 fndecl);
6a33d0ff 2125 }
025d57f0 2126
fef5a0d9
RB
2127 replace_call_with_value (gsi, dest);
2128 return true;
2129 }
2130
2131 /* We can't compare slen with len as constants below if len is not a
2132 constant. */
dcb7fae2 2133 if (TREE_CODE (len) != INTEGER_CST)
fef5a0d9
RB
2134 return false;
2135
fef5a0d9 2136 /* Now, we must be passed a constant src ptr parameter. */
598f7235 2137 tree slen = get_maxval_strlen (src, SRK_STRLEN);
dcb7fae2 2138 if (!slen || TREE_CODE (slen) != INTEGER_CST)
fef5a0d9
RB
2139 return false;
2140
025d57f0
MS
2141 /* The size of the source string including the terminating nul. */
2142 tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
fef5a0d9
RB
2143
2144 /* We do not support simplification of this case, though we do
2145 support it when expanding trees into RTL. */
2146 /* FIXME: generate a call to __builtin_memset. */
025d57f0 2147 if (tree_int_cst_lt (ssize, len))
fef5a0d9
RB
2148 return false;
2149
5d0d5d68
MS
2150 /* Diagnose truncation that leaves the copy unterminated. */
2151 maybe_diag_stxncpy_trunc (*gsi, src, len);
025d57f0 2152
fef5a0d9 2153 /* OK transform into builtin memcpy. */
025d57f0 2154 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
fef5a0d9
RB
2155 if (!fn)
2156 return false;
2157
2158 len = fold_convert_loc (loc, size_type_node, len);
2159 len = force_gimple_operand_gsi (gsi, len, true,
2160 NULL_TREE, true, GSI_SAME_STMT);
355fe088 2161 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
fef5a0d9 2162 replace_call_with_call_and_fold (gsi, repl);
025d57f0 2163
fef5a0d9
RB
2164 return true;
2165}
2166
71dea1dd
WD
2167/* Fold function call to builtin strchr or strrchr.
2168 If both arguments are constant, evaluate and fold the result,
2169 otherwise simplify str(r)chr (str, 0) into str + strlen (str).
912d9ec3
WD
2170 In general strlen is significantly faster than strchr
2171 due to being a simpler operation. */
2172static bool
71dea1dd 2173gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
912d9ec3
WD
2174{
2175 gimple *stmt = gsi_stmt (*gsi);
2176 tree str = gimple_call_arg (stmt, 0);
2177 tree c = gimple_call_arg (stmt, 1);
2178 location_t loc = gimple_location (stmt);
71dea1dd
WD
2179 const char *p;
2180 char ch;
912d9ec3 2181
71dea1dd 2182 if (!gimple_call_lhs (stmt))
912d9ec3
WD
2183 return false;
2184
b5338fb3
MS
2185 /* Avoid folding if the first argument is not a nul-terminated array.
2186 Defer warning until later. */
2187 if (!check_nul_terminated_array (NULL_TREE, str))
2188 return false;
2189
71dea1dd
WD
2190 if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
2191 {
2192 const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);
2193
2194 if (p1 == NULL)
2195 {
2196 replace_call_with_value (gsi, integer_zero_node);
2197 return true;
2198 }
2199
2200 tree len = build_int_cst (size_type_node, p1 - p);
2201 gimple_seq stmts = NULL;
2202 gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2203 POINTER_PLUS_EXPR, str, len);
2204 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2205 gsi_replace_with_seq_vops (gsi, stmts);
2206 return true;
2207 }
2208
2209 if (!integer_zerop (c))
912d9ec3
WD
2210 return false;
2211
71dea1dd 2212 /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size. */
c8952930 2213 if (is_strrchr && optimize_function_for_size_p (cfun))
71dea1dd
WD
2214 {
2215 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
2216
c8952930 2217 if (strchr_fn)
71dea1dd
WD
2218 {
2219 gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
2220 replace_call_with_call_and_fold (gsi, repl);
2221 return true;
2222 }
2223
2224 return false;
2225 }
2226
912d9ec3
WD
2227 tree len;
2228 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
2229
2230 if (!strlen_fn)
2231 return false;
2232
2233 /* Create newstr = strlen (str). */
2234 gimple_seq stmts = NULL;
2235 gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
2236 gimple_set_location (new_stmt, loc);
a15ebbcd 2237 len = create_tmp_reg_or_ssa_name (size_type_node);
912d9ec3
WD
2238 gimple_call_set_lhs (new_stmt, len);
2239 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2240
2241 /* Create (str p+ strlen (str)). */
2242 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2243 POINTER_PLUS_EXPR, str, len);
2244 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2245 gsi_replace_with_seq_vops (gsi, stmts);
2246 /* gsi now points at the assignment to the lhs, get a
2247 stmt iterator to the strlen.
2248 ??? We can't use gsi_for_stmt as that doesn't work when the
2249 CFG isn't built yet. */
2250 gimple_stmt_iterator gsi2 = *gsi;
2251 gsi_prev (&gsi2);
2252 fold_stmt (&gsi2);
2253 return true;
2254}
2255
c8952930
JJ
2256/* Fold function call to builtin strstr.
2257 If both arguments are constant, evaluate and fold the result,
2258 additionally fold strstr (x, "") into x and strstr (x, "c")
2259 into strchr (x, 'c'). */
2260static bool
2261gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
2262{
2263 gimple *stmt = gsi_stmt (*gsi);
b5338fb3
MS
2264 if (!gimple_call_lhs (stmt))
2265 return false;
2266
c8952930
JJ
2267 tree haystack = gimple_call_arg (stmt, 0);
2268 tree needle = gimple_call_arg (stmt, 1);
c8952930 2269
b5338fb3
MS
2270 /* Avoid folding if either argument is not a nul-terminated array.
2271 Defer warning until later. */
2272 if (!check_nul_terminated_array (NULL_TREE, haystack)
2273 || !check_nul_terminated_array (NULL_TREE, needle))
c8952930
JJ
2274 return false;
2275
b5338fb3 2276 const char *q = c_getstr (needle);
c8952930
JJ
2277 if (q == NULL)
2278 return false;
2279
b5338fb3 2280 if (const char *p = c_getstr (haystack))
c8952930
JJ
2281 {
2282 const char *r = strstr (p, q);
2283
2284 if (r == NULL)
2285 {
2286 replace_call_with_value (gsi, integer_zero_node);
2287 return true;
2288 }
2289
2290 tree len = build_int_cst (size_type_node, r - p);
2291 gimple_seq stmts = NULL;
2292 gimple *new_stmt
2293 = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
2294 haystack, len);
2295 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2296 gsi_replace_with_seq_vops (gsi, stmts);
2297 return true;
2298 }
2299
2300 /* For strstr (x, "") return x. */
2301 if (q[0] == '\0')
2302 {
2303 replace_call_with_value (gsi, haystack);
2304 return true;
2305 }
2306
2307 /* Transform strstr (x, "c") into strchr (x, 'c'). */
2308 if (q[1] == '\0')
2309 {
2310 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
2311 if (strchr_fn)
2312 {
2313 tree c = build_int_cst (integer_type_node, q[0]);
2314 gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
2315 replace_call_with_call_and_fold (gsi, repl);
2316 return true;
2317 }
2318 }
2319
2320 return false;
2321}
2322
fef5a0d9
RB
2323/* Simplify a call to the strcat builtin. DST and SRC are the arguments
2324 to the call.
2325
2326 Return NULL_TREE if no simplification was possible, otherwise return the
2327 simplified form of the call as a tree.
2328
2329 The simplified form may be a constant or other expression which
2330 computes the same value, but in a more efficient manner (including
2331 calls to other builtin functions).
2332
2333 The call may contain arguments which need to be evaluated, but
2334 which are not useful to determine the result of the call. In
2335 this case we return a chain of COMPOUND_EXPRs. The LHS of each
2336 COMPOUND_EXPR will be an argument which must be evaluated.
2337 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
2338 COMPOUND_EXPR in the chain will contain the tree for the simplified
2339 form of the builtin function call. */
2340
2341static bool
dcb7fae2 2342gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
fef5a0d9 2343{
355fe088 2344 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2 2345 location_t loc = gimple_location (stmt);
fef5a0d9
RB
2346
2347 const char *p = c_getstr (src);
2348
2349 /* If the string length is zero, return the dst parameter. */
2350 if (p && *p == '\0')
2351 {
2352 replace_call_with_value (gsi, dst);
2353 return true;
2354 }
2355
2356 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
2357 return false;
2358
2359 /* See if we can store by pieces into (dst + strlen(dst)). */
2360 tree newdst;
2361 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
2362 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2363
2364 if (!strlen_fn || !memcpy_fn)
2365 return false;
2366
2367 /* If the length of the source string isn't computable don't
2368 split strcat into strlen and memcpy. */
598f7235 2369 tree len = get_maxval_strlen (src, SRK_STRLEN);
fef5a0d9 2370 if (! len)
fef5a0d9
RB
2371 return false;
2372
2373 /* Create strlen (dst). */
2374 gimple_seq stmts = NULL, stmts2;
355fe088 2375 gimple *repl = gimple_build_call (strlen_fn, 1, dst);
fef5a0d9 2376 gimple_set_location (repl, loc);
a15ebbcd 2377 newdst = create_tmp_reg_or_ssa_name (size_type_node);
fef5a0d9
RB
2378 gimple_call_set_lhs (repl, newdst);
2379 gimple_seq_add_stmt_without_update (&stmts, repl);
2380
2381 /* Create (dst p+ strlen (dst)). */
2382 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
2383 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
2384 gimple_seq_add_seq_without_update (&stmts, stmts2);
2385
2386 len = fold_convert_loc (loc, size_type_node, len);
2387 len = size_binop_loc (loc, PLUS_EXPR, len,
2388 build_int_cst (size_type_node, 1));
2389 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
2390 gimple_seq_add_seq_without_update (&stmts, stmts2);
2391
2392 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
2393 gimple_seq_add_stmt_without_update (&stmts, repl);
2394 if (gimple_call_lhs (stmt))
2395 {
2396 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
2397 gimple_seq_add_stmt_without_update (&stmts, repl);
2398 gsi_replace_with_seq_vops (gsi, stmts);
2399 /* gsi now points at the assignment to the lhs, get a
2400 stmt iterator to the memcpy call.
2401 ??? We can't use gsi_for_stmt as that doesn't work when the
2402 CFG isn't built yet. */
2403 gimple_stmt_iterator gsi2 = *gsi;
2404 gsi_prev (&gsi2);
2405 fold_stmt (&gsi2);
2406 }
2407 else
2408 {
2409 gsi_replace_with_seq_vops (gsi, stmts);
2410 fold_stmt (gsi);
2411 }
2412 return true;
2413}
2414
07f1cf56
RB
2415/* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
2416 are the arguments to the call. */
2417
2418static bool
2419gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
2420{
355fe088 2421 gimple *stmt = gsi_stmt (*gsi);
07f1cf56
RB
2422 tree dest = gimple_call_arg (stmt, 0);
2423 tree src = gimple_call_arg (stmt, 1);
2424 tree size = gimple_call_arg (stmt, 2);
2425 tree fn;
2426 const char *p;
2427
2428
2429 p = c_getstr (src);
2430 /* If the SRC parameter is "", return DEST. */
2431 if (p && *p == '\0')
2432 {
2433 replace_call_with_value (gsi, dest);
2434 return true;
2435 }
2436
2437 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
2438 return false;
2439
2440 /* If __builtin_strcat_chk is used, assume strcat is available. */
2441 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
2442 if (!fn)
2443 return false;
2444
355fe088 2445 gimple *repl = gimple_build_call (fn, 2, dest, src);
07f1cf56
RB
2446 replace_call_with_call_and_fold (gsi, repl);
2447 return true;
2448}
2449
ad03a744
RB
2450/* Simplify a call to the strncat builtin. */
2451
2452static bool
2453gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
2454{
8a45b051 2455 gimple *stmt = gsi_stmt (*gsi);
ad03a744
RB
2456 tree dst = gimple_call_arg (stmt, 0);
2457 tree src = gimple_call_arg (stmt, 1);
2458 tree len = gimple_call_arg (stmt, 2);
2459
2460 const char *p = c_getstr (src);
2461
2462 /* If the requested length is zero, or the src parameter string
2463 length is zero, return the dst parameter. */
2464 if (integer_zerop (len) || (p && *p == '\0'))
2465 {
2466 replace_call_with_value (gsi, dst);
2467 return true;
2468 }
2469
025d57f0
MS
2470 if (TREE_CODE (len) != INTEGER_CST || !p)
2471 return false;
2472
2473 unsigned srclen = strlen (p);
2474
2475 int cmpsrc = compare_tree_int (len, srclen);
2476
2477 /* Return early if the requested len is less than the string length.
2478 Warnings will be issued elsewhere later. */
2479 if (cmpsrc < 0)
2480 return false;
2481
2482 unsigned HOST_WIDE_INT dstsize;
2483
e9e2bad7 2484 bool nowarn = warning_suppressed_p (stmt, OPT_Wstringop_overflow_);
025d57f0
MS
2485
2486 if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
ad03a744 2487 {
025d57f0 2488 int cmpdst = compare_tree_int (len, dstsize);
ad03a744 2489
025d57f0
MS
2490 if (cmpdst >= 0)
2491 {
2492 tree fndecl = gimple_call_fndecl (stmt);
2493
2494 /* Strncat copies (at most) LEN bytes and always appends
2495 the terminating NUL so the specified bound should never
2496 be equal to (or greater than) the size of the destination.
2497 If it is, the copy could overflow. */
2498 location_t loc = gimple_location (stmt);
2499 nowarn = warning_at (loc, OPT_Wstringop_overflow_,
2500 cmpdst == 0
6d3bab5d 2501 ? G_("%qD specified bound %E equals "
025d57f0 2502 "destination size")
6d3bab5d 2503 : G_("%qD specified bound %E exceeds "
025d57f0 2504 "destination size %wu"),
6d3bab5d 2505 fndecl, len, dstsize);
025d57f0 2506 if (nowarn)
e9e2bad7 2507 suppress_warning (stmt, OPT_Wstringop_overflow_);
025d57f0
MS
2508 }
2509 }
ad03a744 2510
025d57f0
MS
2511 if (!nowarn && cmpsrc == 0)
2512 {
2513 tree fndecl = gimple_call_fndecl (stmt);
025d57f0 2514 location_t loc = gimple_location (stmt);
eec5f615
MS
2515
2516 /* To avoid possible overflow the specified bound should also
2517 not be equal to the length of the source, even when the size
2518 of the destination is unknown (it's not an uncommon mistake
2519 to specify as the bound to strncpy the length of the source). */
025d57f0 2520 if (warning_at (loc, OPT_Wstringop_overflow_,
6d3bab5d
MS
2521 "%qD specified bound %E equals source length",
2522 fndecl, len))
e9e2bad7 2523 suppress_warning (stmt, OPT_Wstringop_overflow_);
ad03a744
RB
2524 }
2525
025d57f0
MS
2526 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
2527
2528 /* If the replacement _DECL isn't initialized, don't do the
2529 transformation. */
2530 if (!fn)
2531 return false;
2532
2533 /* Otherwise, emit a call to strcat. */
2534 gcall *repl = gimple_build_call (fn, 2, dst, src);
2535 replace_call_with_call_and_fold (gsi, repl);
2536 return true;
ad03a744
RB
2537}
2538
745583f9
RB
2539/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2540 LEN, and SIZE. */
2541
2542static bool
2543gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
2544{
355fe088 2545 gimple *stmt = gsi_stmt (*gsi);
745583f9
RB
2546 tree dest = gimple_call_arg (stmt, 0);
2547 tree src = gimple_call_arg (stmt, 1);
2548 tree len = gimple_call_arg (stmt, 2);
2549 tree size = gimple_call_arg (stmt, 3);
2550 tree fn;
2551 const char *p;
2552
2553 p = c_getstr (src);
2554 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2555 if ((p && *p == '\0')
2556 || integer_zerop (len))
2557 {
2558 replace_call_with_value (gsi, dest);
2559 return true;
2560 }
2561
2562 if (! tree_fits_uhwi_p (size))
2563 return false;
2564
2565 if (! integer_all_onesp (size))
2566 {
2567 tree src_len = c_strlen (src, 1);
2568 if (src_len
2569 && tree_fits_uhwi_p (src_len)
2570 && tree_fits_uhwi_p (len)
2571 && ! tree_int_cst_lt (len, src_len))
2572 {
2573 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2574 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
2575 if (!fn)
2576 return false;
2577
355fe088 2578 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
745583f9
RB
2579 replace_call_with_call_and_fold (gsi, repl);
2580 return true;
2581 }
2582 return false;
2583 }
2584
2585 /* If __builtin_strncat_chk is used, assume strncat is available. */
2586 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2587 if (!fn)
2588 return false;
2589
355fe088 2590 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
745583f9
RB
2591 replace_call_with_call_and_fold (gsi, repl);
2592 return true;
2593}
2594
a918bfbf
ML
2595/* Build and append gimple statements to STMTS that would load a first
2596 character of a memory location identified by STR. LOC is location
2597 of the statement. */
2598
2599static tree
2600gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2601{
2602 tree var;
2603
2604 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2605 tree cst_uchar_ptr_node
2606 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2607 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2608
2609 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2610 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2611 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2612
2613 gimple_assign_set_lhs (stmt, var);
2614 gimple_seq_add_stmt_without_update (stmts, stmt);
2615
2616 return var;
2617}
2618
d2f8402a 2619/* Fold a call to the str{n}{case}cmp builtin pointed by GSI iterator. */
a918bfbf
ML
2620
2621static bool
2622gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
2623{
2624 gimple *stmt = gsi_stmt (*gsi);
2625 tree callee = gimple_call_fndecl (stmt);
2626 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2627
2628 tree type = integer_type_node;
2629 tree str1 = gimple_call_arg (stmt, 0);
2630 tree str2 = gimple_call_arg (stmt, 1);
2631 tree lhs = gimple_call_lhs (stmt);
d86d8b35
MS
2632
2633 tree bound_node = NULL_TREE;
d2f8402a 2634 unsigned HOST_WIDE_INT bound = HOST_WIDE_INT_M1U;
a918bfbf
ML
2635
2636 /* Handle strncmp and strncasecmp functions. */
2637 if (gimple_call_num_args (stmt) == 3)
2638 {
d86d8b35
MS
2639 bound_node = gimple_call_arg (stmt, 2);
2640 if (tree_fits_uhwi_p (bound_node))
2641 bound = tree_to_uhwi (bound_node);
a918bfbf
ML
2642 }
2643
d86d8b35 2644 /* If the BOUND parameter is zero, return zero. */
d2f8402a 2645 if (bound == 0)
a918bfbf
ML
2646 {
2647 replace_call_with_value (gsi, integer_zero_node);
2648 return true;
2649 }
2650
2651 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
2652 if (operand_equal_p (str1, str2, 0))
2653 {
2654 replace_call_with_value (gsi, integer_zero_node);
2655 return true;
2656 }
2657
d2f8402a
MS
2658 /* Initially set to the number of characters, including the terminating
2659 nul if each array has one. LENx == strnlen (Sx, LENx) implies that
2660 the array Sx is not terminated by a nul.
2661 For nul-terminated strings then adjusted to their length so that
2662 LENx == NULPOSx holds. */
2663 unsigned HOST_WIDE_INT len1 = HOST_WIDE_INT_MAX, len2 = len1;
866626ef
MS
2664 const char *p1 = getbyterep (str1, &len1);
2665 const char *p2 = getbyterep (str2, &len2);
d2f8402a
MS
2666
2667 /* The position of the terminating nul character if one exists, otherwise
2668 a value greater than LENx. */
2669 unsigned HOST_WIDE_INT nulpos1 = HOST_WIDE_INT_MAX, nulpos2 = nulpos1;
2670
2671 if (p1)
2672 {
2673 size_t n = strnlen (p1, len1);
2674 if (n < len1)
2675 len1 = nulpos1 = n;
2676 }
2677
2678 if (p2)
2679 {
2680 size_t n = strnlen (p2, len2);
2681 if (n < len2)
2682 len2 = nulpos2 = n;
2683 }
a918bfbf
ML
2684
2685 /* For known strings, return an immediate value. */
2686 if (p1 && p2)
2687 {
2688 int r = 0;
2689 bool known_result = false;
2690
2691 switch (fcode)
2692 {
2693 case BUILT_IN_STRCMP:
8b0b334a 2694 case BUILT_IN_STRCMP_EQ:
d2f8402a 2695 if (len1 != nulpos1 || len2 != nulpos2)
a918bfbf 2696 break;
d2f8402a
MS
2697
2698 r = strcmp (p1, p2);
2699 known_result = true;
2700 break;
2701
a918bfbf 2702 case BUILT_IN_STRNCMP:
8b0b334a 2703 case BUILT_IN_STRNCMP_EQ:
a918bfbf 2704 {
d86d8b35
MS
2705 if (bound == HOST_WIDE_INT_M1U)
2706 break;
2707
d2f8402a
MS
2708 /* Reduce the bound to be no more than the length
2709 of the shorter of the two strings, or the sizes
2710 of the unterminated arrays. */
2711 unsigned HOST_WIDE_INT n = bound;
2712
2713 if (len1 == nulpos1 && len1 < n)
2714 n = len1 + 1;
2715 if (len2 == nulpos2 && len2 < n)
2716 n = len2 + 1;
2717
2718 if (MIN (nulpos1, nulpos2) + 1 < n)
a918bfbf 2719 break;
d2f8402a
MS
2720
2721 r = strncmp (p1, p2, n);
a918bfbf
ML
2722 known_result = true;
2723 break;
2724 }
2725 /* Only handleable situation is where the string are equal (result 0),
2726 which is already handled by operand_equal_p case. */
2727 case BUILT_IN_STRCASECMP:
2728 break;
2729 case BUILT_IN_STRNCASECMP:
2730 {
d2f8402a 2731 if (bound == HOST_WIDE_INT_M1U)
a918bfbf 2732 break;
d2f8402a 2733 r = strncmp (p1, p2, bound);
a918bfbf
ML
2734 if (r == 0)
2735 known_result = true;
5de73c05 2736 break;
a918bfbf
ML
2737 }
2738 default:
2739 gcc_unreachable ();
2740 }
2741
2742 if (known_result)
2743 {
2744 replace_call_with_value (gsi, build_cmp_result (type, r));
2745 return true;
2746 }
2747 }
2748
d2f8402a 2749 bool nonzero_bound = (bound >= 1 && bound < HOST_WIDE_INT_M1U)
a918bfbf 2750 || fcode == BUILT_IN_STRCMP
8b0b334a 2751 || fcode == BUILT_IN_STRCMP_EQ
a918bfbf
ML
2752 || fcode == BUILT_IN_STRCASECMP;
2753
2754 location_t loc = gimple_location (stmt);
2755
2756 /* If the second arg is "", return *(const unsigned char*)arg1. */
d2f8402a 2757 if (p2 && *p2 == '\0' && nonzero_bound)
a918bfbf
ML
2758 {
2759 gimple_seq stmts = NULL;
2760 tree var = gimple_load_first_char (loc, str1, &stmts);
2761 if (lhs)
2762 {
2763 stmt = gimple_build_assign (lhs, NOP_EXPR, var);
2764 gimple_seq_add_stmt_without_update (&stmts, stmt);
2765 }
2766
2767 gsi_replace_with_seq_vops (gsi, stmts);
2768 return true;
2769 }
2770
2771 /* If the first arg is "", return -*(const unsigned char*)arg2. */
d2f8402a 2772 if (p1 && *p1 == '\0' && nonzero_bound)
a918bfbf
ML
2773 {
2774 gimple_seq stmts = NULL;
2775 tree var = gimple_load_first_char (loc, str2, &stmts);
2776
2777 if (lhs)
2778 {
2779 tree c = create_tmp_reg_or_ssa_name (integer_type_node);
2780 stmt = gimple_build_assign (c, NOP_EXPR, var);
2781 gimple_seq_add_stmt_without_update (&stmts, stmt);
2782
2783 stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
2784 gimple_seq_add_stmt_without_update (&stmts, stmt);
2785 }
2786
2787 gsi_replace_with_seq_vops (gsi, stmts);
2788 return true;
2789 }
2790
d2f8402a 2791 /* If BOUND is one, return an expression corresponding to
a918bfbf 2792 (*(const unsigned char*)arg2 - *(const unsigned char*)arg1). */
d2f8402a 2793 if (fcode == BUILT_IN_STRNCMP && bound == 1)
a918bfbf
ML
2794 {
2795 gimple_seq stmts = NULL;
2796 tree temp1 = gimple_load_first_char (loc, str1, &stmts);
2797 tree temp2 = gimple_load_first_char (loc, str2, &stmts);
2798
2799 if (lhs)
2800 {
2801 tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
2802 gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
2803 gimple_seq_add_stmt_without_update (&stmts, convert1);
2804
2805 tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
2806 gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
2807 gimple_seq_add_stmt_without_update (&stmts, convert2);
2808
2809 stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
2810 gimple_seq_add_stmt_without_update (&stmts, stmt);
2811 }
2812
2813 gsi_replace_with_seq_vops (gsi, stmts);
2814 return true;
2815 }
2816
d2f8402a
MS
2817 /* If BOUND is greater than the length of one constant string,
2818 and the other argument is also a nul-terminated string, replace
2819 strncmp with strcmp. */
2820 if (fcode == BUILT_IN_STRNCMP
2821 && bound > 0 && bound < HOST_WIDE_INT_M1U
2822 && ((p2 && len2 < bound && len2 == nulpos2)
2823 || (p1 && len1 < bound && len1 == nulpos1)))
caed5c92
QZ
2824 {
2825 tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
2826 if (!fn)
2827 return false;
2828 gimple *repl = gimple_build_call (fn, 2, str1, str2);
2829 replace_call_with_call_and_fold (gsi, repl);
2830 return true;
2831 }
2832
a918bfbf
ML
2833 return false;
2834}
2835
488c6247
ML
/* Fold a call to the memchr pointed by GSI iterator.
   Handles three cases:
     - a zero LEN argument folds to a null pointer;
     - a search over a known byte representation that finds no match
       (and whose LEN provably stays within the underlying object)
       folds to a null pointer;
     - a search that finds a match folds to ARG1 + constant offset.
   Returns true if the call was replaced, false otherwise.  */

static bool
gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree arg1 = gimple_call_arg (stmt, 0);
  tree arg2 = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
      return true;
    }

  /* Punt unless both the searched-for character and the length are
     compile-time constants representable on the target.  */
  char c;
  if (TREE_CODE (arg2) != INTEGER_CST
      || !tree_fits_uhwi_p (len)
      || !target_char_cst_p (arg2, &c))
    return false;

  unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
  unsigned HOST_WIDE_INT string_length;
  /* Known bytes of ARG1's initializer, or null if not known.  */
  const char *p1 = getbyterep (arg1, &string_length);

  if (p1)
    {
      /* Do the search at compile time, but only within the bytes we
	 actually know (MIN of the requested and known lengths).  */
      const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
      if (r == NULL)
	{
	  tree mem_size, offset_node;
	  byte_representation (arg1, &offset_node, &mem_size, NULL);
	  unsigned HOST_WIDE_INT offset = (offset_node == NULL_TREE)
					  ? 0 : tree_to_uhwi (offset_node);
	  /* MEM_SIZE is the size of the array the string literal
	     is stored in.  */
	  unsigned HOST_WIDE_INT string_size = tree_to_uhwi (mem_size) - offset;
	  gcc_checking_assert (string_length <= string_size);
	  /* Only fold to null when LEN cannot read past the object;
	     otherwise the call reads unknown bytes and must be kept.  */
	  if (length <= string_size)
	    {
	      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
	      return true;
	    }
	}
      else
	{
	  /* Match found: result is ARG1 plus the constant offset.  */
	  unsigned HOST_WIDE_INT offset = r - p1;
	  gimple_seq stmts = NULL;
	  if (lhs != NULL_TREE)
	    {
	      tree offset_cst = build_int_cst (sizetype, offset);
	      gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
						   arg1, offset_cst);
	      gimple_seq_add_stmt_without_update (&stmts, stmt);
	    }
	  else
	    /* Result unused: replace the call with a no-op.  */
	    gimple_seq_add_stmt_without_update (&stmts,
						gimple_build_nop ());

	  gsi_replace_with_seq_vops (gsi, stmts);
	  return true;
	}
    }

  return false;
}
a918bfbf 2905
fef5a0d9
RB
2906/* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
2907 to the call. IGNORE is true if the value returned
2908 by the builtin will be ignored. UNLOCKED is true is true if this
2909 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
2910 the known length of the string. Return NULL_TREE if no simplification
2911 was possible. */
2912
2913static bool
2914gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
fef5a0d9 2915 tree arg0, tree arg1,
dcb7fae2 2916 bool unlocked)
fef5a0d9 2917{
355fe088 2918 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2 2919
fef5a0d9
RB
2920 /* If we're using an unlocked function, assume the other unlocked
2921 functions exist explicitly. */
2922 tree const fn_fputc = (unlocked
2923 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
2924 : builtin_decl_implicit (BUILT_IN_FPUTC));
2925 tree const fn_fwrite = (unlocked
2926 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
2927 : builtin_decl_implicit (BUILT_IN_FWRITE));
2928
2929 /* If the return value is used, don't do the transformation. */
dcb7fae2 2930 if (gimple_call_lhs (stmt))
fef5a0d9
RB
2931 return false;
2932
fef5a0d9
RB
2933 /* Get the length of the string passed to fputs. If the length
2934 can't be determined, punt. */
598f7235 2935 tree len = get_maxval_strlen (arg0, SRK_STRLEN);
fef5a0d9
RB
2936 if (!len
2937 || TREE_CODE (len) != INTEGER_CST)
2938 return false;
2939
2940 switch (compare_tree_int (len, 1))
2941 {
2942 case -1: /* length is 0, delete the call entirely . */
2943 replace_call_with_value (gsi, integer_zero_node);
2944 return true;
2945
2946 case 0: /* length is 1, call fputc. */
2947 {
2948 const char *p = c_getstr (arg0);
2949 if (p != NULL)
2950 {
2951 if (!fn_fputc)
2952 return false;
2953
355fe088 2954 gimple *repl = gimple_build_call (fn_fputc, 2,
fef5a0d9
RB
2955 build_int_cst
2956 (integer_type_node, p[0]), arg1);
2957 replace_call_with_call_and_fold (gsi, repl);
2958 return true;
2959 }
2960 }
2961 /* FALLTHROUGH */
2962 case 1: /* length is greater than 1, call fwrite. */
2963 {
2964 /* If optimizing for size keep fputs. */
2965 if (optimize_function_for_size_p (cfun))
2966 return false;
2967 /* New argument list transforming fputs(string, stream) to
2968 fwrite(string, 1, len, stream). */
2969 if (!fn_fwrite)
2970 return false;
2971
355fe088 2972 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
fef5a0d9
RB
2973 size_one_node, len, arg1);
2974 replace_call_with_call_and_fold (gsi, repl);
2975 return true;
2976 }
2977 default:
2978 gcc_unreachable ();
2979 }
2980 return false;
2981}
2982
/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call.
   FCODE is the BUILT_IN_* code of the builtin.  The checked call is
   replaced by the unchecked variant when SIZE is the all-ones "no
   checking" sentinel, or when LEN (or its maximum value) is known to
   fit in SIZE.  Returns true if the call was simplified.  */

static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
				tree dest, tree src, tree len, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	{
	  replace_call_with_value (gsi, dest);
	  return true;
	}
      else
	{
	  /* __mempcpy_chk returns DEST + LEN; materialize that sum.  */
	  gimple_seq stmts = NULL;
	  len = gimple_convert_to_ptrofftype (&stmts, loc, len);
	  tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
				    TREE_TYPE (dest), dest, len);
	  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	  replace_call_with_value (gsi, temp);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
  /* An all-ones SIZE means "no checking"; otherwise prove LEN fits.  */
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	      return false;
	    }
	}
      else
	maxlen = len;

      /* Keep the check if the copy might not fit.  */
      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
3083
/* Fold a call to the __st[rp]cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call.
   FCODE is the BUILT_IN_* code of the builtin.  The checked call is
   replaced by the unchecked st{r,p}cpy when the source length is known
   to fit in SIZE (or SIZE is the all-ones "no checking" sentinel), by
   __strcpy_chk when the __stpcpy_chk result is unused, or by
   __memcpy_chk when c_strlen yields a non-constant length expression.
   Returns true if the call was simplified.  */

static bool
gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
				tree dest,
				tree src, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree len, fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    {
      /* Issue -Wrestrict unless the pointers are null (those do
	 not point to objects and so do not indicate an overlap;
	 such calls could be the result of sanitization and jump
	 threading).  */
      if (!integer_zerop (dest)
	  && !warning_suppressed_p (stmt, OPT_Wrestrict))
	{
	  tree func = gimple_call_fndecl (stmt);

	  warning_at (loc, OPT_Wrestrict,
		      "%qD source argument is the same as destination",
		      func);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (src, SRK_STRLENMAX);
  /* An all-ones SIZE means "no checking"; otherwise prove SRC fits.  */
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return false;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 3, dest, src, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return false;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return false;

	      /* memcpy copies LEN + 1 bytes to include the nul.  */
	      gimple_seq stmts = NULL;
	      len = force_gimple_operand (len, &stmts, true, NULL_TREE);
	      len = gimple_convert (&stmts, loc, size_type_node, len);
	      len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
				  build_int_cst (size_type_node, 1));
	      gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	      gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	maxlen = len;

      /* Strict: MAXLEN must be strictly less than SIZE to leave room
	 for the terminating nul.  */
      if (! tree_int_cst_lt (maxlen, size))
	return false;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
3189
3190/* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
3191 are the arguments to the call. If MAXLEN is not NULL, it is maximum
3192 length passed as third argument. IGNORE is true if return value can be
3193 ignored. FCODE is the BUILT_IN_* code of the builtin. */
3194
3195static bool
3196gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
3197 tree dest, tree src,
dcb7fae2 3198 tree len, tree size,
fef5a0d9
RB
3199 enum built_in_function fcode)
3200{
355fe088 3201 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2 3202 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
fef5a0d9
RB
3203 tree fn;
3204
3205 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
cbdd87d4 3206 {
fef5a0d9
RB
3207 /* If return value of __stpncpy_chk is ignored,
3208 optimize into __strncpy_chk. */
3209 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
3210 if (fn)
3211 {
355fe088 3212 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
fef5a0d9
RB
3213 replace_call_with_call_and_fold (gsi, repl);
3214 return true;
3215 }
cbdd87d4
RG
3216 }
3217
fef5a0d9
RB
3218 if (! tree_fits_uhwi_p (size))
3219 return false;
3220
598f7235 3221 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
fef5a0d9 3222 if (! integer_all_onesp (size))
cbdd87d4 3223 {
fef5a0d9 3224 if (! tree_fits_uhwi_p (len))
fe2ef088 3225 {
fef5a0d9
RB
3226 /* If LEN is not constant, try MAXLEN too.
3227 For MAXLEN only allow optimizing into non-_ocs function
3228 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
3229 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
3230 return false;
8a1561bc 3231 }
fef5a0d9
RB
3232 else
3233 maxlen = len;
3234
3235 if (tree_int_cst_lt (size, maxlen))
3236 return false;
cbdd87d4
RG
3237 }
3238
fef5a0d9
RB
3239 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
3240 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
3241 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
3242 if (!fn)
3243 return false;
3244
355fe088 3245 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
fef5a0d9
RB
3246 replace_call_with_call_and_fold (gsi, repl);
3247 return true;
cbdd87d4
RG
3248}
3249
2625bb5d
RB
/* Fold function call to builtin stpcpy with arguments DEST and SRC.
   When the result is unused, replace stpcpy with strcpy; when the
   source length is a known constant, replace it with memcpy followed
   by an assignment of DEST + LEN to the lhs.  Returns true if the
   statement was changed.  */

static bool
gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  location_t loc = gimple_location (stmt);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree fn, lenp1;

  /* If the result is unused, replace stpcpy with strcpy.  */
  if (gimple_call_lhs (stmt) == NULL_TREE)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;
      gimple_call_set_fndecl (stmt, fn);
      fold_stmt (gsi);
      return true;
    }

  /* Set to non-null if ARG refers to an unterminated array.  */
  c_strlen_data data = { };
  /* The size of the unterminated array if SRC refers to one.  */
  tree size;
  /* True if the size is exact/constant, false if it's the lower bound
     of a range.  */
  bool exact;
  tree len = c_strlen (src, 1, &data, 1);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    {
      data.decl = unterminated_array (src, &size, &exact);
      if (!data.decl)
	return false;
    }

  if (data.decl)
    {
      /* Avoid folding calls with unterminated arrays.  */
      if (!warning_suppressed_p (stmt, OPT_Wstringop_overread))
	warn_string_no_nul (loc, NULL_TREE, "stpcpy", src, data.decl, size,
			    exact);
      /* Warn only once for this statement.  */
      suppress_warning (stmt, OPT_Wstringop_overread);
      return false;
    }

  if (optimize_function_for_size_p (cfun)
      /* If length is zero it's small enough.  */
      && !integer_zerop (len))
    return false;

  /* If the source has a known length replace stpcpy with memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  /* Build memcpy (dest, src, len + 1) to copy the terminating nul.  */
  gimple_seq stmts = NULL;
  tree tem = gimple_convert (&stmts, loc, size_type_node, len);
  lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
			tem, build_int_cst (size_type_node, 1));
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
  /* The memcpy takes over the original call's virtual operands.  */
  gimple_move_vops (repl, stmt);
  gsi_insert_before (gsi, repl, GSI_SAME_STMT);
  /* Replace the result with dest + len.  */
  stmts = NULL;
  tem = gimple_convert (&stmts, loc, sizetype, len);
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
				      POINTER_PLUS_EXPR, dest, tem);
  gsi_replace (gsi, ret, false);
  /* Finally fold the memcpy call.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
3330
fef5a0d9
RB
/* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS.  Return
   NULL_TREE if a normal call should be emitted rather than expanding
   the function inline.  FCODE is either BUILT_IN_SNPRINTF_CHK or
   BUILT_IN_VSNPRINTF_CHK.  The checked call is rewritten in place to
   the unchecked {,v}snprintf by dropping the flag and size arguments,
   when LEN (or its maximum) is known to fit in SIZE and the format is
   safe.  */

static bool
gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
				  enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (gimple_call_num_args (stmt) < 5)
    return false;

  dest = gimple_call_arg (stmt, 0);
  len = gimple_call_arg (stmt, 1);
  flag = gimple_call_arg (stmt, 2);
  size = gimple_call_arg (stmt, 3);
  fmt = gimple_call_arg (stmt, 4);

  if (! tree_fits_uhwi_p (size))
    return false;

  /* An all-ones SIZE means "no checking"; otherwise prove LEN fits.  */
  if (! integer_all_onesp (size))
    {
      tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  if (!init_target_chars ())
    return false;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
			      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 5 argument by 3 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, len);
  gimple_call_set_arg (stmt, 2, fmt);
  /* Shift the trailing varargs down over the dropped flag/size slots.  */
  for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
cbdd87d4 3411
fef5a0d9
RB
/* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
   Return NULL_TREE if a normal call should be emitted rather than
   expanding the function inline.  FCODE is either BUILT_IN_SPRINTF_CHK
   or BUILT_IN_VSPRINTF_CHK.  The checked call is rewritten in place to
   the unchecked {,v}sprintf by dropping the flag and size arguments,
   when the output length deduced from the format provably fits in
   SIZE (or SIZE is the all-ones "no checking" sentinel).  */

static bool
gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
				 enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  unsigned nargs = gimple_call_num_args (stmt);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return false;
  dest = gimple_call_arg (stmt, 0);
  flag = gimple_call_arg (stmt, 1);
  size = gimple_call_arg (stmt, 2);
  fmt = gimple_call_arg (stmt, 3);

  if (! tree_fits_uhwi_p (size))
    return false;

  len = NULL_TREE;

  if (!init_target_chars ())
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = gimple_call_arg (stmt, 4);
	      if (POINTER_TYPE_P (TREE_TYPE (arg)))
		{
		  len = c_strlen (arg, 1);
		  if (! len || ! tree_fits_uhwi_p (len))
		    len = NULL_TREE;
		}
	    }
	}
    }

  /* An all-ones SIZE means "no checking"; otherwise the deduced
     length must be strictly less than SIZE (room for the nul).  */
  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
	return false;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
			      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 4 argument by 2 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, fmt);
  /* Shift the trailing varargs down over the dropped flag/size slots.  */
  for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
3507
35770bb2
RB
/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
   ORIG may be null if this is a 2-argument call.  We don't attempt to
   simplify calls with more than 3 arguments.

   Two forms are folded: a format with no '%' becomes
   strcpy (dest, fmt), and a "%s" format becomes strcpy (dest, orig).
   In both cases a used lhs is assigned the known output length.

   Return true if simplification was possible, otherwise false.  */

bool
gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (gimple_call_num_args (stmt) > 3)
    return false;

  tree orig = NULL_TREE;
  if (gimple_call_num_args (stmt) == 3)
    orig = gimple_call_arg (stmt, 2);

  /* Check whether the format is a literal string constant.  */
  tree fmt = gimple_call_arg (stmt, 1);
  const char *fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  tree dest = gimple_call_arg (stmt, 0);

  if (!init_target_chars ())
    return false;

  tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
  if (!fn)
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
	return false;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
	 'format' is known to contain no % formats.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);

      /* Propagate the NO_WARNING bit to avoid issuing the same
	 warning more than once.  */
      copy_warning (repl, stmt);

      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  /* The result of sprintf is the format's length.  */
	  repl = gimple_build_assign (lhs, build_int_cst (TREE_TYPE (lhs),
							  strlen (fmt_str)));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ??? We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
	return false;

      /* Don't fold calls with source arguments of invalid (nonpointer)
	 types.  */
      if (!POINTER_TYPE_P (TREE_TYPE (orig)))
	return false;

      /* Only needed when the lhs is used: the result of sprintf is
	 the length of ORIG.  */
      tree orig_len = NULL_TREE;
      if (gimple_call_lhs (stmt))
	{
	  orig_len = get_maxval_strlen (orig, SRK_STRLEN);
	  if (!orig_len)
	    return false;
	}

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);

      /* Propagate the NO_WARNING bit to avoid issuing the same
	 warning more than once.  */
      copy_warning (repl, stmt);

      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (TREE_TYPE (lhs),
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
	  repl = gimple_build_assign (lhs, orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ??? We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
3637
/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
   FMT, and ORIG.  ORIG may be null if this is a 3-argument call.  We don't
   attempt to simplify calls with more than 4 arguments.

   Return true if simplification was possible, otherwise false.  */

bool
gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest = gimple_call_arg (stmt, 0);
  tree destsize = gimple_call_arg (stmt, 1);
  tree fmt = gimple_call_arg (stmt, 2);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  if (gimple_call_num_args (stmt) > 4)
    return false;

  if (gimple_call_num_args (stmt) == 4)
    orig = gimple_call_arg (stmt, 3);

  /* The destination size must be a compile-time constant so we can
     prove the copy fits.  */
  if (!tree_fits_uhwi_p (destsize))
    return false;
  unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't optimize snprintf (buf, 4, "abc", ptr++).  */
      if (orig)
	return false;

      /* We could expand this as
	 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
	 or to
	 memcpy (str, fmt_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      size_t len = strlen (fmt_str);
      if (len >= destlen)
	return false;

      /* Build strcpy (dest, fmt); if the call's lhs is used, also
	 assign it the (known constant) number of characters written.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  repl = gimple_build_assign (lhs,
				      build_int_cst (TREE_TYPE (lhs), len));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ??? We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't crash on snprintf (str1, cst, "%s").  */
      if (!orig)
	return false;

      /* The length of the source must be known exactly so we can
	 compare it against the destination size below.  */
      tree orig_len = get_maxval_strlen (orig, SRK_STRLEN);
      if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
	return false;

      /* We could expand this as
	 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
	 or to
	 memcpy (str1, str2_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      if (compare_tree_int (orig_len, destlen) >= 0)
	return false;

      /* Convert snprintf (str1, cst, "%s", str2) into
	 strcpy (str1, str2) if strlen (str2) < cst.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  /* The lhs receives strlen (str2), converted to the lhs type
	     if necessary.  */
	  if (!useless_type_conversion_p (TREE_TYPE (lhs),
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
	  repl = gimple_build_assign (lhs, orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ??? We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
35770bb2 3774
/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
   FP, FMT, and ARG are the arguments to the call.  We don't fold calls with
   more than 3 arguments, and ARG may be null in the 2-argument case.

   Return true if simplification was possible, otherwise false.  FCODE is
   the BUILT_IN_* code of the function to be simplified.  */

static bool
gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
			     tree fp, tree fmt, tree arg,
			     enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_fputc, fn_fputs;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
      fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
    }
  else
    {
      fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
      fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
    }

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, replace the call
     with fputs (or drop it entirely for an empty format).  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
	  && arg)
	return false;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* When "string" doesn't contain %, replace all cases of
	 fprintf (fp, string) with fputs (string, fp).  The fputs
	 builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return false;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg
	  || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
	return false;
      if (fn_fputc)
	{
	  gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
3875
/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
   FMT and ARG are the arguments to the call; we don't fold cases with
   more than 2 arguments, and ARG may be null if this is a 1-argument case.

   Return true if simplification was possible, otherwise false.  FCODE is
   the BUILT_IN_* code of the function to be simplified.  */

static bool
gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
			    tree arg, enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_putchar, fn_puts, newarg;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
      fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
    }
  else
    {
      fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
      fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
    }

  if (!init_target_chars ())
    return false;

  /* Handle both printf ("%s", str) where STR is a constant string,
     and formats with no conversion specifiers at all.  */
  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
	{
	  if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
	    return false;

	  if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	    return false;

	  str = c_getstr (arg);
	  if (str == NULL)
	    return false;
	}
      else
	{
	  /* The format specifier doesn't contain any '%' characters.  */
	  if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
	      && arg)
	    return false;
	  str = fmt_str;
	}

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
	{
	  /* Given printf("c"), (where c is any one character,)
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
	  newarg = build_int_cst (integer_type_node, str[0]);
	  if (fn_putchar)
	    {
	      gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	{
	  /* If the string was "string\n", call puts("string").  */
	  size_t len = strlen (str);
	  if ((unsigned char)str[len - 1] == target_newline
	      && (size_t) (int) len == len
	      && (int) len > 0)
	    {
	      char *newstr;

	      /* Create a NUL-terminated string that's one char shorter
		 than the original, stripping off the trailing '\n'.  */
	      newstr = xstrdup (str);
	      newstr[len - 1] = '\0';
	      newarg = build_string_literal (len, newstr);
	      free (newstr);
	      if (fn_puts)
		{
		  gcall *repl = gimple_build_call (fn_puts, 1, newarg);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	    }
	  else
	    /* We'd like to arrange to call fputs(string,stdout) here,
	       but we need stdout and don't have a way to get it yet.  */
	    return false;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return false;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_puts)
	{
	  gcall *repl = gimple_build_call (fn_puts, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || ! useless_type_conversion_p (integer_type_node,
					       TREE_TYPE (arg)))
	return false;
      if (fn_putchar)
	{
	  gcall *repl = gimple_build_call (fn_putchar, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
4027
edd7ae68 4028
/* Fold a call to __builtin_strlen.  Replace the call with a constant
   when the exact length of the argument string is known; otherwise,
   when possible, record the computed [MIN, MAX] length range on the
   call's lhs.

   Return true if the call was replaced, otherwise false.  */

static bool
gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree arg = gimple_call_arg (stmt, 0);

  wide_int minlen;
  wide_int maxlen;

  c_strlen_data lendata = { };
  if (get_range_strlen (arg, &lendata, /* eltsize = */ 1)
      && !lendata.decl
      && lendata.minlen && TREE_CODE (lendata.minlen) == INTEGER_CST
      && lendata.maxlen && TREE_CODE (lendata.maxlen) == INTEGER_CST)
    {
      /* The range of lengths refers to either a single constant
	 string or to the longest and shortest constant string
	 referenced by the argument of the strlen() call, or to
	 the strings that can possibly be stored in the arrays
	 the argument refers to.  */
      minlen = wi::to_wide (lendata.minlen);
      maxlen = wi::to_wide (lendata.maxlen);
    }
  else
    {
      /* No usable range was determined; fall back to the widest
	 possible one: [0, max_object_size () - 2].  */
      unsigned prec = TYPE_PRECISION (sizetype);

      minlen = wi::shwi (0, prec);
      maxlen = wi::to_wide (max_object_size (), prec) - 2;
    }

  if (minlen == maxlen)
    {
      /* Fold the strlen call to a constant.  */
      tree type = TREE_TYPE (lendata.minlen);
      tree len = force_gimple_operand_gsi (gsi,
					   wide_int_to_tree (type, minlen),
					   true, NULL, true, GSI_SAME_STMT);
      replace_call_with_value (gsi, len);
      return true;
    }

  /* Set the strlen() range to [0, MAXLEN].  */
  if (tree lhs = gimple_call_lhs (stmt))
    set_strlen_range (lhs, minlen, maxlen);

  return false;
}
4080
48126138
NS
4081/* Fold a call to __builtin_acc_on_device. */
4082
4083static bool
4084gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
4085{
4086 /* Defer folding until we know which compiler we're in. */
4087 if (symtab->state != EXPANSION)
4088 return false;
4089
4090 unsigned val_host = GOMP_DEVICE_HOST;
4091 unsigned val_dev = GOMP_DEVICE_NONE;
4092
4093#ifdef ACCEL_COMPILER
4094 val_host = GOMP_DEVICE_NOT_HOST;
4095 val_dev = ACCEL_COMPILER_acc_device;
4096#endif
4097
4098 location_t loc = gimple_location (gsi_stmt (*gsi));
4099
4100 tree host_eq = make_ssa_name (boolean_type_node);
4101 gimple *host_ass = gimple_build_assign
4102 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
4103 gimple_set_location (host_ass, loc);
4104 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
4105
4106 tree dev_eq = make_ssa_name (boolean_type_node);
4107 gimple *dev_ass = gimple_build_assign
4108 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
4109 gimple_set_location (dev_ass, loc);
4110 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
4111
4112 tree result = make_ssa_name (boolean_type_node);
4113 gimple *result_ass = gimple_build_assign
4114 (result, BIT_IOR_EXPR, host_eq, dev_eq);
4115 gimple_set_location (result_ass, loc);
4116 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
4117
4118 replace_call_with_value (gsi, result);
4119
4120 return true;
4121}
cbdd87d4 4122
fe75f732
PK
4123/* Fold realloc (0, n) -> malloc (n). */
4124
4125static bool
4126gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
4127{
4128 gimple *stmt = gsi_stmt (*gsi);
4129 tree arg = gimple_call_arg (stmt, 0);
4130 tree size = gimple_call_arg (stmt, 1);
4131
4132 if (operand_equal_p (arg, null_pointer_node, 0))
4133 {
4134 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
4135 if (fn_malloc)
4136 {
4137 gcall *repl = gimple_build_call (fn_malloc, 1, size);
4138 replace_call_with_call_and_fold (gsi, repl);
4139 return true;
4140 }
4141 }
4142 return false;
4143}
4144
/* Number of bytes into which any type but aggregate or vector types
   should fit.  */
static constexpr size_t clear_padding_unit
  = MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT;
/* Buffer size on which __builtin_clear_padding folding code works.  */
static const size_t clear_padding_buf_size = 32 * clear_padding_unit;

/* Data passed through __builtin_clear_padding folding.  */
struct clear_padding_struct {
  /* Location used for any diagnostics and for emitted statements.  */
  location_t loc;
  /* 0 during __builtin_clear_padding folding, nonzero during
     clear_type_padding_in_mask.  In that case, instead of clearing the
     non-padding bits in union_ptr array clear the padding bits in there.  */
  bool clear_in_mask;
  /* Base address the emitted stores are relative to.  */
  tree base;
  /* Alias type used for the MEM_REFs built by clear_padding_flush.  */
  tree alias_type;
  /* Insertion point for emitted statements.  */
  gimple_stmt_iterator *gsi;
  /* Alignment of buf->base + 0.  */
  unsigned align;
  /* Offset from buf->base.  Should be always a multiple of UNITS_PER_WORD.  */
  HOST_WIDE_INT off;
  /* Number of padding bytes before buf->off that don't have padding clear
     code emitted yet.  */
  HOST_WIDE_INT padding_bytes;
  /* The size of the whole object.  Never emit code to touch
     buf->base + buf->sz or following bytes.  */
  HOST_WIDE_INT sz;
  /* Number of bytes recorded in buf->buf.  */
  size_t size;
  /* When inside union, instead of emitting code we and bits inside of
     the union_ptr array.  */
  unsigned char *union_ptr;
  /* Set bits mean padding bits that need to be cleared by the builtin.  */
  unsigned char buf[clear_padding_buf_size + clear_padding_unit];
};
4180
/* Emit code to clear padding requested in BUF->buf - set bits
   in there stand for padding that should be cleared.  FULL is true
   if everything from the buffer should be flushed, otherwise
   it can leave up to 2 * clear_padding_unit bytes for further
   processing.  */

static void
clear_padding_flush (clear_padding_struct *buf, bool full)
{
  gcc_assert ((clear_padding_unit % UNITS_PER_WORD) == 0);
  if (!full && buf->size < 2 * clear_padding_unit)
    return;
  gcc_assert ((buf->off % UNITS_PER_WORD) == 0);
  /* END is how many leading bytes of buf->buf will be consumed by this
     flush; for a partial flush keep a whole-unit tail for later.  */
  size_t end = buf->size;
  if (!full)
    end = ((end - clear_padding_unit - 1) / clear_padding_unit
	   * clear_padding_unit);
  size_t padding_bytes = buf->padding_bytes;
  if (buf->union_ptr)
    {
      if (buf->clear_in_mask)
	{
	  /* During clear_type_padding_in_mask, clear the padding
	     bits set in buf->buf in the buf->union_ptr mask.  */
	  for (size_t i = 0; i < end; i++)
	    {
	      if (buf->buf[i] == (unsigned char) ~0)
		padding_bytes++;
	      else
		{
		  /* A run of all-padding bytes ended; zero it in the
		     mask, then clear this byte's padding bits.  */
		  memset (&buf->union_ptr[buf->off + i - padding_bytes],
			  0, padding_bytes);
		  padding_bytes = 0;
		  buf->union_ptr[buf->off + i] &= ~buf->buf[i];
		}
	    }
	  if (full)
	    {
	      memset (&buf->union_ptr[buf->off + end - padding_bytes],
		      0, padding_bytes);
	      buf->off = 0;
	      buf->size = 0;
	      buf->padding_bytes = 0;
	    }
	  else
	    {
	      /* Shift the unconsumed tail to the front of buf->buf.  */
	      memmove (buf->buf, buf->buf + end, buf->size - end);
	      buf->off += end;
	      buf->size -= end;
	      buf->padding_bytes = padding_bytes;
	    }
	  return;
	}
      /* Inside of a union, instead of emitting any code, instead
	 clear all bits in the union_ptr buffer that are clear
	 in buf.  Whole padding bytes don't clear anything.  */
      for (size_t i = 0; i < end; i++)
	{
	  if (buf->buf[i] == (unsigned char) ~0)
	    padding_bytes++;
	  else
	    {
	      padding_bytes = 0;
	      buf->union_ptr[buf->off + i] &= buf->buf[i];
	    }
	}
      if (full)
	{
	  buf->off = 0;
	  buf->size = 0;
	  buf->padding_bytes = 0;
	}
      else
	{
	  memmove (buf->buf, buf->buf + end, buf->size - end);
	  buf->off += end;
	  buf->size -= end;
	  buf->padding_bytes = padding_bytes;
	}
      return;
    }
  /* Not inside a union: emit actual stores, one word at a time,
     possibly halving the word size near the end of the object.  */
  size_t wordsize = UNITS_PER_WORD;
  for (size_t i = 0; i < end; i += wordsize)
    {
      /* Classify the bytes of this word: [nonzero_first, nonzero_last)
	 spans the bytes with any padding bits set, [zero_first, zero_last)
	 the bytes with none; ALL_ONES means every nonzero byte is 0xff,
	 BYTES_ONLY that no byte is partially set (no bitfields).  */
      size_t nonzero_first = wordsize;
      size_t nonzero_last = 0;
      size_t zero_first = wordsize;
      size_t zero_last = 0;
      bool all_ones = true, bytes_only = true;
      if ((unsigned HOST_WIDE_INT) (buf->off + i + wordsize)
	  > (unsigned HOST_WIDE_INT) buf->sz)
	{
	  /* The word would run past the end of the object; retry with
	     a smaller word.  */
	  gcc_assert (wordsize > 1);
	  wordsize /= 2;
	  i -= wordsize;
	  continue;
	}
      for (size_t j = i; j < i + wordsize && j < end; j++)
	{
	  if (buf->buf[j])
	    {
	      if (nonzero_first == wordsize)
		{
		  nonzero_first = j - i;
		  nonzero_last = j - i;
		}
	      if (nonzero_last != j - i)
		all_ones = false;
	      nonzero_last = j + 1 - i;
	    }
	  else
	    {
	      if (zero_first == wordsize)
		zero_first = j - i;
	      zero_last = j + 1 - i;
	    }
	  if (buf->buf[j] != 0 && buf->buf[j] != (unsigned char) ~0)
	    {
	      all_ones = false;
	      bytes_only = false;
	    }
	}
      size_t padding_end = i;
      if (padding_bytes)
	{
	  /* There is an open run of padding bytes from a previous word;
	     try to extend it, otherwise emit a store clearing it.  */
	  if (nonzero_first == 0
	      && nonzero_last == wordsize
	      && all_ones)
	    {
	      /* All bits are padding and we had some padding
		 before too.  Just extend it.  */
	      padding_bytes += wordsize;
	      continue;
	    }
	  if (all_ones && nonzero_first == 0)
	    {
	      padding_bytes += nonzero_last;
	      padding_end += nonzero_last;
	      nonzero_first = wordsize;
	      nonzero_last = 0;
	    }
	  else if (bytes_only && nonzero_first == 0)
	    {
	      gcc_assert (zero_first && zero_first != wordsize);
	      padding_bytes += zero_first;
	      padding_end += zero_first;
	    }
	  /* Emit a store of PADDING_BYTES zero bytes ending at
	     PADDING_END (either one char or a char array).  */
	  tree atype, src;
	  if (padding_bytes == 1)
	    {
	      atype = char_type_node;
	      src = build_zero_cst (char_type_node);
	    }
	  else
	    {
	      atype = build_array_type_nelts (char_type_node, padding_bytes);
	      src = build_constructor (atype, NULL);
	    }
	  tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
				 build_int_cst (buf->alias_type,
						buf->off + padding_end
						- padding_bytes));
	  gimple *g = gimple_build_assign (dst, src);
	  gimple_set_location (g, buf->loc);
	  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	  padding_bytes = 0;
	  buf->padding_bytes = 0;
	}
      if (nonzero_first == wordsize)
	/* All bits in a word are 0, there are no padding bits.  */
	continue;
      if (all_ones && nonzero_last == wordsize)
	{
	  /* All bits between nonzero_first and end of word are padding
	     bits, start counting padding_bytes.  */
	  padding_bytes = nonzero_last - nonzero_first;
	  continue;
	}
      if (bytes_only)
	{
	  /* If bitfields aren't involved in this word, prefer storing
	     individual bytes or groups of them over performing a RMW
	     operation on the whole word.  */
	  gcc_assert (i + zero_last <= end);
	  for (size_t j = padding_end; j < i + zero_last; j++)
	    {
	      if (buf->buf[j])
		{
		  size_t k;
		  for (k = j; k < i + zero_last; k++)
		    if (buf->buf[k] == 0)
		      break;
		  /* Bytes [j, k) are all padding; clear them with a
		     single char or char-array store.  */
		  HOST_WIDE_INT off = buf->off + j;
		  tree atype, src;
		  if (k - j == 1)
		    {
		      atype = char_type_node;
		      src = build_zero_cst (char_type_node);
		    }
		  else
		    {
		      atype = build_array_type_nelts (char_type_node, k - j);
		      src = build_constructor (atype, NULL);
		    }
		  tree dst = build2_loc (buf->loc, MEM_REF, atype,
					 buf->base,
					 build_int_cst (buf->alias_type, off));
		  gimple *g = gimple_build_assign (dst, src);
		  gimple_set_location (g, buf->loc);
		  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
		  j = k;
		}
	    }
	  if (nonzero_last == wordsize)
	    padding_bytes = nonzero_last - zero_last;
	  continue;
	}
      /* Bitfields present: find the smallest power-of-two element that
	 covers all the padding bits in this word and do a read-modify-write
	 with an AND mask (or a plain zero store if all bits are padding).  */
      for (size_t eltsz = 1; eltsz <= wordsize; eltsz <<= 1)
	{
	  if (nonzero_last - nonzero_first <= eltsz
	      && ((nonzero_first & ~(eltsz - 1))
		  == ((nonzero_last - 1) & ~(eltsz - 1))))
	    {
	      tree type;
	      if (eltsz == 1)
		type = char_type_node;
	      else
		type = lang_hooks.types.type_for_size (eltsz * BITS_PER_UNIT,
						       0);
	      size_t start = nonzero_first & ~(eltsz - 1);
	      HOST_WIDE_INT off = buf->off + i + start;
	      tree atype = type;
	      if (eltsz > 1 && buf->align < TYPE_ALIGN (type))
		atype = build_aligned_type (type, buf->align);
	      tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
				     build_int_cst (buf->alias_type, off));
	      tree src;
	      gimple *g;
	      if (all_ones
		  && nonzero_first == start
		  && nonzero_last == start + eltsz)
		src = build_zero_cst (type);
	      else
		{
		  /* src = dst & ~padding_mask;  */
		  src = make_ssa_name (type);
		  g = gimple_build_assign (src, unshare_expr (dst));
		  gimple_set_location (g, buf->loc);
		  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
		  tree mask = native_interpret_expr (type,
						     buf->buf + i + start,
						     eltsz);
		  gcc_assert (mask && TREE_CODE (mask) == INTEGER_CST);
		  mask = fold_build1 (BIT_NOT_EXPR, type, mask);
		  tree src_masked = make_ssa_name (type);
		  g = gimple_build_assign (src_masked, BIT_AND_EXPR,
					   src, mask);
		  gimple_set_location (g, buf->loc);
		  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
		  src = src_masked;
		}
	      g = gimple_build_assign (dst, src);
	      gimple_set_location (g, buf->loc);
	      gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	      break;
	    }
	}
    }
  if (full)
    {
      /* Flush any run of padding bytes still open at END.  */
      if (padding_bytes)
	{
	  tree atype, src;
	  if (padding_bytes == 1)
	    {
	      atype = char_type_node;
	      src = build_zero_cst (char_type_node);
	    }
	  else
	    {
	      atype = build_array_type_nelts (char_type_node, padding_bytes);
	      src = build_constructor (atype, NULL);
	    }
	  tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
				 build_int_cst (buf->alias_type,
						buf->off + end
						- padding_bytes));
	  gimple *g = gimple_build_assign (dst, src);
	  gimple_set_location (g, buf->loc);
	  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	}
      size_t end_rem = end % UNITS_PER_WORD;
      buf->off += end - end_rem;
      buf->size = end_rem;
      memset (buf->buf, 0, buf->size);
      buf->padding_bytes = 0;
    }
  else
    {
      memmove (buf->buf, buf->buf + end, buf->size - end);
      buf->off += end;
      buf->size -= end;
      buf->padding_bytes = padding_bytes;
    }
}
4485
/* Append PADDING_BYTES padding bytes (all-ones bytes) to BUF->buf,
   flushing as needed when the run would overflow the buffer.  */

static void
clear_padding_add_padding (clear_padding_struct *buf,
			   HOST_WIDE_INT padding_bytes)
{
  if (padding_bytes == 0)
    return;
  if ((unsigned HOST_WIDE_INT) padding_bytes + buf->size
      > (unsigned HOST_WIDE_INT) clear_padding_buf_size)
    clear_padding_flush (buf, false);
  if ((unsigned HOST_WIDE_INT) padding_bytes + buf->size
      > (unsigned HOST_WIDE_INT) clear_padding_buf_size)
    {
      /* Even after flushing the run doesn't fit: fill the buffer,
	 flush once more, and carry the remainder as buf->padding_bytes
	 plus a small all-ones tail kept word-aligned in buf->buf.  */
      memset (buf->buf + buf->size, ~0, clear_padding_buf_size - buf->size);
      padding_bytes -= clear_padding_buf_size - buf->size;
      buf->size = clear_padding_buf_size;
      clear_padding_flush (buf, false);
      gcc_assert (buf->padding_bytes);
      /* At this point buf->buf[0] through buf->buf[buf->size - 1]
	 is guaranteed to be all ones.  */
      padding_bytes += buf->size;
      buf->size = padding_bytes % UNITS_PER_WORD;
      memset (buf->buf, ~0, buf->size);
      buf->off += padding_bytes - buf->size;
      buf->padding_bytes += padding_bytes - buf->size;
    }
  else
    {
      memset (buf->buf + buf->size, ~0, padding_bytes);
      buf->size += padding_bytes;
    }
}
4519
static void clear_padding_type (clear_padding_struct *, tree, HOST_WIDE_INT);

/* Clear padding bits of union type TYPE.  A byte of the union is padding
   only if it is padding in every member, so each member's padding mask is
   ANDed into a shared union_ptr mask instead of emitting code directly.  */

static void
clear_padding_union (clear_padding_struct *buf, tree type, HOST_WIDE_INT sz)
{
  clear_padding_struct *union_buf;
  HOST_WIDE_INT start_off = 0, next_off = 0;
  size_t start_size = 0;
  if (buf->union_ptr)
    {
      /* Already inside a union: reuse BUF, remembering where this nested
	 union starts so each member can be rescanned from there.  */
      start_off = buf->off + buf->size;
      next_off = start_off + sz;
      start_size = start_off % UNITS_PER_WORD;
      start_off -= start_size;
      clear_padding_flush (buf, true);
      union_buf = buf;
    }
  else
    {
      /* Outermost union: set up a scratch buffer whose union_ptr mask
	 starts as all-ones (everything padding until proven otherwise).  */
      if (sz + buf->size > clear_padding_buf_size)
	clear_padding_flush (buf, false);
      union_buf = XALLOCA (clear_padding_struct);
      union_buf->loc = buf->loc;
      union_buf->clear_in_mask = buf->clear_in_mask;
      union_buf->base = NULL_TREE;
      union_buf->alias_type = NULL_TREE;
      union_buf->gsi = NULL;
      union_buf->align = 0;
      union_buf->off = 0;
      union_buf->padding_bytes = 0;
      union_buf->sz = sz;
      union_buf->size = 0;
      if (sz + buf->size <= clear_padding_buf_size)
	union_buf->union_ptr = buf->buf + buf->size;
      else
	union_buf->union_ptr = XNEWVEC (unsigned char, sz);
      memset (union_buf->union_ptr, ~0, sz);
    }

  for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL && !DECL_PADDING_P (field))
      {
	if (DECL_SIZE_UNIT (field) == NULL_TREE)
	  {
	    if (TREE_TYPE (field) == error_mark_node)
	      continue;
	    gcc_assert (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
			&& !COMPLETE_TYPE_P (TREE_TYPE (field)));
	    if (!buf->clear_in_mask)
	      error_at (buf->loc, "flexible array member %qD does not have "
			"well defined padding bits for %qs",
			field, "__builtin_clear_padding");
	    continue;
	  }
	/* Scan each member from the union's start offset; trailing bytes
	   beyond the member count as padding of the union.  */
	HOST_WIDE_INT fldsz = tree_to_shwi (DECL_SIZE_UNIT (field));
	gcc_assert (union_buf->size == 0);
	union_buf->off = start_off;
	union_buf->size = start_size;
	memset (union_buf->buf, ~0, start_size);
	clear_padding_type (union_buf, TREE_TYPE (field), fldsz);
	clear_padding_add_padding (union_buf, sz - fldsz);
	clear_padding_flush (union_buf, true);
      }

  if (buf == union_buf)
    {
      /* Nested union case: resume after the union, word-aligned.  */
      buf->off = next_off;
      buf->size = next_off % UNITS_PER_WORD;
      buf->off -= buf->size;
      memset (buf->buf, ~0, buf->size);
    }
  else if (sz + buf->size <= clear_padding_buf_size)
    buf->size += sz;
  else
    {
      /* The mask was heap-allocated: copy it into BUF in chunks,
	 flushing between chunks, then release it.  */
      unsigned char *union_ptr = union_buf->union_ptr;
      while (sz)
	{
	  clear_padding_flush (buf, false);
	  HOST_WIDE_INT this_sz
	    = MIN ((unsigned HOST_WIDE_INT) sz,
		   clear_padding_buf_size - buf->size);
	  memcpy (buf->buf + buf->size, union_ptr, this_sz);
	  buf->size += this_sz;
	  union_ptr += this_sz;
	  sz -= this_sz;
	}
      XDELETE (union_buf->union_ptr);
    }
}
4612
4613/* The only known floating point formats with padding bits are the
4614 IEEE extended ones. */
4615
4616static bool
4617clear_padding_real_needs_padding_p (tree type)
4618{
4619 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
4620 return (fmt->b == 2
4621 && fmt->signbit_ro == fmt->signbit_rw
4622 && (fmt->signbit_ro == 79 || fmt->signbit_ro == 95));
4623}
4624
4625/* Return true if TYPE might contain any padding bits. */
4626
4627static bool
4628clear_padding_type_may_have_padding_p (tree type)
4629{
4630 switch (TREE_CODE (type))
4631 {
4632 case RECORD_TYPE:
4633 case UNION_TYPE:
4634 return true;
4635 case ARRAY_TYPE:
4636 case COMPLEX_TYPE:
4637 case VECTOR_TYPE:
4638 return clear_padding_type_may_have_padding_p (TREE_TYPE (type));
4639 case REAL_TYPE:
4640 return clear_padding_real_needs_padding_p (type);
4641 default:
4642 return false;
4643 }
4644}
4645
/* Emit a runtime loop:
   for (; buf.base != end; buf.base += sz)
     __builtin_clear_padding (buf.base);

   BUF describes the buffer state and insertion point, TYPE is the
   element type cleared on each iteration, END is the one-past-the-end
   pointer the loop runs up to.  The loop is emitted bottom-tested
   (initial goto to the condition label).  */

static void
clear_padding_emit_loop (clear_padding_struct *buf, tree type, tree end)
{
  /* l1: loop body, l2: condition test, l3: loop exit.  */
  tree l1 = create_artificial_label (buf->loc);
  tree l2 = create_artificial_label (buf->loc);
  tree l3 = create_artificial_label (buf->loc);
  /* Jump to the condition first so a zero-iteration loop is possible.  */
  gimple *g = gimple_build_goto (l2);
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
  g = gimple_build_label (l1);
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
  /* Body: clear padding of one element, then flush all pending bytes
     so nothing is buffered across iterations.  */
  clear_padding_type (buf, type, buf->sz);
  clear_padding_flush (buf, true);
  /* Advance the induction pointer by one element size.  */
  g = gimple_build_assign (buf->base, POINTER_PLUS_EXPR, buf->base,
			   size_int (buf->sz));
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
  g = gimple_build_label (l2);
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
  /* Loop while the induction pointer has not reached END.  */
  g = gimple_build_cond (NE_EXPR, buf->base, end, l1, l3);
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
  g = gimple_build_label (l3);
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
}
4678
/* Clear padding bits for TYPE.  Called recursively from
   gimple_fold_builtin_clear_padding.  SZ is the size in bytes to
   process (may be smaller than the type size, e.g. inside unions).
   Bytes describing padding are recorded in BUF; non-padding bytes
   are zeroed in the tracking buffer so stores are only emitted for
   actual padding.  */

static void
clear_padding_type (clear_padding_struct *buf, tree type, HOST_WIDE_INT sz)
{
  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      /* Walk the fields in layout order, tracking CUR_POS, the next
	 byte position not yet accounted for.  Gaps between fields are
	 padding.  */
      HOST_WIDE_INT cur_pos;
      cur_pos = 0;
      for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL && !DECL_PADDING_P (field))
	  {
	    tree ftype = TREE_TYPE (field);
	    if (DECL_BIT_FIELD (field))
	      {
		/* Bit-fields: first mark the whole covered byte span as
		   padding, then punch out the bits the field actually
		   occupies directly in the tracking buffer.  */
		HOST_WIDE_INT fldsz = TYPE_PRECISION (ftype);
		if (fldsz == 0)
		  continue;
		HOST_WIDE_INT pos = int_byte_position (field);
		/* Field lies outside the SZ bytes being processed
		   (can happen inside unions).  */
		if (pos >= sz)
		  continue;
		HOST_WIDE_INT bpos
		  = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
		bpos %= BITS_PER_UNIT;
		/* END is the number of bytes (from POS) the bit-field
		   touches, rounded up to a whole byte.  */
		HOST_WIDE_INT end
		  = ROUND_UP (bpos + fldsz, BITS_PER_UNIT) / BITS_PER_UNIT;
		if (pos + end > cur_pos)
		  {
		    clear_padding_add_padding (buf, pos + end - cur_pos);
		    cur_pos = pos + end;
		  }
		gcc_assert (cur_pos > pos
			    && ((unsigned HOST_WIDE_INT) buf->size
				>= (unsigned HOST_WIDE_INT) cur_pos - pos));
		/* P points at the first tracking byte of this field.  */
		unsigned char *p = buf->buf + buf->size - (cur_pos - pos);
		if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
		  sorry_at (buf->loc, "PDP11 bit-field handling unsupported"
			    " in %qs", "__builtin_clear_padding");
		else if (BYTES_BIG_ENDIAN)
		  {
		    /* Big endian.  */
		    if (bpos + fldsz <= BITS_PER_UNIT)
		      /* Field fits in one byte: clear its bit range.  */
		      *p &= ~(((1 << fldsz) - 1)
			      << (BITS_PER_UNIT - bpos - fldsz));
		    else
		      {
			/* Leading partial byte, then whole bytes, then a
			   trailing partial byte.  */
			if (bpos)
			  {
			    *p &= ~(((1U << BITS_PER_UNIT) - 1) >> bpos);
			    p++;
			    fldsz -= BITS_PER_UNIT - bpos;
			  }
			memset (p, 0, fldsz / BITS_PER_UNIT);
			p += fldsz / BITS_PER_UNIT;
			fldsz %= BITS_PER_UNIT;
			if (fldsz)
			  *p &= ((1U << BITS_PER_UNIT) - 1) >> fldsz;
		      }
		  }
		else
		  {
		    /* Little endian.  */
		    if (bpos + fldsz <= BITS_PER_UNIT)
		      *p &= ~(((1 << fldsz) - 1) << bpos);
		    else
		      {
			if (bpos)
			  {
			    *p &= ~(((1 << BITS_PER_UNIT) - 1) << bpos);
			    p++;
			    fldsz -= BITS_PER_UNIT - bpos;
			  }
			memset (p, 0, fldsz / BITS_PER_UNIT);
			p += fldsz / BITS_PER_UNIT;
			fldsz %= BITS_PER_UNIT;
			if (fldsz)
			  *p &= ~((1 << fldsz) - 1);
		      }
		  }
	      }
	    else if (DECL_SIZE_UNIT (field) == NULL_TREE)
	      {
		/* A field without a size: only valid as a flexible array
		   member, whose padding is not well defined.  */
		if (ftype == error_mark_node)
		  continue;
		gcc_assert (TREE_CODE (ftype) == ARRAY_TYPE
			    && !COMPLETE_TYPE_P (ftype));
		if (!buf->clear_in_mask)
		  error_at (buf->loc, "flexible array member %qD does not "
			    "have well defined padding bits for %qs",
			    field, "__builtin_clear_padding");
	      }
	    else if (is_empty_type (TREE_TYPE (field)))
	      /* Empty types occupy no bits; the bytes under them are
		 handled as inter-field padding.  */
	      continue;
	    else
	      {
		HOST_WIDE_INT pos = int_byte_position (field);
		/* Field lies outside the SZ bytes being processed.  */
		if (pos >= sz)
		  continue;
		HOST_WIDE_INT fldsz = tree_to_shwi (DECL_SIZE_UNIT (field));
		gcc_assert (pos >= 0 && fldsz >= 0 && pos >= cur_pos);
		/* Bytes between CUR_POS and POS are padding; then recurse
		   into the field's own type.  */
		clear_padding_add_padding (buf, pos - cur_pos);
		cur_pos = pos;
		clear_padding_type (buf, TREE_TYPE (field), fldsz);
		cur_pos += fldsz;
	      }
	  }
      /* Trailing bytes after the last field are padding too.  */
      gcc_assert (sz >= cur_pos);
      clear_padding_add_padding (buf, sz - cur_pos);
      break;
    case ARRAY_TYPE:
      HOST_WIDE_INT nelts, fldsz;
      fldsz = int_size_in_bytes (TREE_TYPE (type));
      if (fldsz == 0)
	break;
      nelts = sz / fldsz;
      if (nelts > 1
	  && sz > 8 * UNITS_PER_WORD
	  && buf->union_ptr == NULL
	  && clear_padding_type_may_have_padding_p (TREE_TYPE (type)))
	{
	  /* For sufficiently large array of more than one elements,
	     emit a runtime loop to keep code size manageable.  */
	  tree base = buf->base;
	  unsigned int prev_align = buf->align;
	  HOST_WIDE_INT off = buf->off + buf->size;
	  HOST_WIDE_INT prev_sz = buf->sz;
	  clear_padding_flush (buf, true);
	  tree elttype = TREE_TYPE (type);
	  /* Loop induction pointer and its one-past-the-end bound.  */
	  buf->base = create_tmp_var (build_pointer_type (elttype));
	  tree end = make_ssa_name (TREE_TYPE (buf->base));
	  gimple *g = gimple_build_assign (buf->base, POINTER_PLUS_EXPR,
					   base, size_int (off));
	  gimple_set_location (g, buf->loc);
	  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	  g = gimple_build_assign (end, POINTER_PLUS_EXPR, buf->base,
				   size_int (sz));
	  gimple_set_location (g, buf->loc);
	  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	  /* Retarget BUF at one element, emit the loop, then restore
	     the saved outer state.  */
	  buf->sz = fldsz;
	  buf->align = TYPE_ALIGN (elttype);
	  buf->off = 0;
	  buf->size = 0;
	  clear_padding_emit_loop (buf, elttype, end);
	  buf->base = base;
	  buf->sz = prev_sz;
	  buf->align = prev_align;
	  buf->size = off % UNITS_PER_WORD;
	  buf->off = off - buf->size;
	  memset (buf->buf, 0, buf->size);
	  break;
	}
      /* Small arrays: just recurse once per element.  */
      for (HOST_WIDE_INT i = 0; i < nelts; i++)
	clear_padding_type (buf, TREE_TYPE (type), fldsz);
      break;
    case UNION_TYPE:
      clear_padding_union (buf, type, sz);
      break;
    case REAL_TYPE:
      gcc_assert ((size_t) sz <= clear_padding_unit);
      if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
	clear_padding_flush (buf, false);
      if (clear_padding_real_needs_padding_p (type))
	{
	  /* Use native_interpret_expr + native_encode_expr to figure out
	     which bits are padding.  Encode an all-ones pattern; bytes
	     the format does not write back come out as padding after
	     the final XOR.  */
	  memset (buf->buf + buf->size, ~0, sz);
	  tree cst = native_interpret_expr (type, buf->buf + buf->size, sz);
	  gcc_assert (cst && TREE_CODE (cst) == REAL_CST);
	  int len = native_encode_expr (cst, buf->buf + buf->size, sz);
	  gcc_assert (len > 0 && (size_t) len == (size_t) sz);
	  for (size_t i = 0; i < (size_t) sz; i++)
	    buf->buf[buf->size + i] ^= ~0;
	}
      else
	/* No padding: mark all SZ bytes as value bytes.  */
	memset (buf->buf + buf->size, 0, sz);
      buf->size += sz;
      break;
    case COMPLEX_TYPE:
      /* Real part followed by imaginary part.  */
      fldsz = int_size_in_bytes (TREE_TYPE (type));
      clear_padding_type (buf, TREE_TYPE (type), fldsz);
      clear_padding_type (buf, TREE_TYPE (type), fldsz);
      break;
    case VECTOR_TYPE:
      nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
      fldsz = int_size_in_bytes (TREE_TYPE (type));
      for (HOST_WIDE_INT i = 0; i < nelts; i++)
	clear_padding_type (buf, TREE_TYPE (type), fldsz);
      break;
    case NULLPTR_TYPE:
      /* All bits of a nullptr_t object are padding (its value does
	 not depend on its representation).  */
      gcc_assert ((size_t) sz <= clear_padding_unit);
      if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
	clear_padding_flush (buf, false);
      memset (buf->buf + buf->size, ~0, sz);
      buf->size += sz;
      break;
    default:
      /* Scalars: no padding, all SZ bytes are value bytes.  */
      gcc_assert ((size_t) sz <= clear_padding_unit);
      if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
	clear_padding_flush (buf, false);
      memset (buf->buf + buf->size, 0, sz);
      buf->size += sz;
      break;
    }
}
4885
896048cf
JJ
4886/* Clear padding bits of TYPE in MASK. */
4887
4888void
4889clear_type_padding_in_mask (tree type, unsigned char *mask)
4890{
4891 clear_padding_struct buf;
4892 buf.loc = UNKNOWN_LOCATION;
4893 buf.clear_in_mask = true;
4894 buf.base = NULL_TREE;
4895 buf.alias_type = NULL_TREE;
4896 buf.gsi = NULL;
4897 buf.align = 0;
4898 buf.off = 0;
4899 buf.padding_bytes = 0;
4900 buf.sz = int_size_in_bytes (type);
4901 buf.size = 0;
4902 buf.union_ptr = mask;
4903 clear_padding_type (&buf, type, buf.sz);
4904 clear_padding_flush (&buf, true);
4905}
4906
1bea0d0a
JJ
/* Fold __builtin_clear_padding builtin.  The call takes a pointer to
   the object and a dummy second argument whose pointed-to type carries
   the type whose padding is to be cleared.  Always returns true; the
   call itself is removed (possibly replaced by emitted stores).  */

static bool
gimple_fold_builtin_clear_padding (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  gcc_assert (gimple_call_num_args (stmt) == 2);
  tree ptr = gimple_call_arg (stmt, 0);
  tree typearg = gimple_call_arg (stmt, 1);
  /* The type to clear is encoded as pointee of the second argument.  */
  tree type = TREE_TYPE (TREE_TYPE (typearg));
  location_t loc = gimple_location (stmt);
  clear_padding_struct buf;
  gimple_stmt_iterator gsiprev = *gsi;
  /* This should be folded during the lower pass.  */
  gcc_assert (!gimple_in_ssa_p (cfun) && cfun->cfg == NULL);
  gcc_assert (COMPLETE_TYPE_P (type));
  /* Remember the statement before the call, to detect below whether
     anything was inserted.  */
  gsi_prev (&gsiprev);

  buf.loc = loc;
  buf.clear_in_mask = false;
  buf.base = ptr;
  buf.alias_type = NULL_TREE;
  buf.gsi = gsi;
  /* Stores may assume the larger of pointer and type alignment.  */
  buf.align = get_pointer_alignment (ptr);
  unsigned int talign = min_align_of_type (type) * BITS_PER_UNIT;
  buf.align = MAX (buf.align, talign);
  buf.off = 0;
  buf.padding_bytes = 0;
  buf.size = 0;
  buf.sz = int_size_in_bytes (type);
  buf.union_ptr = NULL;
  if (buf.sz < 0 && int_size_in_bytes (strip_array_types (type)) < 0)
    /* Variable-size element type (e.g. VLA struct): unsupported.  */
    sorry_at (loc, "%s not supported for variable length aggregates",
	      "__builtin_clear_padding");
  /* The implementation currently assumes 8-bit host and target
     chars which is the case for all currently supported targets
     and hosts and is required e.g. for native_{encode,interpret}* APIs.  */
  else if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
    sorry_at (loc, "%s not supported on this target",
	      "__builtin_clear_padding");
  else if (!clear_padding_type_may_have_padding_p (type))
    /* Nothing to do: the type provably has no padding.  */
    ;
  else if (TREE_CODE (type) == ARRAY_TYPE && buf.sz < 0)
    {
      tree sz = TYPE_SIZE_UNIT (type);
      tree elttype = type;
      /* Only supports C/C++ VLAs and flattens all the VLA levels.  */
      while (TREE_CODE (elttype) == ARRAY_TYPE
	     && int_size_in_bytes (elttype) < 0)
	elttype = TREE_TYPE (elttype);
      HOST_WIDE_INT eltsz = int_size_in_bytes (elttype);
      gcc_assert (eltsz >= 0);
      if (eltsz)
	{
	  /* Emit a runtime loop over the VLA elements.  */
	  buf.base = create_tmp_var (build_pointer_type (elttype));
	  tree end = make_ssa_name (TREE_TYPE (buf.base));
	  gimple *g = gimple_build_assign (buf.base, ptr);
	  gimple_set_location (g, loc);
	  gsi_insert_before (gsi, g, GSI_SAME_STMT);
	  g = gimple_build_assign (end, POINTER_PLUS_EXPR, buf.base, sz);
	  gimple_set_location (g, loc);
	  gsi_insert_before (gsi, g, GSI_SAME_STMT);
	  buf.sz = eltsz;
	  buf.align = TYPE_ALIGN (elttype);
	  buf.alias_type = build_pointer_type (elttype);
	  clear_padding_emit_loop (&buf, elttype, end);
	}
    }
  else
    {
      /* Fixed-size case: expand to direct stores.  The base must be a
	 valid MEM_REF address operand.  */
      if (!is_gimple_mem_ref_addr (buf.base))
	{
	  buf.base = make_ssa_name (TREE_TYPE (ptr));
	  gimple *g = gimple_build_assign (buf.base, ptr);
	  gimple_set_location (g, loc);
	  gsi_insert_before (gsi, g, GSI_SAME_STMT);
	}
      buf.alias_type = build_pointer_type (type);
      clear_padding_type (&buf, type, buf.sz);
      clear_padding_flush (&buf, true);
    }

  /* Remove the call.  If nothing was inserted before it, replace it
     with a GIMPLE_NOP to keep the iterator valid for the caller;
     otherwise remove it and back the iterator up to the last
     inserted statement.  */
  gimple_stmt_iterator gsiprev2 = *gsi;
  gsi_prev (&gsiprev2);
  if (gsi_stmt (gsiprev) == gsi_stmt (gsiprev2))
    gsi_replace (gsi, gimple_build_nop (), true);
  else
    {
      gsi_remove (gsi, true);
      *gsi = gsiprev2;
    }
  return true;
}
5000
dcb7fae2
RB
/* Fold the non-target builtin at *GSI and return whether any simplification
   was made.  Dispatches on the builtin's function code to the dedicated
   gimple_fold_builtin_* helpers, falling back to the generic tree-level
   folder (fold_call_stmt) when no specific handler applies.  */

static bool
gimple_fold_builtin (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
  tree callee = gimple_call_fndecl (stmt);

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return false;

  unsigned n = gimple_call_num_args (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
  switch (fcode)
    {
    case BUILT_IN_BCMP:
      return gimple_fold_builtin_bcmp (gsi);
    case BUILT_IN_BCOPY:
      return gimple_fold_builtin_bcopy (gsi);
    case BUILT_IN_BZERO:
      return gimple_fold_builtin_bzero (gsi);

    case BUILT_IN_MEMSET:
      return gimple_fold_builtin_memset (gsi,
					 gimple_call_arg (stmt, 1),
					 gimple_call_arg (stmt, 2));
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMPCPY:
    case BUILT_IN_MEMMOVE:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), fcode);
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      return gimple_fold_builtin_sprintf_chk (gsi, fcode);
    case BUILT_IN_STRCAT_CHK:
      return gimple_fold_builtin_strcat_chk (gsi);
    case BUILT_IN_STRNCAT_CHK:
      return gimple_fold_builtin_strncat_chk (gsi);
    case BUILT_IN_STRLEN:
      return gimple_fold_builtin_strlen (gsi);
    case BUILT_IN_STRCPY:
      return gimple_fold_builtin_strcpy (gsi,
					 gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCPY:
      return gimple_fold_builtin_strncpy (gsi,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2));
    case BUILT_IN_STRCAT:
      return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCAT:
      return gimple_fold_builtin_strncat (gsi);
    /* index/rindex are the BSD spellings of strchr/strrchr; the bool
       argument selects forward (false) or reverse (true) search.  */
    case BUILT_IN_INDEX:
    case BUILT_IN_STRCHR:
      return gimple_fold_builtin_strchr (gsi, false);
    case BUILT_IN_RINDEX:
    case BUILT_IN_STRRCHR:
      return gimple_fold_builtin_strchr (gsi, true);
    case BUILT_IN_STRSTR:
      return gimple_fold_builtin_strstr (gsi);
    case BUILT_IN_STRCMP:
    case BUILT_IN_STRCMP_EQ:
    case BUILT_IN_STRCASECMP:
    case BUILT_IN_STRNCMP:
    case BUILT_IN_STRNCMP_EQ:
    case BUILT_IN_STRNCASECMP:
      return gimple_fold_builtin_string_compare (gsi);
    case BUILT_IN_MEMCHR:
      return gimple_fold_builtin_memchr (gsi);
    case BUILT_IN_FPUTS:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), false);
    case BUILT_IN_FPUTS_UNLOCKED:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), true);
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      return gimple_fold_builtin_memory_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     gimple_call_arg (stmt, 3),
					     fcode);
    case BUILT_IN_STPCPY:
      return gimple_fold_builtin_stpcpy (gsi);
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      return gimple_fold_builtin_stxcpy_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     fcode);
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      return gimple_fold_builtin_stxncpy_chk (gsi,
					      gimple_call_arg (stmt, 0),
					      gimple_call_arg (stmt, 1),
					      gimple_call_arg (stmt, 2),
					      gimple_call_arg (stmt, 3),
					      fcode);
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      return gimple_fold_builtin_snprintf_chk (gsi, fcode);

    /* The printf family is only handled at specific argument counts;
       the optional argument is the first variadic one.  */
    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      if (n == 2 || n == 3)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1),
					    n == 3
					    ? gimple_call_arg (stmt, 2)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      if (n == 3 || n == 4)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 2),
					    n == 4
					    ? gimple_call_arg (stmt, 3)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      if (n == 1 || n == 2)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
					   n == 2
					   ? gimple_call_arg (stmt, 1)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      if (n == 2 || n == 3)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
					   n == 3
					   ? gimple_call_arg (stmt, 2)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_ACC_ON_DEVICE:
      return gimple_fold_builtin_acc_on_device (gsi,
						gimple_call_arg (stmt, 0));
    case BUILT_IN_REALLOC:
      return gimple_fold_builtin_realloc (gsi);

    case BUILT_IN_CLEAR_PADDING:
      return gimple_fold_builtin_clear_padding (gsi);

    default:;
    }

  /* Try the generic builtin folder.  */
  bool ignore = (gimple_call_lhs (stmt) == NULL);
  tree result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      else
	result = fold_convert (gimple_call_return_type (stmt), result);
      gimplify_and_update_call_from_tree (gsi, result);
      return true;
    }

  return false;
}
5179
451e8dae
NS
5180/* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
5181 function calls to constants, where possible. */
5182
5183static tree
5184fold_internal_goacc_dim (const gimple *call)
5185{
629b3d75
MJ
5186 int axis = oacc_get_ifn_dim_arg (call);
5187 int size = oacc_get_fn_dim_size (current_function_decl, axis);
451e8dae 5188 tree result = NULL_TREE;
67d2229e 5189 tree type = TREE_TYPE (gimple_call_lhs (call));
451e8dae 5190
67d2229e 5191 switch (gimple_call_internal_fn (call))
451e8dae 5192 {
67d2229e
TV
5193 case IFN_GOACC_DIM_POS:
5194 /* If the size is 1, we know the answer. */
5195 if (size == 1)
5196 result = build_int_cst (type, 0);
5197 break;
5198 case IFN_GOACC_DIM_SIZE:
5199 /* If the size is not dynamic, we know the answer. */
5200 if (size)
5201 result = build_int_cst (type, size);
5202 break;
5203 default:
5204 break;
451e8dae
NS
5205 }
5206
5207 return result;
5208}
5209
849a76a5
JJ
/* Return true if stmt is __atomic_compare_exchange_N call which is suitable
   for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
   &var where var is only addressable because of such calls.  */

bool
optimize_atomic_compare_exchange_p (gimple *stmt)
{
  /* Basic shape checks: the six-argument builtin, optimization and
     inline atomics enabled, no TSan/ASan (they intercept the calls),
     and the call must have virtual operands.  */
  if (gimple_call_num_args (stmt) != 6
      || !flag_inline_atomics
      || !optimize
      || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
      || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
      || !gimple_vdef (stmt)
      || !gimple_vuse (stmt))
    return false;

  /* Only the fixed-size __atomic_compare_exchange_{1,2,4,8,16} forms.  */
  tree fndecl = gimple_call_fndecl (stmt);
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
      break;
    default:
      return false;
    }

  /* The second argument must be the address of a plain variable.  */
  tree expected = gimple_call_arg (stmt, 1);
  if (TREE_CODE (expected) != ADDR_EXPR
      || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
    return false;

  /* The expected variable must be a local register-type scalar whose
     precision matches its mode, so the REALPART_EXPR store below is
     an exact replacement.  */
  tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
  if (!is_gimple_reg_type (etype)
      || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
      || TREE_THIS_VOLATILE (etype)
      || VECTOR_TYPE_P (etype)
      || TREE_CODE (etype) == COMPLEX_TYPE
      /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
	 might not preserve all the bits.  See PR71716.  */
      || SCALAR_FLOAT_TYPE_P (etype)
      || maybe_ne (TYPE_PRECISION (etype),
		   GET_MODE_BITSIZE (TYPE_MODE (etype))))
    return false;

  /* The weak flag must be a literal 0 or 1.  */
  tree weak = gimple_call_arg (stmt, 3);
  if (!integer_zerop (weak) && !integer_onep (weak))
    return false;

  /* ITYPE is the uintN_t type from the builtin's third parameter.  */
  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  machine_mode mode = TYPE_MODE (itype);

  /* The target must support a compare-and-swap in this mode.  */
  if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
      == CODE_FOR_nothing
      && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
    return false;

  /* Sizes of the expected variable and of the atomic mode must agree.  */
  if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
    return false;

  return true;
}
5275
/* Fold
     r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
   into
     _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
     i = IMAGPART_EXPR <t>;
     r = (_Bool) i;
     e = REALPART_EXPR <t>;

   i.e. replace the library-ABI builtin (which needs E to be
   addressable) with the internal function returning both the old
   value and the success flag as a complex pair, so E can become a
   gimple register.  Only called when
   optimize_atomic_compare_exchange_p returned true.  */

void
fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree fndecl = gimple_call_fndecl (stmt);
  /* ITYPE is the uintN_t type from the builtin's third parameter;
     the internal fn returns a _Complex of it.  */
  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  tree ctype = build_complex_type (itype);
  /* The variable behind &e.  */
  tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
  bool throws = false;
  edge e = NULL;
  /* Load the current value of EXPECTED into an SSA name.  */
  gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
				   expected);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);
  /* Remember where to leave the iterator for the caller.  */
  gimple_stmt_iterator gsiret = gsi_for_stmt (g);
  if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
    {
      /* Bit-cast EXPECTED's value to the uintN_t the ifn expects.  */
      g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, itype,
				       gimple_assign_lhs (g)));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }
  /* Encode weakness in bit 8 and the byte size in the low bits.  */
  int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
	     + int_size_in_bytes (itype);
  g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
				  gimple_call_arg (stmt, 0),
				  gimple_assign_lhs (g),
				  gimple_call_arg (stmt, 2),
				  build_int_cst (integer_type_node, flag),
				  gimple_call_arg (stmt, 4),
				  gimple_call_arg (stmt, 5));
  tree lhs = make_ssa_name (ctype);
  gimple_call_set_lhs (g, lhs);
  gimple_move_vops (g, stmt);
  tree oldlhs = gimple_call_lhs (stmt);
  if (stmt_can_throw_internal (cfun, stmt))
    {
      /* If the call can throw, follow-up statements must go on the
	 fallthru edge instead of after the call.  */
      throws = true;
      e = find_fallthru_edge (gsi_bb (*gsi)->succs);
    }
  gimple_call_set_nothrow (as_a <gcall *> (g),
			   gimple_call_nothrow_p (as_a <gcall *> (stmt)));
  gimple_call_set_lhs (stmt, NULL_TREE);
  gsi_replace (gsi, g, true);
  if (oldlhs)
    {
      /* r = (_Bool) IMAGPART_EXPR <t>;  -- the success flag.  */
      g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
			       build1 (IMAGPART_EXPR, itype, lhs));
      if (throws)
	{
	  gsi_insert_on_edge_immediate (e, g);
	  *gsi = gsi_for_stmt (g);
	}
      else
	gsi_insert_after (gsi, g, GSI_NEW_STMT);
      g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  /* e = REALPART_EXPR <t>;  -- the value observed in memory.  */
  g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
			   build1 (REALPART_EXPR, itype, lhs));
  if (throws && oldlhs == NULL_TREE)
    {
      gsi_insert_on_edge_immediate (e, g);
      *gsi = gsi_for_stmt (g);
    }
  else
    gsi_insert_after (gsi, g, GSI_NEW_STMT);
  if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
    {
      /* Bit-cast the old value back to EXPECTED's type.  */
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
			       VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
				       gimple_assign_lhs (g)));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
  gsi_insert_after (gsi, g, GSI_NEW_STMT);
  /* Leave the iterator at the first statement we inserted.  */
  *gsi = gsiret;
}
5363
1304953e
JJ
5364/* Return true if ARG0 CODE ARG1 in infinite signed precision operation
5365 doesn't fit into TYPE. The test for overflow should be regardless of
5366 -fwrapv, and even for unsigned types. */
5367
5368bool
5369arith_overflowed_p (enum tree_code code, const_tree type,
5370 const_tree arg0, const_tree arg1)
5371{
1304953e
JJ
5372 widest2_int warg0 = widest2_int_cst (arg0);
5373 widest2_int warg1 = widest2_int_cst (arg1);
5374 widest2_int wres;
5375 switch (code)
5376 {
5377 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
5378 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
5379 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
5380 default: gcc_unreachable ();
5381 }
5382 signop sign = TYPE_SIGN (type);
5383 if (sign == UNSIGNED && wi::neg_p (wres))
5384 return true;
5385 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
5386}
5387
868363d4
RS
5388/* If IFN_MASK_LOAD/STORE call CALL is unconditional, return a MEM_REF
5389 for the memory it references, otherwise return null. VECTYPE is the
5390 type of the memory vector. */
5391
5392static tree
5393gimple_fold_mask_load_store_mem_ref (gcall *call, tree vectype)
5394{
5395 tree ptr = gimple_call_arg (call, 0);
5396 tree alias_align = gimple_call_arg (call, 1);
5397 tree mask = gimple_call_arg (call, 2);
5398 if (!tree_fits_uhwi_p (alias_align) || !integer_all_onesp (mask))
5399 return NULL_TREE;
5400
aa204d51 5401 unsigned HOST_WIDE_INT align = tree_to_uhwi (alias_align);
868363d4
RS
5402 if (TYPE_ALIGN (vectype) != align)
5403 vectype = build_aligned_type (vectype, align);
5404 tree offset = build_zero_cst (TREE_TYPE (alias_align));
5405 return fold_build2 (MEM_REF, vectype, ptr, offset);
5406}
5407
5408/* Try to fold IFN_MASK_LOAD call CALL. Return true on success. */
5409
5410static bool
5411gimple_fold_mask_load (gimple_stmt_iterator *gsi, gcall *call)
5412{
5413 tree lhs = gimple_call_lhs (call);
5414 if (!lhs)
5415 return false;
5416
5417 if (tree rhs = gimple_fold_mask_load_store_mem_ref (call, TREE_TYPE (lhs)))
5418 {
5419 gassign *new_stmt = gimple_build_assign (lhs, rhs);
5420 gimple_set_location (new_stmt, gimple_location (call));
5421 gimple_move_vops (new_stmt, call);
5422 gsi_replace (gsi, new_stmt, false);
5423 return true;
5424 }
5425 return false;
5426}
5427
5428/* Try to fold IFN_MASK_STORE call CALL. Return true on success. */
5429
5430static bool
5431gimple_fold_mask_store (gimple_stmt_iterator *gsi, gcall *call)
5432{
5433 tree rhs = gimple_call_arg (call, 3);
5434 if (tree lhs = gimple_fold_mask_load_store_mem_ref (call, TREE_TYPE (rhs)))
5435 {
5436 gassign *new_stmt = gimple_build_assign (lhs, rhs);
5437 gimple_set_location (new_stmt, gimple_location (call));
5438 gimple_move_vops (new_stmt, call);
5439 gsi_replace (gsi, new_stmt, false);
5440 return true;
5441 }
5442 return false;
5443}
5444
cbdd87d4
RG
5445/* Attempt to fold a call statement referenced by the statement iterator GSI.
5446 The statement may be replaced by another statement, e.g., if the call
5447 simplifies to a constant value. Return true if any changes were made.
5448 It is assumed that the operands have been previously folded. */
5449
e021c122 5450static bool
ceeffab0 5451gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
cbdd87d4 5452{
538dd0b7 5453 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3b45a007 5454 tree callee;
e021c122 5455 bool changed = false;
3b45a007
RG
5456
5457 /* Check for virtual calls that became direct calls. */
5458 callee = gimple_call_fn (stmt);
25583c4f 5459 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
3b45a007 5460 {
49c471e3
MJ
5461 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
5462 {
450ad0cd
JH
5463 if (dump_file && virtual_method_call_p (callee)
5464 && !possible_polymorphic_call_target_p
6f8091fc
JH
5465 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
5466 (OBJ_TYPE_REF_EXPR (callee)))))
450ad0cd
JH
5467 {
5468 fprintf (dump_file,
a70e9985 5469 "Type inheritance inconsistent devirtualization of ");
450ad0cd
JH
5470 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5471 fprintf (dump_file, " to ");
5472 print_generic_expr (dump_file, callee, TDF_SLIM);
5473 fprintf (dump_file, "\n");
5474 }
5475
49c471e3 5476 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
e021c122
RG
5477 changed = true;
5478 }
a70e9985 5479 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
e021c122 5480 {
61dd6a2e
JH
5481 bool final;
5482 vec <cgraph_node *>targets
058d0a90 5483 = possible_polymorphic_call_targets (callee, stmt, &final);
2b5f0895 5484 if (final && targets.length () <= 1 && dbg_cnt (devirt))
e021c122 5485 {
a70e9985 5486 tree lhs = gimple_call_lhs (stmt);
2b5f0895
XDL
5487 if (dump_enabled_p ())
5488 {
4f5b9c80 5489 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
2b5f0895
XDL
5490 "folding virtual function call to %s\n",
5491 targets.length () == 1
5492 ? targets[0]->name ()
5493 : "__builtin_unreachable");
5494 }
61dd6a2e 5495 if (targets.length () == 1)
cf3e5a89 5496 {
18954840
JJ
5497 tree fndecl = targets[0]->decl;
5498 gimple_call_set_fndecl (stmt, fndecl);
cf3e5a89 5499 changed = true;
18954840
JJ
5500 /* If changing the call to __cxa_pure_virtual
5501 or similar noreturn function, adjust gimple_call_fntype
5502 too. */
865f7046 5503 if (gimple_call_noreturn_p (stmt)
18954840
JJ
5504 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
5505 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
5506 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
5507 == void_type_node))
5508 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
a70e9985 5509 /* If the call becomes noreturn, remove the lhs. */
3cee7e4e
MP
5510 if (lhs
5511 && gimple_call_noreturn_p (stmt)
18954840 5512 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
abd3a68c 5513 || should_remove_lhs_p (lhs)))
a70e9985
JJ
5514 {
5515 if (TREE_CODE (lhs) == SSA_NAME)
5516 {
b731b390 5517 tree var = create_tmp_var (TREE_TYPE (lhs));
a70e9985 5518 tree def = get_or_create_ssa_default_def (cfun, var);
355fe088 5519 gimple *new_stmt = gimple_build_assign (lhs, def);
a70e9985
JJ
5520 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
5521 }
5522 gimple_call_set_lhs (stmt, NULL_TREE);
5523 }
0b986c6a 5524 maybe_remove_unused_call_args (cfun, stmt);
cf3e5a89 5525 }
a70e9985 5526 else
cf3e5a89
JJ
5527 {
5528 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
355fe088 5529 gimple *new_stmt = gimple_build_call (fndecl, 0);
cf3e5a89 5530 gimple_set_location (new_stmt, gimple_location (stmt));
2da6996c
RB
5531 /* If the call had a SSA name as lhs morph that into
5532 an uninitialized value. */
a70e9985
JJ
5533 if (lhs && TREE_CODE (lhs) == SSA_NAME)
5534 {
b731b390 5535 tree var = create_tmp_var (TREE_TYPE (lhs));
2da6996c
RB
5536 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
5537 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
5538 set_ssa_default_def (cfun, var, lhs);
42e52a51 5539 }
779724a5 5540 gimple_move_vops (new_stmt, stmt);
2da6996c 5541 gsi_replace (gsi, new_stmt, false);
cf3e5a89
JJ
5542 return true;
5543 }
e021c122 5544 }
49c471e3 5545 }
e021c122 5546 }
49c471e3 5547
f2d3d07e
RH
5548 /* Check for indirect calls that became direct calls, and then
5549 no longer require a static chain. */
5550 if (gimple_call_chain (stmt))
5551 {
5552 tree fn = gimple_call_fndecl (stmt);
5553 if (fn && !DECL_STATIC_CHAIN (fn))
5554 {
5555 gimple_call_set_chain (stmt, NULL);
5556 changed = true;
5557 }
f2d3d07e
RH
5558 }
5559
e021c122
RG
5560 if (inplace)
5561 return changed;
5562
5563 /* Check for builtins that CCP can handle using information not
5564 available in the generic fold routines. */
fef5a0d9
RB
5565 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
5566 {
5567 if (gimple_fold_builtin (gsi))
5568 changed = true;
5569 }
5570 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
e021c122 5571 {
ea679d55 5572 changed |= targetm.gimple_fold_builtin (gsi);
3b45a007 5573 }
368b454d 5574 else if (gimple_call_internal_p (stmt))
ed9c79e1 5575 {
368b454d
JJ
5576 enum tree_code subcode = ERROR_MARK;
5577 tree result = NULL_TREE;
1304953e
JJ
5578 bool cplx_result = false;
5579 tree overflow = NULL_TREE;
368b454d
JJ
5580 switch (gimple_call_internal_fn (stmt))
5581 {
5582 case IFN_BUILTIN_EXPECT:
5583 result = fold_builtin_expect (gimple_location (stmt),
5584 gimple_call_arg (stmt, 0),
5585 gimple_call_arg (stmt, 1),
1e9168b2
ML
5586 gimple_call_arg (stmt, 2),
5587 NULL_TREE);
368b454d 5588 break;
0e82f089 5589 case IFN_UBSAN_OBJECT_SIZE:
ca1150f0
JJ
5590 {
5591 tree offset = gimple_call_arg (stmt, 1);
5592 tree objsize = gimple_call_arg (stmt, 2);
5593 if (integer_all_onesp (objsize)
5594 || (TREE_CODE (offset) == INTEGER_CST
5595 && TREE_CODE (objsize) == INTEGER_CST
5596 && tree_int_cst_le (offset, objsize)))
5597 {
5598 replace_call_with_value (gsi, NULL_TREE);
5599 return true;
5600 }
5601 }
5602 break;
5603 case IFN_UBSAN_PTR:
5604 if (integer_zerop (gimple_call_arg (stmt, 1)))
0e82f089 5605 {
ca1150f0 5606 replace_call_with_value (gsi, NULL_TREE);
0e82f089
MP
5607 return true;
5608 }
5609 break;
ca1150f0
JJ
5610 case IFN_UBSAN_BOUNDS:
5611 {
5612 tree index = gimple_call_arg (stmt, 1);
5613 tree bound = gimple_call_arg (stmt, 2);
5614 if (TREE_CODE (index) == INTEGER_CST
5615 && TREE_CODE (bound) == INTEGER_CST)
5616 {
5617 index = fold_convert (TREE_TYPE (bound), index);
5618 if (TREE_CODE (index) == INTEGER_CST
5619 && tree_int_cst_le (index, bound))
5620 {
5621 replace_call_with_value (gsi, NULL_TREE);
5622 return true;
5623 }
5624 }
5625 }
5626 break;
451e8dae
NS
5627 case IFN_GOACC_DIM_SIZE:
5628 case IFN_GOACC_DIM_POS:
5629 result = fold_internal_goacc_dim (stmt);
5630 break;
368b454d
JJ
5631 case IFN_UBSAN_CHECK_ADD:
5632 subcode = PLUS_EXPR;
5633 break;
5634 case IFN_UBSAN_CHECK_SUB:
5635 subcode = MINUS_EXPR;
5636 break;
5637 case IFN_UBSAN_CHECK_MUL:
5638 subcode = MULT_EXPR;
5639 break;
1304953e
JJ
5640 case IFN_ADD_OVERFLOW:
5641 subcode = PLUS_EXPR;
5642 cplx_result = true;
5643 break;
5644 case IFN_SUB_OVERFLOW:
5645 subcode = MINUS_EXPR;
5646 cplx_result = true;
5647 break;
5648 case IFN_MUL_OVERFLOW:
5649 subcode = MULT_EXPR;
5650 cplx_result = true;
5651 break;
868363d4
RS
5652 case IFN_MASK_LOAD:
5653 changed |= gimple_fold_mask_load (gsi, stmt);
5654 break;
5655 case IFN_MASK_STORE:
5656 changed |= gimple_fold_mask_store (gsi, stmt);
5657 break;
368b454d
JJ
5658 default:
5659 break;
5660 }
5661 if (subcode != ERROR_MARK)
5662 {
5663 tree arg0 = gimple_call_arg (stmt, 0);
5664 tree arg1 = gimple_call_arg (stmt, 1);
1304953e
JJ
5665 tree type = TREE_TYPE (arg0);
5666 if (cplx_result)
5667 {
5668 tree lhs = gimple_call_lhs (stmt);
5669 if (lhs == NULL_TREE)
5670 type = NULL_TREE;
5671 else
5672 type = TREE_TYPE (TREE_TYPE (lhs));
5673 }
5674 if (type == NULL_TREE)
5675 ;
368b454d 5676 /* x = y + 0; x = y - 0; x = y * 0; */
1304953e
JJ
5677 else if (integer_zerop (arg1))
5678 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
368b454d
JJ
5679 /* x = 0 + y; x = 0 * y; */
5680 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
1304953e 5681 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
368b454d
JJ
5682 /* x = y - y; */
5683 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
1304953e 5684 result = integer_zero_node;
368b454d 5685 /* x = y * 1; x = 1 * y; */
1304953e
JJ
5686 else if (subcode == MULT_EXPR && integer_onep (arg1))
5687 result = arg0;
5688 else if (subcode == MULT_EXPR && integer_onep (arg0))
5689 result = arg1;
5690 else if (TREE_CODE (arg0) == INTEGER_CST
5691 && TREE_CODE (arg1) == INTEGER_CST)
368b454d 5692 {
1304953e
JJ
5693 if (cplx_result)
5694 result = int_const_binop (subcode, fold_convert (type, arg0),
5695 fold_convert (type, arg1));
5696 else
5697 result = int_const_binop (subcode, arg0, arg1);
5698 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
5699 {
5700 if (cplx_result)
5701 overflow = build_one_cst (type);
5702 else
5703 result = NULL_TREE;
5704 }
5705 }
5706 if (result)
5707 {
5708 if (result == integer_zero_node)
5709 result = build_zero_cst (type);
5710 else if (cplx_result && TREE_TYPE (result) != type)
5711 {
5712 if (TREE_CODE (result) == INTEGER_CST)
5713 {
5714 if (arith_overflowed_p (PLUS_EXPR, type, result,
5715 integer_zero_node))
5716 overflow = build_one_cst (type);
5717 }
5718 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
5719 && TYPE_UNSIGNED (type))
5720 || (TYPE_PRECISION (type)
5721 < (TYPE_PRECISION (TREE_TYPE (result))
5722 + (TYPE_UNSIGNED (TREE_TYPE (result))
5723 && !TYPE_UNSIGNED (type)))))
5724 result = NULL_TREE;
5725 if (result)
5726 result = fold_convert (type, result);
5727 }
368b454d
JJ
5728 }
5729 }
1304953e 5730
ed9c79e1
JJ
5731 if (result)
5732 {
1304953e
JJ
5733 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
5734 result = drop_tree_overflow (result);
5735 if (cplx_result)
5736 {
5737 if (overflow == NULL_TREE)
5738 overflow = build_zero_cst (TREE_TYPE (result));
5739 tree ctype = build_complex_type (TREE_TYPE (result));
5740 if (TREE_CODE (result) == INTEGER_CST
5741 && TREE_CODE (overflow) == INTEGER_CST)
5742 result = build_complex (ctype, result, overflow);
5743 else
5744 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
5745 ctype, result, overflow);
5746 }
52a5515e 5747 gimplify_and_update_call_from_tree (gsi, result);
ed9c79e1
JJ
5748 changed = true;
5749 }
5750 }
3b45a007 5751
e021c122 5752 return changed;
cbdd87d4
RG
5753}
5754
e0ee10ed 5755
89a79e96
RB
5756/* Return true whether NAME has a use on STMT. */
5757
5758static bool
355fe088 5759has_use_on_stmt (tree name, gimple *stmt)
89a79e96
RB
5760{
5761 imm_use_iterator iter;
5762 use_operand_p use_p;
5763 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
5764 if (USE_STMT (use_p) == stmt)
5765 return true;
5766 return false;
5767}
5768
e0ee10ed
RB
/* Worker for fold_stmt_1 dispatch to pattern based folding with
   gimple_simplify.

   Replaces *GSI with the simplification result held in *RES_OP
   (code and operands) and the associated statements in *SEQ.  Does the
   replacement according to INPLACE and returns true if the operation
   succeeded.  When INPLACE is true no new statements may be emitted,
   only *GSI itself may be rewritten.  */

static bool
replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
				  gimple_match_op *res_op,
				  gimple_seq *seq, bool inplace)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree *ops = res_op->ops;
  unsigned int num_ops = res_op->num_ops;

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  See also maybe_push_res_to_seq.
     As an exception allow such uses if there was a use of the
     same SSA name on the old stmt.  */
  for (unsigned int i = 0; i < num_ops; ++i)
    if (TREE_CODE (ops[i]) == SSA_NAME
	&& SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])
	&& !has_use_on_stmt (ops[i], stmt))
      return false;

  /* A comparison tree as first operand hides two more operands to check
     for the same abnormal-PHI restriction.  */
  if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
    for (unsigned int i = 0; i < 2; ++i)
      if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))
	  && !has_use_on_stmt (TREE_OPERAND (ops[0], i), stmt))
	return false;

  /* Don't insert new statements when INPLACE is true, even if we could
     reuse STMT for the final statement.  */
  if (inplace && !gimple_seq_empty_p (*seq))
    return false;

  if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      /* A GIMPLE_COND result must be a tree code, never an (internal)
	 function.  */
      gcc_assert (res_op->code.is_tree_code ());
      if (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison
	  /* GIMPLE_CONDs condition may not throw.  */
	  && (!flag_exceptions
	      || !cfun->can_throw_non_call_exceptions
	      || !operation_could_trap_p (res_op->code,
					  FLOAT_TYPE_P (TREE_TYPE (ops[0])),
					  false, NULL_TREE)))
	gimple_cond_set_condition (cond_stmt, res_op->code, ops[0], ops[1]);
      else if (res_op->code == SSA_NAME)
	/* Simplified to a bare SSA name: compare it against zero.  */
	gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
				   build_zero_cst (TREE_TYPE (ops[0])));
      else if (res_op->code == INTEGER_CST)
	{
	  /* Condition folded to a constant: make the branch static.  */
	  if (integer_zerop (ops[0]))
	    gimple_cond_make_false (cond_stmt);
	  else
	    gimple_cond_make_true (cond_stmt);
	}
      else if (!inplace)
	{
	  /* Otherwise materialize the result into SEQ and compare it
	     against zero.  */
	  tree res = maybe_push_res_to_seq (res_op, seq);
	  if (!res)
	    return false;
	  gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
				     build_zero_cst (TREE_TYPE (res)));
	}
      else
	return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi),
			     0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (is_gimple_assign (stmt)
	   && res_op->code.is_tree_code ())
    {
      /* In place we may only shrink the RHS, never grow it.  */
      if (!inplace
	  || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (res_op->code))
	{
	  maybe_build_generic_op (res_op);
	  gimple_assign_set_rhs_with_ops (gsi, res_op->code,
					  res_op->op_or_null (0),
					  res_op->op_or_null (1),
					  res_op->op_or_null (2));
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      if (!gimple_seq_empty_p (*seq))
		print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	      print_gimple_stmt (dump_file, gsi_stmt (*gsi),
				 0, TDF_SLIM);
	    }
	  gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
	  return true;
	}
    }
  else if (res_op->code.is_fn_code ()
	   && gimple_call_combined_fn (stmt) == res_op->code)
    {
      /* Same (combined) call, possibly simplified arguments: update the
	 arguments of STMT in place.  */
      gcc_assert (num_ops == gimple_call_num_args (stmt));
      for (unsigned int i = 0; i < num_ops; ++i)
	gimple_call_set_arg (stmt, i, ops[i]);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (!inplace)
    {
      /* Everything else (e.g. a call simplified to an expression):
	 build statements computing the result into the LHS and replace
	 STMT with them.  */
      if (gimple_has_lhs (stmt))
	{
	  tree lhs = gimple_get_lhs (stmt);
	  if (!maybe_push_res_to_seq (res_op, seq, lhs))
	    return false;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	    }
	  gsi_replace_with_seq_vops (gsi, *seq);
	  return true;
	}
      else
	gcc_unreachable ();
    }

  return false;
}
5909
040292e7
RB
/* Canonicalize MEM_REFs invariant address operand after propagation.

   T points at the reference tree (possibly wrapped in an ADDR_EXPR) to
   canonicalize in place.  IS_DEBUG is true when *T comes from a debug
   bind value, in which case an address we cannot decompose makes us
   bail out instead of asserting.  Returns true if *T (or *ORIG_T) was
   changed.  */

static bool
maybe_canonicalize_mem_ref_addr (tree *t, bool is_debug = false)
{
  bool res = false;
  /* Remember the outermost tree; some canonicalizations below rewrite
     the full ADDR_EXPR rather than the reference inside it.  */
  tree *orig_t = t;

  if (TREE_CODE (*t) == ADDR_EXPR)
    t = &TREE_OPERAND (*t, 0);

  /* The C and C++ frontends use an ARRAY_REF for indexing with their
     generic vector extension.  The actual vector referenced is
     view-converted to an array type for this purpose.  If the index
     is constant the canonical representation in the middle-end is a
     BIT_FIELD_REF so re-write the former to the latter here.  */
  if (TREE_CODE (*t) == ARRAY_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
      && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
      && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
    {
      tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
      if (VECTOR_TYPE_P (vtype))
	{
	  tree low = array_ref_low_bound (*t);
	  if (TREE_CODE (low) == INTEGER_CST)
	    {
	      if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
		{
		  /* Bit offset of the accessed element, relative to the
		     vector start:  (index - low_bound) * element_size.  */
		  widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
					    wi::to_widest (low));
		  idx = wi::mul (idx, wi::to_widest
					 (TYPE_SIZE (TREE_TYPE (*t))));
		  widest_int ext
		    = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
		  /* Only rewrite accesses fully inside the vector.  */
		  if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
		    {
		      *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
				       TREE_TYPE (*t),
				       TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
				       TYPE_SIZE (TREE_TYPE (*t)),
				       wide_int_to_tree (bitsizetype, idx));
		      res = true;
		    }
		}
	    }
	}
    }

  /* Strip component/array/etc. wrappers down to the base reference.  */
  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
     of invariant addresses into a SSA name MEM_REF address.  */
  if (TREE_CODE (*t) == MEM_REF
      || TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (*t, 0);
      if (TREE_CODE (addr) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
	      || handled_component_p (TREE_OPERAND (addr, 0))))
	{
	  tree base;
	  poly_int64 coffset;
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						&coffset);
	  if (!base)
	    {
	      /* Debug bind values may contain addresses we cannot
		 decompose; give up quietly there, assert otherwise.  */
	      if (is_debug)
		return false;
	      gcc_unreachable ();
	    }

	  /* Fold the component offset into the MEM_REF's constant
	     offset operand.  */
	  TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
	  TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
						  TREE_OPERAND (*t, 1),
						  size_int (coffset));
	  res = true;
	}
      gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
			   || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
    }

  /* Canonicalize back MEM_REFs to plain reference trees if the object
     accessed is a decl that has the same access semantics as the MEM_REF.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && MR_DEPENDENCE_CLIQUE (*t) == 0)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
      if (/* Same volatile qualification.  */
	  TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
	  /* Same TBAA behavior with -fstrict-aliasing.  */
	  && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
	  && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
	      == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
	  /* Same alignment.  */
	  && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
	  /* We have to look out here to not drop a required conversion
	     from the rhs to the lhs if *t appears on the lhs or vice-versa
	     if it appears on the rhs.  Thus require strict type
	     compatibility.  */
	  && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
	{
	  *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
	  res = true;
	}
    }

  /* Canonicalize &MEM[int_cst + offset] to a plain integer constant
     when the whole address folds to a known value.  */
  else if (TREE_CODE (*orig_t) == ADDR_EXPR
	   && TREE_CODE (*t) == MEM_REF
	   && TREE_CODE (TREE_OPERAND (*t, 0)) == INTEGER_CST)
    {
      tree base;
      poly_int64 coffset;
      base = get_addr_base_and_unit_offset (TREE_OPERAND (*orig_t, 0),
					    &coffset);
      if (base)
	{
	  gcc_assert (TREE_CODE (base) == MEM_REF);
	  poly_int64 moffset;
	  if (mem_ref_offset (base).to_shwi (&moffset))
	    {
	      coffset += moffset;
	      if (wi::to_poly_wide (TREE_OPERAND (base, 0)).to_shwi (&moffset))
		{
		  coffset += moffset;
		  *orig_t = build_int_cst (TREE_TYPE (*orig_t), coffset);
		  return true;
		}
	    }
	}
    }

  /* Canonicalize TARGET_MEM_REF in particular with respect to
     the indexes becoming constant.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  if (TREE_CODE (*orig_t) == ADDR_EXPR)
	    recompute_tree_invariant_for_addr_expr (*orig_t);
	  res = true;
	}
    }

  return res;
}
6062
cbdd87d4
RG
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.

   GSI points at the statement to fold.  When INPLACE is true the
   statement may only be modified in place; no new statements may be
   created and the statement object itself must survive.  VALUEIZE is
   the SSA name valueization callback handed to gimple_simplify; it may
   decline a lookup by returning NULL_TREE.  Returns true if the
   statement was changed.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
{
  bool changed = false;
  gimple *stmt = gsi_stmt (*gsi);
  /* Remember whether -Wstrict-overflow warnings were already suppressed
     on STMT so fold_undefer_overflow_warnings can honor that below.  */
  bool nowarning = warning_suppressed_p (stmt, OPT_Wstrict_overflow);
  unsigned i;
  fold_defer_overflow_warnings ();

  /* First do required canonicalization of [TARGET_]MEM_REF addresses
     after propagation.
     ??? This shouldn't be done in generic folding but in the
     propagation helpers which also know whether an address was
     propagated.
     Also canonicalize operand order.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
	{
	  tree *rhs = gimple_assign_rhs1_ptr (stmt);
	  if ((REFERENCE_CLASS_P (*rhs)
	       || TREE_CODE (*rhs) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (rhs))
	    changed = true;
	  tree *lhs = gimple_assign_lhs_ptr (stmt);
	  if (REFERENCE_CLASS_P (*lhs)
	      && maybe_canonicalize_mem_ref_addr (lhs))
	    changed = true;
	}
      else
	{
	  /* Canonicalize operand order.  */
	  enum tree_code code = gimple_assign_rhs_code (stmt);
	  if (TREE_CODE_CLASS (code) == tcc_comparison
	      || commutative_tree_code (code)
	      || commutative_ternary_tree_code (code))
	    {
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      tree rhs2 = gimple_assign_rhs2 (stmt);
	      if (tree_swap_operands_p (rhs1, rhs2))
		{
		  gimple_assign_set_rhs1 (stmt, rhs2);
		  gimple_assign_set_rhs2 (stmt, rhs1);
		  /* Swapping comparison operands requires swapping the
		     comparison code as well.  */
		  if (TREE_CODE_CLASS (code) == tcc_comparison)
		    gimple_assign_set_rhs_code (stmt,
						swap_tree_comparison (code));
		  changed = true;
		}
	    }
	}
      break;
    case GIMPLE_CALL:
      {
	/* Canonicalize reference-class call arguments and the LHS.  */
	for (i = 0; i < gimple_call_num_args (stmt); ++i)
	  {
	    tree *arg = gimple_call_arg_ptr (stmt, i);
	    if (REFERENCE_CLASS_P (*arg)
		&& maybe_canonicalize_mem_ref_addr (arg))
	      changed = true;
	  }
	tree *lhs = gimple_call_lhs_ptr (stmt);
	if (*lhs
	    && REFERENCE_CLASS_P (*lhs)
	    && maybe_canonicalize_mem_ref_addr (lhs))
	  changed = true;
	break;
      }
    case GIMPLE_ASM:
      {
	/* Canonicalize memory operands of asm outputs and inputs.  */
	gasm *asm_stmt = as_a <gasm *> (stmt);
	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if (REFERENCE_CLASS_P (op)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if ((REFERENCE_CLASS_P (op)
		 || TREE_CODE (op) == ADDR_EXPR)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
      }
      break;
    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree *val = gimple_debug_bind_get_value_ptr (stmt);
	  if (*val
	      && (REFERENCE_CLASS_P (*val)
		  || TREE_CODE (*val) == ADDR_EXPR)
	      /* IS_DEBUG == true: bail out instead of asserting on
		 addresses we cannot decompose.  */
	      && maybe_canonicalize_mem_ref_addr (val, true))
	    changed = true;
	}
      break;
    case GIMPLE_COND:
      {
	/* Canonicalize operand order.  */
	tree lhs = gimple_cond_lhs (stmt);
	tree rhs = gimple_cond_rhs (stmt);
	if (tree_swap_operands_p (lhs, rhs))
	  {
	    gcond *gc = as_a <gcond *> (stmt);
	    gimple_cond_set_lhs (gc, rhs);
	    gimple_cond_set_rhs (gc, lhs);
	    gimple_cond_set_code (gc,
				  swap_tree_comparison (gimple_cond_code (gc)));
	    changed = true;
	  }
      }
      /* FALLTHRU -- intentionally falls into the empty default.  */
    default:;
    }

  /* Dispatch to pattern-based folding.  */
  if (!inplace
      || is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_COND)
    {
      gimple_seq seq = NULL;
      gimple_match_op res_op;
      if (gimple_simplify (stmt, &res_op, inplace ? NULL : &seq,
			   valueize, valueize))
	{
	  if (replace_stmt_with_simplification (gsi, &res_op, &seq, inplace))
	    changed = true;
	  else
	    gimple_seq_discard (seq);
	}
    }

  /* The statement may have been replaced above; re-fetch it.  */
  stmt = gsi_stmt (*gsi);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	/* Try to canonicalize for boolean-typed X the comparisons
	   X == 0, X == 1, X != 0, and X != 1.  */
	if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	    || gimple_assign_rhs_code (stmt) == NE_EXPR)
	  {
	    tree lhs = gimple_assign_lhs (stmt);
	    tree op1 = gimple_assign_rhs1 (stmt);
	    tree op2 = gimple_assign_rhs2 (stmt);
	    tree type = TREE_TYPE (op1);

	    /* Check whether the comparison operands are of the same boolean
	       type as the result type is.
	       Check that second operand is an integer-constant with value
	       one or zero.  */
	    if (TREE_CODE (op2) == INTEGER_CST
		&& (integer_zerop (op2) || integer_onep (op2))
		&& useless_type_conversion_p (TREE_TYPE (lhs), type))
	      {
		enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
		bool is_logical_not = false;

		/* X == 0 and X != 1 is a logical-not of X
		   X == 1 and X != 0 is X  */
		if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		    || (cmp_code == NE_EXPR && integer_onep (op2)))
		  is_logical_not = true;

		if (is_logical_not == false)
		  gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
		/* Only for one-bit precision typed X the transformation
		   !X -> ~X is valid.  */
		else if (TYPE_PRECISION (type) == 1)
		  gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
		/* Otherwise we use !X -> X ^ 1.  */
		else
		  gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
						  build_int_cst (type, 1));
		changed = true;
		break;
	      }
	  }

	unsigned old_num_ops = gimple_num_ops (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	/* In place, only accept a replacement RHS that is no wider than
	   the statement's current operand count.  */
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      /* Fold the reference below the ADDR_EXPR and re-wrap it.  */
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    case GIMPLE_RETURN:
      {
	greturn *ret_stmt = as_a<greturn *> (stmt);
	tree ret = gimple_return_retval(ret_stmt);

	/* Propagate a known value into the return value if allowed.  */
	if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
	  {
	    tree val = valueize (ret);
	    if (val && val != ret
		&& may_propagate_copy (ret, val))
	      {
		gimple_return_set_retval (ret_stmt, val);
		changed = true;
	      }
	  }
      }
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
  return changed;
}
6327
e0ee10ed
RB
6328/* Valueziation callback that ends up not following SSA edges. */
6329
6330tree
6331no_follow_ssa_edges (tree)
6332{
6333 return NULL_TREE;
6334}
6335
45cc9f96
RB
6336/* Valueization callback that ends up following single-use SSA edges only. */
6337
6338tree
6339follow_single_use_edges (tree val)
6340{
6341 if (TREE_CODE (val) == SSA_NAME
6342 && !has_single_use (val))
6343 return NULL_TREE;
6344 return val;
6345}
6346
c566cc9f
RS
6347/* Valueization callback that follows all SSA edges. */
6348
6349tree
6350follow_all_ssa_edges (tree val)
6351{
6352 return val;
6353}
6354
cbdd87d4
RG
6355/* Fold the statement pointed to by GSI. In some cases, this function may
6356 replace the whole statement with a new one. Returns true iff folding
6357 makes any changes.
6358 The statement pointed to by GSI should be in valid gimple form but may
6359 be in unfolded state as resulting from for example constant propagation
6360 which can produce *&x = 0. */
6361
6362bool
6363fold_stmt (gimple_stmt_iterator *gsi)
6364{
e0ee10ed
RB
6365 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
6366}
6367
6368bool
6369fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
6370{
6371 return fold_stmt_1 (gsi, false, valueize);
cbdd87d4
RG
6372}
6373
59401b92 6374/* Perform the minimal folding on statement *GSI. Only operations like
cbdd87d4
RG
6375 *&x created by constant propagation are handled. The statement cannot
6376 be replaced with a new one. Return true if the statement was
6377 changed, false otherwise.
59401b92 6378 The statement *GSI should be in valid gimple form but may
cbdd87d4
RG
6379 be in unfolded state as resulting from for example constant propagation
6380 which can produce *&x = 0. */
6381
6382bool
59401b92 6383fold_stmt_inplace (gimple_stmt_iterator *gsi)
cbdd87d4 6384{
355fe088 6385 gimple *stmt = gsi_stmt (*gsi);
e0ee10ed 6386 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
59401b92 6387 gcc_assert (gsi_stmt (*gsi) == stmt);
cbdd87d4
RG
6388 return changed;
6389}
6390
e89065a1
SL
6391/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
6392 if EXPR is null or we don't know how.
6393 If non-null, the result always has boolean type. */
6394
6395static tree
6396canonicalize_bool (tree expr, bool invert)
6397{
6398 if (!expr)
6399 return NULL_TREE;
6400 else if (invert)
6401 {
6402 if (integer_nonzerop (expr))
6403 return boolean_false_node;
6404 else if (integer_zerop (expr))
6405 return boolean_true_node;
6406 else if (TREE_CODE (expr) == SSA_NAME)
6407 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
6408 build_int_cst (TREE_TYPE (expr), 0));
98209db3 6409 else if (COMPARISON_CLASS_P (expr))
e89065a1
SL
6410 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
6411 boolean_type_node,
6412 TREE_OPERAND (expr, 0),
6413 TREE_OPERAND (expr, 1));
6414 else
6415 return NULL_TREE;
6416 }
6417 else
6418 {
6419 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
6420 return expr;
6421 if (integer_nonzerop (expr))
6422 return boolean_true_node;
6423 else if (integer_zerop (expr))
6424 return boolean_false_node;
6425 else if (TREE_CODE (expr) == SSA_NAME)
6426 return fold_build2 (NE_EXPR, boolean_type_node, expr,
6427 build_int_cst (TREE_TYPE (expr), 0));
98209db3 6428 else if (COMPARISON_CLASS_P (expr))
e89065a1
SL
6429 return fold_build2 (TREE_CODE (expr),
6430 boolean_type_node,
6431 TREE_OPERAND (expr, 0),
6432 TREE_OPERAND (expr, 1));
6433 else
6434 return NULL_TREE;
6435 }
6436}
6437
6438/* Check to see if a boolean expression EXPR is logically equivalent to the
6439 comparison (OP1 CODE OP2). Check for various identities involving
6440 SSA_NAMEs. */
6441
6442static bool
6443same_bool_comparison_p (const_tree expr, enum tree_code code,
6444 const_tree op1, const_tree op2)
6445{
355fe088 6446 gimple *s;
e89065a1
SL
6447
6448 /* The obvious case. */
6449 if (TREE_CODE (expr) == code
6450 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
6451 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
6452 return true;
6453
6454 /* Check for comparing (name, name != 0) and the case where expr
6455 is an SSA_NAME with a definition matching the comparison. */
6456 if (TREE_CODE (expr) == SSA_NAME
6457 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
6458 {
6459 if (operand_equal_p (expr, op1, 0))
6460 return ((code == NE_EXPR && integer_zerop (op2))
6461 || (code == EQ_EXPR && integer_nonzerop (op2)));
6462 s = SSA_NAME_DEF_STMT (expr);
6463 if (is_gimple_assign (s)
6464 && gimple_assign_rhs_code (s) == code
6465 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
6466 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
6467 return true;
6468 }
6469
6470 /* If op1 is of the form (name != 0) or (name == 0), and the definition
6471 of name is a comparison, recurse. */
6472 if (TREE_CODE (op1) == SSA_NAME
6473 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
6474 {
6475 s = SSA_NAME_DEF_STMT (op1);
6476 if (is_gimple_assign (s)
6477 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
6478 {
6479 enum tree_code c = gimple_assign_rhs_code (s);
6480 if ((c == NE_EXPR && integer_zerop (op2))
6481 || (c == EQ_EXPR && integer_nonzerop (op2)))
6482 return same_bool_comparison_p (expr, c,
6483 gimple_assign_rhs1 (s),
6484 gimple_assign_rhs2 (s));
6485 if ((c == EQ_EXPR && integer_zerop (op2))
6486 || (c == NE_EXPR && integer_nonzerop (op2)))
6487 return same_bool_comparison_p (expr,
6488 invert_tree_comparison (c, false),
6489 gimple_assign_rhs1 (s),
6490 gimple_assign_rhs2 (s));
6491 }
6492 }
6493 return false;
6494}
6495
6496/* Check to see if two boolean expressions OP1 and OP2 are logically
6497 equivalent. */
6498
6499static bool
6500same_bool_result_p (const_tree op1, const_tree op2)
6501{
6502 /* Simple cases first. */
6503 if (operand_equal_p (op1, op2, 0))
6504 return true;
6505
6506 /* Check the cases where at least one of the operands is a comparison.
6507 These are a bit smarter than operand_equal_p in that they apply some
6508 identifies on SSA_NAMEs. */
98209db3 6509 if (COMPARISON_CLASS_P (op2)
e89065a1
SL
6510 && same_bool_comparison_p (op1, TREE_CODE (op2),
6511 TREE_OPERAND (op2, 0),
6512 TREE_OPERAND (op2, 1)))
6513 return true;
98209db3 6514 if (COMPARISON_CLASS_P (op1)
e89065a1
SL
6515 && same_bool_comparison_p (op2, TREE_CODE (op1),
6516 TREE_OPERAND (op1, 0),
6517 TREE_OPERAND (op1, 1)))
6518 return true;
6519
6520 /* Default case. */
6521 return false;
6522}
6523
6524/* Forward declarations for some mutually recursive functions. */
6525
6526static tree
5f487a34 6527and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
e89065a1
SL
6528 enum tree_code code2, tree op2a, tree op2b);
6529static tree
5f487a34 6530and_var_with_comparison (tree type, tree var, bool invert,
e89065a1
SL
6531 enum tree_code code2, tree op2a, tree op2b);
6532static tree
5f487a34 6533and_var_with_comparison_1 (tree type, gimple *stmt,
e89065a1
SL
6534 enum tree_code code2, tree op2a, tree op2b);
6535static tree
5f487a34 6536or_comparisons_1 (tree, enum tree_code code1, tree op1a, tree op1b,
e89065a1
SL
6537 enum tree_code code2, tree op2a, tree op2b);
6538static tree
5f487a34 6539or_var_with_comparison (tree, tree var, bool invert,
e89065a1
SL
6540 enum tree_code code2, tree op2a, tree op2b);
6541static tree
5f487a34 6542or_var_with_comparison_1 (tree, gimple *stmt,
e89065a1
SL
6543 enum tree_code code2, tree op2a, tree op2b);
6544
6545/* Helper function for and_comparisons_1: try to simplify the AND of the
6546 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
6547 If INVERT is true, invert the value of the VAR before doing the AND.
6548 Return NULL_EXPR if we can't simplify this to a single expression. */
6549
6550static tree
5f487a34 6551and_var_with_comparison (tree type, tree var, bool invert,
e89065a1
SL
6552 enum tree_code code2, tree op2a, tree op2b)
6553{
6554 tree t;
355fe088 6555 gimple *stmt = SSA_NAME_DEF_STMT (var);
e89065a1
SL
6556
6557 /* We can only deal with variables whose definitions are assignments. */
6558 if (!is_gimple_assign (stmt))
6559 return NULL_TREE;
6560
6561 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
6562 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
6563 Then we only have to consider the simpler non-inverted cases. */
6564 if (invert)
5f487a34 6565 t = or_var_with_comparison_1 (type, stmt,
e89065a1
SL
6566 invert_tree_comparison (code2, false),
6567 op2a, op2b);
6568 else
5f487a34 6569 t = and_var_with_comparison_1 (type, stmt, code2, op2a, op2b);
e89065a1
SL
6570 return canonicalize_bool (t, invert);
6571}
6572
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (tree type, gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      /* (op2a != 0) or (op2a == 1) tests op2a for truth.  */
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      /* (op2a == 0) or (op2a != 1) tests op2a for falsehood.  */
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (type, innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2,
				  op2a,
				  op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      /* PARTIAL holds a simplification of one redistributed operand so
	 it can be combined with the other operand's simplification.  */
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
         Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (type, inner2, false, code2, op2a,
					   op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (type, inner1, false, code2, op2a,
					   op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b)) */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b))) */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t) */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case. where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial) */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
6737
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* INVERT is true when the first comparison tests NAME for false.  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (type, op1a, invert, code2, op2a,
					  op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (type, arg, invert, code2,
						      op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
6862
5f487a34
LJH
6863/* Helper function for maybe_fold_and_comparisons and maybe_fold_or_comparisons
6864 : try to simplify the AND/OR of the ssa variable VAR with the comparison
6865 specified by (OP2A CODE2 OP2B) from match.pd. Return NULL_EXPR if we can't
6866 simplify this to a single expression. As we are going to lower the cost
6867 of building SSA names / gimple stmts significantly, we need to allocate
6868 them ont the stack. This will cause the code to be a bit ugly. */
6869
6870static tree
6871maybe_fold_comparisons_from_match_pd (tree type, enum tree_code code,
6872 enum tree_code code1,
6873 tree op1a, tree op1b,
6874 enum tree_code code2, tree op2a,
6875 tree op2b)
6876{
6877 /* Allocate gimple stmt1 on the stack. */
6878 gassign *stmt1
6879 = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
6880 gimple_init (stmt1, GIMPLE_ASSIGN, 3);
6881 gimple_assign_set_rhs_code (stmt1, code1);
6882 gimple_assign_set_rhs1 (stmt1, op1a);
6883 gimple_assign_set_rhs2 (stmt1, op1b);
6884
6885 /* Allocate gimple stmt2 on the stack. */
6886 gassign *stmt2
6887 = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
6888 gimple_init (stmt2, GIMPLE_ASSIGN, 3);
6889 gimple_assign_set_rhs_code (stmt2, code2);
6890 gimple_assign_set_rhs1 (stmt2, op2a);
6891 gimple_assign_set_rhs2 (stmt2, op2b);
6892
6893 /* Allocate SSA names(lhs1) on the stack. */
6894 tree lhs1 = (tree)XALLOCA (tree_ssa_name);
6895 memset (lhs1, 0, sizeof (tree_ssa_name));
6896 TREE_SET_CODE (lhs1, SSA_NAME);
6897 TREE_TYPE (lhs1) = type;
6898 init_ssa_name_imm_use (lhs1);
6899
6900 /* Allocate SSA names(lhs2) on the stack. */
6901 tree lhs2 = (tree)XALLOCA (tree_ssa_name);
6902 memset (lhs2, 0, sizeof (tree_ssa_name));
6903 TREE_SET_CODE (lhs2, SSA_NAME);
6904 TREE_TYPE (lhs2) = type;
6905 init_ssa_name_imm_use (lhs2);
6906
6907 gimple_assign_set_lhs (stmt1, lhs1);
6908 gimple_assign_set_lhs (stmt2, lhs2);
6909
6910 gimple_match_op op (gimple_match_cond::UNCOND, code,
6911 type, gimple_assign_lhs (stmt1),
6912 gimple_assign_lhs (stmt2));
6913 if (op.resimplify (NULL, follow_all_ssa_edges))
6914 {
6915 if (gimple_simplified_result_is_gimple_val (&op))
6916 {
6917 tree res = op.ops[0];
6918 if (res == lhs1)
6919 return build2 (code1, type, op1a, op1b);
6920 else if (res == lhs2)
6921 return build2 (code2, type, op2a, op2b);
6922 else
6923 return res;
6924 }
ae9c3507
ML
6925 else if (op.code.is_tree_code ()
6926 && TREE_CODE_CLASS ((tree_code)op.code) == tcc_comparison)
6927 {
6928 tree op0 = op.ops[0];
6929 tree op1 = op.ops[1];
6930 if (op0 == lhs1 || op0 == lhs2 || op1 == lhs1 || op1 == lhs2)
6931 return NULL_TREE; /* not simple */
6932
6933 return build2 ((enum tree_code)op.code, op.type, op0, op1);
6934 }
5f487a34
LJH
6935 }
6936
6937 return NULL_TREE;
6938}
6939
e89065a1
SL
6940/* Try to simplify the AND of two comparisons, specified by
6941 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
6942 If this can be simplified to a single expression (without requiring
6943 introducing more SSA variables to hold intermediate values),
6944 return the resulting tree. Otherwise return NULL_TREE.
6945 If the result expression is non-null, it has boolean type. */
6946
6947tree
5f487a34
LJH
6948maybe_fold_and_comparisons (tree type,
6949 enum tree_code code1, tree op1a, tree op1b,
e89065a1
SL
6950 enum tree_code code2, tree op2a, tree op2b)
6951{
5f487a34 6952 if (tree t = and_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b))
e89065a1 6953 return t;
5f487a34
LJH
6954
6955 if (tree t = and_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b))
6956 return t;
6957
6958 if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_AND_EXPR, code1,
6959 op1a, op1b, code2, op2a,
6960 op2b))
6961 return t;
6962
6963 return NULL_TREE;
e89065a1
SL
6964}
6965
6966/* Helper function for or_comparisons_1: try to simplify the OR of the
6967 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
6968 If INVERT is true, invert the value of VAR before doing the OR.
6969 Return NULL_EXPR if we can't simplify this to a single expression. */
6970
6971static tree
5f487a34 6972or_var_with_comparison (tree type, tree var, bool invert,
e89065a1
SL
6973 enum tree_code code2, tree op2a, tree op2b)
6974{
6975 tree t;
355fe088 6976 gimple *stmt = SSA_NAME_DEF_STMT (var);
e89065a1
SL
6977
6978 /* We can only deal with variables whose definitions are assignments. */
6979 if (!is_gimple_assign (stmt))
6980 return NULL_TREE;
6981
6982 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
6983 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
6984 Then we only have to consider the simpler non-inverted cases. */
6985 if (invert)
5f487a34 6986 t = and_var_with_comparison_1 (type, stmt,
e89065a1
SL
6987 invert_tree_comparison (code2, false),
6988 op2a, op2b);
6989 else
5f487a34 6990 t = or_var_with_comparison_1 (type, stmt, code2, op2a, op2b);
e89065a1
SL
6991 return canonicalize_bool (t, invert);
6992}
6993
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (tree type, gimple *stmt,
			  enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true .  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      /* (op2a != 0) or (op2a == 1) tests op2a for truth.  */
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      /* (op2a == 0) or (op2a != 1) tests op2a for falsehood.  */
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_true_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (type, innercode,
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 code2,
				 op2a,
				 op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      /* PARTIAL holds a simplification of one redistributed operand so
	 it can be combined with the other operand's simplification.  */
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
	 inner1 OR (inner1 AND inner2) => inner1
	 !inner1 OR (inner1 OR inner2) => true
	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
      */
      if (inner1 == true_test_var)
	return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (type, inner2, false, code2, op2a,
					  op2b));
      else if (inner2 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (type, inner1, false, code2, op2a,
					  op2b));

      /* Next, redistribute/reassociate the OR across the inner tests.
	 Compute the first partial result, (inner1 OR (op2a code op2b)) */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (t OR inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_or)
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (integer_zerop (t))
		return inner2;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner2 OR (op2a code op2b))) */
	  else if (integer_zerop (t))
	    return boolean_false_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (inner1 OR t)
	     => (t OR partial) */
	  if (is_or)
	    {
	      if (integer_zerop (t))
		return inner1;
	      else if (integer_onep (t))
		return boolean_true_node;
	      /* If both are the same, we can apply the identity
		 (x OR x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner1 OR (op2a code2 op2b)))
	     => (t AND partial) */
	  else
	    {
	      if (integer_zerop (t))
		return boolean_false_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed AND expression.  The
		     interesting case is when at least one is true.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_onep (partial))
		    return t;
		  else if (integer_onep (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
7159
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* INVERT is true when the first comparison tests NAME for false.  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return or_var_with_comparison (type, op1a, invert, code2, op2a,
					 op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ORed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
			{
			  if (!result)
			    result = boolean_true_node;
			  else if (!integer_onep (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = or_var_with_comparison (type, arg, invert, code2,
						     op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
7284
7285/* Try to simplify the OR of two comparisons, specified by
7286 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
7287 If this can be simplified to a single expression (without requiring
7288 introducing more SSA variables to hold intermediate values),
7289 return the resulting tree. Otherwise return NULL_TREE.
7290 If the result expression is non-null, it has boolean type. */
7291
7292tree
5f487a34
LJH
7293maybe_fold_or_comparisons (tree type,
7294 enum tree_code code1, tree op1a, tree op1b,
e89065a1
SL
7295 enum tree_code code2, tree op2a, tree op2b)
7296{
5f487a34 7297 if (tree t = or_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b))
e89065a1 7298 return t;
cfef45c8 7299
5f487a34
LJH
7300 if (tree t = or_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b))
7301 return t;
7302
7303 if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_IOR_EXPR, code1,
7304 op1a, op1b, code2, op2a,
7305 op2b))
7306 return t;
7307
7308 return NULL_TREE;
7309}
cfef45c8
RG
7310
7311/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
7312
7313 Either NULL_TREE, a simplified but non-constant or a constant
7314 is returned.
7315
7316 ??? This should go into a gimple-fold-inline.h file to be eventually
7317 privatized with the single valueize function used in the various TUs
7318 to avoid the indirect function call overhead. */
7319
7320tree
355fe088 7321gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
d2a85801 7322 tree (*gvalueize) (tree))
cfef45c8 7323{
5d75ad95 7324 gimple_match_op res_op;
45cc9f96
RB
7325 /* ??? The SSA propagators do not correctly deal with following SSA use-def
7326 edges if there are intermediate VARYING defs. For this reason
7327 do not follow SSA edges here even though SCCVN can technically
7328 just deal fine with that. */
5d75ad95 7329 if (gimple_simplify (stmt, &res_op, NULL, gvalueize, valueize))
45cc9f96 7330 {
34050b6b 7331 tree res = NULL_TREE;
5d75ad95
RS
7332 if (gimple_simplified_result_is_gimple_val (&res_op))
7333 res = res_op.ops[0];
34050b6b 7334 else if (mprts_hook)
5d75ad95 7335 res = mprts_hook (&res_op);
34050b6b 7336 if (res)
45cc9f96 7337 {
34050b6b
RB
7338 if (dump_file && dump_flags & TDF_DETAILS)
7339 {
7340 fprintf (dump_file, "Match-and-simplified ");
7341 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
7342 fprintf (dump_file, " to ");
ef6cb4c7 7343 print_generic_expr (dump_file, res);
34050b6b
RB
7344 fprintf (dump_file, "\n");
7345 }
7346 return res;
45cc9f96 7347 }
45cc9f96
RB
7348 }
7349
cfef45c8
RG
7350 location_t loc = gimple_location (stmt);
7351 switch (gimple_code (stmt))
7352 {
7353 case GIMPLE_ASSIGN:
7354 {
7355 enum tree_code subcode = gimple_assign_rhs_code (stmt);
7356
7357 switch (get_gimple_rhs_class (subcode))
7358 {
7359 case GIMPLE_SINGLE_RHS:
7360 {
7361 tree rhs = gimple_assign_rhs1 (stmt);
7362 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
7363
7364 if (TREE_CODE (rhs) == SSA_NAME)
7365 {
7366 /* If the RHS is an SSA_NAME, return its known constant value,
7367 if any. */
7368 return (*valueize) (rhs);
7369 }
7370 /* Handle propagating invariant addresses into address
7371 operations. */
7372 else if (TREE_CODE (rhs) == ADDR_EXPR
7373 && !is_gimple_min_invariant (rhs))
7374 {
a90c8804 7375 poly_int64 offset = 0;
cfef45c8
RG
7376 tree base;
7377 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
7378 &offset,
7379 valueize);
7380 if (base
7381 && (CONSTANT_CLASS_P (base)
7382 || decl_address_invariant_p (base)))
7383 return build_invariant_address (TREE_TYPE (rhs),
7384 base, offset);
7385 }
7386 else if (TREE_CODE (rhs) == CONSTRUCTOR
7387 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
928686b1
RS
7388 && known_eq (CONSTRUCTOR_NELTS (rhs),
7389 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
cfef45c8 7390 {
794e3180
RS
7391 unsigned i, nelts;
7392 tree val;
cfef45c8 7393
928686b1 7394 nelts = CONSTRUCTOR_NELTS (rhs);
5ebaa477 7395 tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
cfef45c8
RG
7396 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
7397 {
7398 val = (*valueize) (val);
7399 if (TREE_CODE (val) == INTEGER_CST
7400 || TREE_CODE (val) == REAL_CST
7401 || TREE_CODE (val) == FIXED_CST)
794e3180 7402 vec.quick_push (val);
cfef45c8
RG
7403 else
7404 return NULL_TREE;
7405 }
7406
5ebaa477 7407 return vec.build ();
cfef45c8 7408 }
bdf37f7a
JH
7409 if (subcode == OBJ_TYPE_REF)
7410 {
7411 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
7412 /* If callee is constant, we can fold away the wrapper. */
7413 if (is_gimple_min_invariant (val))
7414 return val;
7415 }
cfef45c8
RG
7416
7417 if (kind == tcc_reference)
7418 {
7419 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
7420 || TREE_CODE (rhs) == REALPART_EXPR
7421 || TREE_CODE (rhs) == IMAGPART_EXPR)
7422 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
7423 {
7424 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
7425 return fold_unary_loc (EXPR_LOCATION (rhs),
7426 TREE_CODE (rhs),
7427 TREE_TYPE (rhs), val);
7428 }
7429 else if (TREE_CODE (rhs) == BIT_FIELD_REF
7430 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
7431 {
7432 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
7433 return fold_ternary_loc (EXPR_LOCATION (rhs),
7434 TREE_CODE (rhs),
7435 TREE_TYPE (rhs), val,
7436 TREE_OPERAND (rhs, 1),
7437 TREE_OPERAND (rhs, 2));
7438 }
7439 else if (TREE_CODE (rhs) == MEM_REF
7440 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
7441 {
7442 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
7443 if (TREE_CODE (val) == ADDR_EXPR
7444 && is_gimple_min_invariant (val))
7445 {
7446 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
7447 unshare_expr (val),
7448 TREE_OPERAND (rhs, 1));
7449 if (tem)
7450 rhs = tem;
7451 }
7452 }
7453 return fold_const_aggregate_ref_1 (rhs, valueize);
7454 }
7455 else if (kind == tcc_declaration)
7456 return get_symbol_constant_value (rhs);
7457 return rhs;
7458 }
7459
7460 case GIMPLE_UNARY_RHS:
f3582e54 7461 return NULL_TREE;
cfef45c8
RG
7462
7463 case GIMPLE_BINARY_RHS:
4b1b9e64
RB
7464 /* Translate &x + CST into an invariant form suitable for
7465 further propagation. */
7466 if (subcode == POINTER_PLUS_EXPR)
7467 {
4b1b9e64
RB
7468 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
7469 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
4b1b9e64
RB
7470 if (TREE_CODE (op0) == ADDR_EXPR
7471 && TREE_CODE (op1) == INTEGER_CST)
7472 {
7473 tree off = fold_convert (ptr_type_node, op1);
4e3d3e40
RB
7474 return build1_loc
7475 (loc, ADDR_EXPR, TREE_TYPE (op0),
4b1b9e64
RB
7476 fold_build2 (MEM_REF,
7477 TREE_TYPE (TREE_TYPE (op0)),
7478 unshare_expr (op0), off));
7479 }
7480 }
59c20dc7
RB
7481 /* Canonicalize bool != 0 and bool == 0 appearing after
7482 valueization. While gimple_simplify handles this
7483 it can get confused by the ~X == 1 -> X == 0 transform
7484 which we cant reduce to a SSA name or a constant
7485 (and we have no way to tell gimple_simplify to not
7486 consider those transforms in the first place). */
7487 else if (subcode == EQ_EXPR
7488 || subcode == NE_EXPR)
7489 {
7490 tree lhs = gimple_assign_lhs (stmt);
7491 tree op0 = gimple_assign_rhs1 (stmt);
7492 if (useless_type_conversion_p (TREE_TYPE (lhs),
7493 TREE_TYPE (op0)))
7494 {
7495 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
7496 op0 = (*valueize) (op0);
8861704d
RB
7497 if (TREE_CODE (op0) == INTEGER_CST)
7498 std::swap (op0, op1);
7499 if (TREE_CODE (op1) == INTEGER_CST
7500 && ((subcode == NE_EXPR && integer_zerop (op1))
7501 || (subcode == EQ_EXPR && integer_onep (op1))))
7502 return op0;
59c20dc7
RB
7503 }
7504 }
4b1b9e64 7505 return NULL_TREE;
cfef45c8
RG
7506
7507 case GIMPLE_TERNARY_RHS:
7508 {
7509 /* Handle ternary operators that can appear in GIMPLE form. */
7510 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
7511 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
7512 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
cfef45c8 7513 return fold_ternary_loc (loc, subcode,
ce777eae
RB
7514 TREE_TYPE (gimple_assign_lhs (stmt)),
7515 op0, op1, op2);
cfef45c8
RG
7516 }
7517
7518 default:
7519 gcc_unreachable ();
7520 }
7521 }
7522
7523 case GIMPLE_CALL:
7524 {
25583c4f 7525 tree fn;
538dd0b7 7526 gcall *call_stmt = as_a <gcall *> (stmt);
25583c4f
RS
7527
7528 if (gimple_call_internal_p (stmt))
31e071ae
MP
7529 {
7530 enum tree_code subcode = ERROR_MARK;
7531 switch (gimple_call_internal_fn (stmt))
7532 {
7533 case IFN_UBSAN_CHECK_ADD:
7534 subcode = PLUS_EXPR;
7535 break;
7536 case IFN_UBSAN_CHECK_SUB:
7537 subcode = MINUS_EXPR;
7538 break;
7539 case IFN_UBSAN_CHECK_MUL:
7540 subcode = MULT_EXPR;
7541 break;
68fa96d6
ML
7542 case IFN_BUILTIN_EXPECT:
7543 {
7544 tree arg0 = gimple_call_arg (stmt, 0);
7545 tree op0 = (*valueize) (arg0);
7546 if (TREE_CODE (op0) == INTEGER_CST)
7547 return op0;
7548 return NULL_TREE;
7549 }
31e071ae
MP
7550 default:
7551 return NULL_TREE;
7552 }
368b454d
JJ
7553 tree arg0 = gimple_call_arg (stmt, 0);
7554 tree arg1 = gimple_call_arg (stmt, 1);
7555 tree op0 = (*valueize) (arg0);
7556 tree op1 = (*valueize) (arg1);
31e071ae
MP
7557
7558 if (TREE_CODE (op0) != INTEGER_CST
7559 || TREE_CODE (op1) != INTEGER_CST)
368b454d
JJ
7560 {
7561 switch (subcode)
7562 {
7563 case MULT_EXPR:
7564 /* x * 0 = 0 * x = 0 without overflow. */
7565 if (integer_zerop (op0) || integer_zerop (op1))
7566 return build_zero_cst (TREE_TYPE (arg0));
7567 break;
7568 case MINUS_EXPR:
7569 /* y - y = 0 without overflow. */
7570 if (operand_equal_p (op0, op1, 0))
7571 return build_zero_cst (TREE_TYPE (arg0));
7572 break;
7573 default:
7574 break;
7575 }
7576 }
7577 tree res
7578 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
31e071ae
MP
7579 if (res
7580 && TREE_CODE (res) == INTEGER_CST
7581 && !TREE_OVERFLOW (res))
7582 return res;
7583 return NULL_TREE;
7584 }
25583c4f
RS
7585
7586 fn = (*valueize) (gimple_call_fn (stmt));
cfef45c8 7587 if (TREE_CODE (fn) == ADDR_EXPR
6773658a 7588 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
3d78e008 7589 && fndecl_built_in_p (TREE_OPERAND (fn, 0))
5c944c6c
RB
7590 && gimple_builtin_call_types_compatible_p (stmt,
7591 TREE_OPERAND (fn, 0)))
cfef45c8
RG
7592 {
7593 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
a6a0570f 7594 tree retval;
cfef45c8
RG
7595 unsigned i;
7596 for (i = 0; i < gimple_call_num_args (stmt); ++i)
7597 args[i] = (*valueize) (gimple_call_arg (stmt, i));
a6a0570f 7598 retval = fold_builtin_call_array (loc,
538dd0b7 7599 gimple_call_return_type (call_stmt),
cfef45c8 7600 fn, gimple_call_num_args (stmt), args);
cfef45c8 7601 if (retval)
5c944c6c
RB
7602 {
7603 /* fold_call_expr wraps the result inside a NOP_EXPR. */
7604 STRIP_NOPS (retval);
538dd0b7
DM
7605 retval = fold_convert (gimple_call_return_type (call_stmt),
7606 retval);
5c944c6c 7607 }
cfef45c8
RG
7608 return retval;
7609 }
7610 return NULL_TREE;
7611 }
7612
7613 default:
7614 return NULL_TREE;
7615 }
7616}
7617
7618/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
7619 Returns NULL_TREE if folding to a constant is not possible, otherwise
7620 returns a constant according to is_gimple_min_invariant. */
7621
7622tree
355fe088 7623gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
cfef45c8
RG
7624{
7625 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
7626 if (res && is_gimple_min_invariant (res))
7627 return res;
7628 return NULL_TREE;
7629}
7630
7631
7632/* The following set of functions are supposed to fold references using
7633 their constant initializers. */
7634
cfef45c8
RG
/* See if we can find constructor defining value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   VALUEIZE, when non-NULL, is used to look through SSA names to
   their known constant values.

   As a special case, return error_mark_node when constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */
static tree
get_base_constructor (tree base, poly_int64_pod *bit_offset,
		      tree (*valueize)(tree))
{
  poly_int64 bit_offset2, size, max_size;
  bool reverse;

  if (TREE_CODE (base) == MEM_REF)
    {
      /* Fold the MEM_REF's constant byte offset into *BIT_OFFSET;
	 give up if the combined offset does not fit a shwi.  */
      poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
      if (!boff.to_shwi (bit_offset))
	return NULL_TREE;

      if (valueize
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	base = valueize (TREE_OPERAND (base, 0));
      /* Only a valueized &decl gives us something to look into.  */
      if (!base || TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }
  else if (valueize
	   && TREE_CODE (base) == SSA_NAME)
    base = valueize (base);

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
	tree init = ctor_for_folding (base);

	/* Our semantic is exact opposite of ctor_for_folding;
	   NULL means unknown, while error_mark_node is 0.  */
	if (init == error_mark_node)
	  return NULL_TREE;
	if (!init)
	  return error_mark_node;
	return init;
      }

    case VIEW_CONVERT_EXPR:
      /* A view-convert does not change the underlying storage.  */
      return get_base_constructor (TREE_OPERAND (base, 0),
				   bit_offset, valueize);

    case ARRAY_REF:
    case COMPONENT_REF:
      /* Peel one level of the reference; only exact-extent accesses
	 (size == max_size) can be tracked by a constant bit offset.  */
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
				      &reverse);
      if (!known_size_p (max_size) || maybe_ne (size, max_size))
	return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case CONSTRUCTOR:
      return base;

    default:
      if (CONSTANT_CLASS_P (base))
	return base;

      return NULL_TREE;
    }
}
7710
35b4d3a6
MS
/* CTOR is CONSTRUCTOR of an array type.  Fold a reference of SIZE bits
   to the memory at bit OFFSET.  When non-null, TYPE is the expected
   type of the reference; otherwise the type of the referenced element
   is used instead.  When SIZE is zero, attempt to fold a reference to
   the entire element which OFFSET refers to.  Increment *SUBOFF by
   the bit offset of the accessed element.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size,
			   tree from_decl,
			   unsigned HOST_WIDE_INT *suboff)
{
  offset_int low_bound;
  offset_int elt_size;
  offset_int access_index;
  tree domain_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects make no sense.  */
      if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
	return NULL_TREE;
      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = 0;
  /* Static constructors for variably sized objects make no sense.  */
  if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
    return NULL_TREE;
  elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* When TYPE is non-null, verify that it specifies a constant-sized
     access of a multiple of the array element size.  Avoid division
     by zero below when ELT_SIZE is zero, such as with the result of
     an initializer for a zero-length array or an empty struct.  */
  if (elt_size == 0
      || (type
	  && (!TYPE_SIZE_UNIT (type)
	      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST)))
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
				 elt_size);
  access_index += low_bound;

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  unsigned HOST_WIDE_INT elt_sz = elt_size.to_uhwi ();
  if (size > elt_sz * BITS_PER_UNIT)
    {
      /* The access spans multiple array elements; try to byte-encode
	 the adjacent elements into a buffer and re-interpret it as
	 TYPE.  First check native_encode_expr constraints.  */
      if (size > MAX_BITSIZE_MODE_ANY_MODE
	  || size % BITS_PER_UNIT != 0
	  || inner_offset % BITS_PER_UNIT != 0
	  || elt_sz > MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT)
	return NULL_TREE;

      unsigned ctor_idx;
      tree val = get_array_ctor_element_at_index (ctor, access_index,
						  &ctor_idx);
      /* Past the last explicit initializer the memory is all zeros.  */
      if (!val && ctor_idx >= CONSTRUCTOR_NELTS (ctor))
	return build_zero_cst (type);

      /* native-encode adjacent ctor elements.  */
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      unsigned bufoff = 0;
      offset_int index = 0;
      offset_int max_index = access_index;
      constructor_elt *elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
      if (!val)
	val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
      else if (!CONSTANT_CLASS_P (val))
	return NULL_TREE;
      /* [INDEX, MAX_INDEX] is the array-index range the current ctor
	 element covers (RANGE_EXPR initializers cover several slots).  */
      if (!elt->index)
	;
      else if (TREE_CODE (elt->index) == RANGE_EXPR)
	{
	  index = wi::to_offset (TREE_OPERAND (elt->index, 0));
	  max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
	}
      else
	index = max_index = wi::to_offset (elt->index);
      index = wi::umax (index, access_index);
      do
	{
	  if (bufoff + elt_sz > sizeof (buf))
	    elt_sz = sizeof (buf) - bufoff;
	  int len = native_encode_expr (val, buf + bufoff, elt_sz,
					inner_offset / BITS_PER_UNIT);
	  if (len != (int) elt_sz - inner_offset / BITS_PER_UNIT)
	    return NULL_TREE;
	  inner_offset = 0;
	  bufoff += len;

	  /* Advance to the value covering the next array slot, stepping
	     to the next ctor element when the current one is exhausted;
	     slots with no explicit initializer read as zero.  */
	  access_index += 1;
	  if (wi::cmpu (access_index, index) == 0)
	    val = elt->value;
	  else if (wi::cmpu (access_index, max_index) > 0)
	    {
	      ctor_idx++;
	      if (ctor_idx >= CONSTRUCTOR_NELTS (ctor))
		{
		  val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
		  ++max_index;
		}
	      else
		{
		  elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
		  index = 0;
		  max_index = access_index;
		  if (!elt->index)
		    ;
		  else if (TREE_CODE (elt->index) == RANGE_EXPR)
		    {
		      index = wi::to_offset (TREE_OPERAND (elt->index, 0));
		      max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
		    }
		  else
		    index = max_index = wi::to_offset (elt->index);
		  index = wi::umax (index, access_index);
		  if (wi::cmpu (access_index, index) == 0)
		    val = elt->value;
		  else
		    val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
		}
	    }
	}
      while (bufoff < size / BITS_PER_UNIT);
      *suboff += size;
      return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
    }

  if (tree val = get_array_ctor_element_at_index (ctor, access_index))
    {
      if (!size && TREE_CODE (val) != CONSTRUCTOR)
	{
	  /* For the final reference to the entire accessed element
	     (SIZE is zero), reset INNER_OFFSET, disregard TYPE (which
	     may be null) in favor of the type of the element, and set
	     SIZE to the size of the accessed element.  */
	  inner_offset = 0;
	  type = TREE_TYPE (val);
	  size = elt_sz * BITS_PER_UNIT;
	}
      else if (size && access_index < CONSTRUCTOR_NELTS (ctor) - 1
	       && TREE_CODE (val) == CONSTRUCTOR
	       && (elt_sz * BITS_PER_UNIT - inner_offset) < size)
	/* If this isn't the last element in the CTOR and a CTOR itself
	   and it does not cover the whole object we are requesting give up
	   since we're not set up for combining from multiple CTORs.  */
	return NULL_TREE;

      *suboff += access_index.to_uhwi () * elt_sz * BITS_PER_UNIT;
      return fold_ctor_reference (type, val, inner_offset, size, from_decl,
				  suboff);
    }

  /* Memory not explicitly mentioned in constructor is 0 (or
     the reference is out of range).  */
  return type ? build_zero_cst (type) : NULL_TREE;
}
7880
35b4d3a6
MS
/* CTOR is CONSTRUCTOR of an aggregate or vector.  Fold a reference
   of SIZE bits to the memory at bit OFFSET.  When non-null, TYPE
   is the expected type of the reference; otherwise the type of
   the referenced member is used instead.  When SIZE is zero,
   attempt to fold a reference to the entire member which OFFSET
   refers to.  Increment *SUBOFF by the bit offset of the accessed
   member.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl,
			      unsigned HOST_WIDE_INT *suboff)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  /* Walk the initialized fields looking for the one overlapping
     the requested bit range.  */
  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);

      if (!field_size)
	{
	  /* Determine the size of the flexible array member from
	     the size of the initializer provided for it.  */
	  field_size = TYPE_SIZE (TREE_TYPE (cval));
	}

      /* Variable sized objects in static constructors makes no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      offset_int bitoffset
	= (wi::to_offset (field_offset)
	   + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
      /* Compute bit offset where the field ends.  */
      offset_int bitoffset_end;
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + wi::to_offset (field_size);
      else
	bitoffset_end = 0;

      /* Compute the bit offset of the end of the desired access.
	 As a special case, if the size of the desired access is
	 zero, assume the access is to the entire field (and let
	 the caller make any necessary adjustments by storing
	 the actual bounds of the field in FIELDBOUNDS).  */
      offset_int access_end = offset_int (offset);
      if (size)
	access_end += size;
      else
	access_end = bitoffset_end;

      /* Is there any overlap between the desired access at
	 [OFFSET, OFFSET+SIZE) and the offset of the field within
	 the object at [BITOFFSET, BITOFFSET_END)?  */
      if (wi::cmps (access_end, bitoffset) > 0
	  && (field_size == NULL_TREE
	      || wi::lts_p (offset, bitoffset_end)))
	{
	  *suboff += bitoffset.to_uhwi ();

	  if (!size && TREE_CODE (cval) != CONSTRUCTOR)
	    {
	      /* For the final reference to the entire accessed member
		 (SIZE is zero), reset OFFSET, disregard TYPE (which may
		 be null) in favor of the type of the member, and set
		 SIZE to the size of the accessed member.  */
	      offset = bitoffset.to_uhwi ();
	      type = TREE_TYPE (cval);
	      size = (bitoffset_end - bitoffset).to_uhwi ();
	    }

	  /* We do have overlap.  Now see if the field is large enough
	     to cover the access.  Give up for accesses that extend
	     beyond the end of the object or that span multiple fields.  */
	  if (wi::cmps (access_end, bitoffset_end) > 0)
	    return NULL_TREE;
	  if (offset < bitoffset)
	    return NULL_TREE;

	  /* Recurse into the field's value at the offset relative to
	     the start of the field.  */
	  offset_int inner_offset = offset_int (offset) - bitoffset;
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl, suboff);
	}
    }

  /* No initialized field overlaps the access: the memory reads as
     zero, provided we know the expected type.  */
  if (!type)
    return NULL_TREE;

  return build_zero_cst (type);
}
7983
/* CTOR is value initializing memory.  Fold a reference of TYPE and
   bit size POLY_SIZE to the memory at bit POLY_OFFSET.  When POLY_SIZE
   is zero, attempt to fold a reference to the entire subobject
   which OFFSET refers to.  This is used when folding accesses to
   string members of aggregates.  When non-null, set *SUBOFF to
   the bit offset of the accessed subobject.  */

tree
fold_ctor_reference (tree type, tree ctor, const poly_uint64 &poly_offset,
		     const poly_uint64 &poly_size, tree from_decl,
		     unsigned HOST_WIDE_INT *suboff /* = NULL */)
{
  tree ret;

  /* We found the field with exact match.  */
  if (type
      && useless_type_conversion_p (type, TREE_TYPE (ctor))
      && known_eq (poly_offset, 0U))
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* The remaining optimizations need a constant size and offset.  */
  unsigned HOST_WIDE_INT size, offset;
  if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
    return NULL_TREE;

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && !compare_tree_int (TYPE_SIZE (type), size)
      && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      if (ret)
	{
	  ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
	  if (ret)
	    STRIP_USELESS_TYPE_CONVERSION (ret);
	}
      return ret;
    }
  /* For constants and byte-aligned/sized reads try to go through
     native_encode/interpret.  */
  if (CONSTANT_CLASS_P (ctor)
      && BITS_PER_UNIT == 8
      && offset % BITS_PER_UNIT == 0
      && offset / BITS_PER_UNIT <= INT_MAX
      && size % BITS_PER_UNIT == 0
      && size <= MAX_BITSIZE_MODE_ANY_MODE
      && can_native_interpret_type_p (type))
    {
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
				    offset / BITS_PER_UNIT);
      if (len > 0)
	return native_interpret_expr (type, buf, len);
    }
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {
      /* Use a local dummy so the array/nonarray helpers can always
	 update *SUBOFF; &dummy also marks the outermost call below.  */
      unsigned HOST_WIDE_INT dummy = 0;
      if (!suboff)
	suboff = &dummy;

      tree ret;
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
	  || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
	ret = fold_array_ctor_reference (type, ctor, offset, size,
					 from_decl, suboff);
      else
	ret = fold_nonarray_ctor_reference (type, ctor, offset, size,
					    from_decl, suboff);

      /* Fall back to native_encode_initializer.  Needs to be done
	 only in the outermost fold_ctor_reference call (because it itself
	 recurses into CONSTRUCTORs) and doesn't update suboff.  */
      if (ret == NULL_TREE
	  && suboff == &dummy
	  && BITS_PER_UNIT == 8
	  && offset % BITS_PER_UNIT == 0
	  && offset / BITS_PER_UNIT <= INT_MAX
	  && size % BITS_PER_UNIT == 0
	  && size <= MAX_BITSIZE_MODE_ANY_MODE
	  && can_native_interpret_type_p (type))
	{
	  unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
	  int len = native_encode_initializer (ctor, buf, size / BITS_PER_UNIT,
					       offset / BITS_PER_UNIT);
	  if (len > 0)
	    return native_interpret_expr (type, buf, len);
	}

      return ret;
    }

  return NULL_TREE;
}
8080
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  poly_int64 offset, size, max_size;
  tree tem;
  bool reverse;

  /* Never fold away a volatile access.  */
  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (DECL_P (t))
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && poly_int_tree_p (idx))
	{
	  tree low_bound, unit_size;

	  /* If the resulting bit-offset is constant, track it.  */
	  if ((low_bound = array_ref_low_bound (t),
	       poly_int_tree_p (low_bound))
	      && (unit_size = array_ref_element_size (t),
		  tree_fits_uhwi_p (unit_size)))
	    {
	      /* Bit offset = (idx - low_bound) * unit_size * 8,
		 computed in offset_int to avoid overflow.  */
	      poly_offset_int woffset
		= wi::sext (wi::to_poly_offset (idx)
			    - wi::to_poly_offset (low_bound),
			    TYPE_PRECISION (sizetype));
	      woffset *= tree_to_uhwi (unit_size);
	      woffset *= BITS_PER_UNIT;
	      if (woffset.to_shwi (&offset))
		{
		  base = TREE_OPERAND (t, 0);
		  ctor = get_base_constructor (base, &offset, valueize);
		  /* Empty constructor.  Always fold to 0.  */
		  if (ctor == error_mark_node)
		    return build_zero_cst (TREE_TYPE (t));
		  /* Out of bound array access.  Value is undefined,
		     but don't fold.  */
		  if (maybe_lt (offset, 0))
		    return NULL_TREE;
		  /* We cannot determine ctor.  */
		  if (!ctor)
		    return NULL_TREE;
		  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					      tree_to_uhwi (unit_size)
					      * BITS_PER_UNIT,
					      base);
		}
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (!known_size_p (max_size) || maybe_ne (max_size, size))
	return NULL_TREE;
      /* We cannot determine ctor.  */
      if (!ctor)
	return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (maybe_lt (offset, 0))
	return NULL_TREE;

      tem = fold_ctor_reference (TREE_TYPE (t), ctor, offset, size, base);
      if (tem)
	return tem;

      /* For bit field reads try to read the representative and
	 adjust.  */
      if (TREE_CODE (t) == COMPONENT_REF
	  && DECL_BIT_FIELD (TREE_OPERAND (t, 1))
	  && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)))
	{
	  HOST_WIDE_INT csize, coffset;
	  tree field = TREE_OPERAND (t, 1);
	  tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (repr))
	      && size.is_constant (&csize)
	      && offset.is_constant (&coffset)
	      && (coffset % BITS_PER_UNIT != 0
		  || csize % BITS_PER_UNIT != 0)
	      && !reverse
	      && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN)
	    {
	      /* Bit offset of FIELD within its representative REPR.  */
	      poly_int64 bitoffset;
	      poly_uint64 field_offset, repr_offset;
	      if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
		  && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
		bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
	      else
		bitoffset = 0;
	      bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
			    - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
	      HOST_WIDE_INT bitoff;
	      int diff = (TYPE_PRECISION (TREE_TYPE (repr))
			  - TYPE_PRECISION (TREE_TYPE (field)));
	      if (bitoffset.is_constant (&bitoff)
		  && bitoff >= 0
		  && bitoff <= diff)
		{
		  /* Read the whole representative, then shift the field's
		     bits down into place.  */
		  offset -= bitoff;
		  size = tree_to_uhwi (DECL_SIZE (repr));

		  tem = fold_ctor_reference (TREE_TYPE (repr), ctor, offset,
					     size, base);
		  if (tem && TREE_CODE (tem) == INTEGER_CST)
		    {
		      if (!BYTES_BIG_ENDIAN)
			tem = wide_int_to_tree (TREE_TYPE (field),
						wi::lrshift (wi::to_wide (tem),
							     bitoff));
		      else
			tem = wide_int_to_tree (TREE_TYPE (field),
						wi::lrshift (wi::to_wide (tem),
							     diff - bitoff));
		      return tem;
		    }
		}
	    }
	}
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
8250
8251tree
8252fold_const_aggregate_ref (tree t)
8253{
8254 return fold_const_aggregate_ref_1 (t, NULL);
8255}
06bc3ec7 8256
85942f45 8257/* Lookup virtual method with index TOKEN in a virtual table V
ec77d61f
JH
8258 at OFFSET.
8259 Set CAN_REFER if non-NULL to false if method
8260 is not referable or if the virtual table is ill-formed (such as rewriten
8261 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
81fa35bd
MJ
8262
8263tree
85942f45
JH
8264gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
8265 tree v,
ec77d61f
JH
8266 unsigned HOST_WIDE_INT offset,
8267 bool *can_refer)
81fa35bd 8268{
85942f45
JH
8269 tree vtable = v, init, fn;
8270 unsigned HOST_WIDE_INT size;
8c311b50
JH
8271 unsigned HOST_WIDE_INT elt_size, access_index;
8272 tree domain_type;
81fa35bd 8273
ec77d61f
JH
8274 if (can_refer)
8275 *can_refer = true;
8276
9de2f554 8277 /* First of all double check we have virtual table. */
8813a647 8278 if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
ec77d61f 8279 {
ec77d61f
JH
8280 /* Pass down that we lost track of the target. */
8281 if (can_refer)
8282 *can_refer = false;
8283 return NULL_TREE;
8284 }
9de2f554 8285
2aa3da06
JH
8286 init = ctor_for_folding (v);
8287
9de2f554 8288 /* The virtual tables should always be born with constructors
2aa3da06
JH
8289 and we always should assume that they are avaialble for
8290 folding. At the moment we do not stream them in all cases,
8291 but it should never happen that ctor seem unreachable. */
8292 gcc_assert (init);
8293 if (init == error_mark_node)
8294 {
ec77d61f
JH
8295 /* Pass down that we lost track of the target. */
8296 if (can_refer)
8297 *can_refer = false;
2aa3da06
JH
8298 return NULL_TREE;
8299 }
81fa35bd 8300 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
ae7e9ddd 8301 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
85942f45 8302 offset *= BITS_PER_UNIT;
81fa35bd 8303 offset += token * size;
9de2f554 8304
8c311b50
JH
8305 /* Lookup the value in the constructor that is assumed to be array.
8306 This is equivalent to
8307 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
8308 offset, size, NULL);
8309 but in a constant time. We expect that frontend produced a simple
8310 array without indexed initializers. */
8311
8312 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
8313 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
8314 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
8315 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
8316
8317 access_index = offset / BITS_PER_UNIT / elt_size;
8318 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
8319
bf8d8309
MP
8320 /* The C++ FE can now produce indexed fields, and we check if the indexes
8321 match. */
8c311b50
JH
8322 if (access_index < CONSTRUCTOR_NELTS (init))
8323 {
8324 fn = CONSTRUCTOR_ELT (init, access_index)->value;
bf8d8309
MP
8325 tree idx = CONSTRUCTOR_ELT (init, access_index)->index;
8326 gcc_checking_assert (!idx || tree_to_uhwi (idx) == access_index);
8c311b50
JH
8327 STRIP_NOPS (fn);
8328 }
8329 else
8330 fn = NULL;
9de2f554
JH
8331
8332 /* For type inconsistent program we may end up looking up virtual method
8333 in virtual table that does not contain TOKEN entries. We may overrun
8334 the virtual table and pick up a constant or RTTI info pointer.
8335 In any case the call is undefined. */
8336 if (!fn
8337 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
8338 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
8339 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
8340 else
8341 {
8342 fn = TREE_OPERAND (fn, 0);
8343
8344 /* When cgraph node is missing and function is not public, we cannot
8345 devirtualize. This can happen in WHOPR when the actual method
8346 ends up in other partition, because we found devirtualization
8347 possibility too late. */
8348 if (!can_refer_decl_in_current_unit_p (fn, vtable))
ec77d61f
JH
8349 {
8350 if (can_refer)
8351 {
8352 *can_refer = false;
8353 return fn;
8354 }
8355 return NULL_TREE;
8356 }
9de2f554 8357 }
81fa35bd 8358
7501ca28
RG
8359 /* Make sure we create a cgraph node for functions we'll reference.
8360 They can be non-existent if the reference comes from an entry
8361 of an external vtable for example. */
d52f5295 8362 cgraph_node::get_create (fn);
7501ca28 8363
81fa35bd
MJ
8364 return fn;
8365}
8366
85942f45
JH
/* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
   is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by non-C++ produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
				  bool *can_refer)
{
  unsigned HOST_WIDE_INT offset;
  tree v;

  v = BINFO_VTABLE (known_binfo);
  /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;

  /* Decompose the vtable pointer expression into the underlying vtable
     VAR_DECL and a byte offset into it.  */
  if (!vtable_pointer_value_to_vtable (v, &v, &offset))
    {
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
}
8395
737f500a
RB
/* Given a pointer value T, return a simplified version of an
   indirection through T, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view.  */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  /* Give up on ref-all pointers: the access may alias anything.  */
  if (!POINTER_TYPE_P (subtype)
      || TYPE_REF_CAN_ALIAS_ALL (ptype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
	return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (TREE_CODE (min_val) == INTEGER_CST)
	    return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* *(p + CST) -> ... */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
	  unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
	  tree part_width = TYPE_SIZE (type);
	  unsigned HOST_WIDE_INT part_widthi
	    = tree_to_shwi (part_width) / BITS_PER_UNIT;
	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	  tree index = bitsize_int (indexi);
	  /* Only fold when the element index is known to be in range
	     of the (possibly variable-length) vector.  */
	  if (known_lt (offset / part_widthi,
			TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
	    return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
				part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
	{
	  tree size = TYPE_SIZE_UNIT (type);
	  if (tree_int_cst_equal (size, off))
	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
	}

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    wide_int_to_tree (ptype, wi::to_wide (off)));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      /* Recurse on the pointer operand; fall back to a plain
	 INDIRECT_REF when it does not simplify.  */
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
19e51b40
JJ
8516
/* Return true if CODE is an operation that when operating on signed
   integer types involves undefined behavior on overflow and the
   operation can be expressed with unsigned arithmetic.  */

bool
arith_code_with_undefined_signed_overflow (tree_code code)
{
  switch (code)
    {
    case ABS_EXPR:
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case NEGATE_EXPR:
    case POINTER_PLUS_EXPR:
      return true;
    default:
      return false;
    }
}
8537
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple *stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  /* ABS_EXPR has a dedicated unsigned counterpart; for the other codes
     it suffices to convert each operand to the unsigned type.  */
  if (gimple_assign_rhs_code (stmt) == ABS_EXPR)
    gimple_assign_set_rhs_code (stmt, ABSU_EXPR);
  else
    for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
      {
	tree op = gimple_op (stmt, i);
	op = gimple_convert (&stmts, type, op);
	gimple_set_op (stmt, i, op);
      }
  /* Compute into a fresh unsigned SSA name and convert back to the
     original type afterwards.  */
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_set_modified (stmt, true);
  gimple_seq_add_stmt (&stmts, stmt);
  gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
d4f5cd5e 8578
3d2cf79f 8579
c26de36d
RB
/* The valueization hook we use for the gimple_build API simplification.
   This makes us match fold_buildN behavior by only combining with
   statements in the sequence(s) we are currently building.  */

static tree
gimple_build_valueize (tree op)
{
  /* A NULL basic block means the defining statement is still only in a
     sequence being built, not yet inserted into the CFG.  */
  if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
    return op;
  return NULL_TREE;
}
8591
/* Build the expression CODE OP0 of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum tree_code code, tree type, tree op0)
{
  tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
  if (!res)
    {
      res = create_tmp_reg_or_ssa_name (type);
      gimple *stmt;
      /* These codes are GIMPLE_SINGLE_RHS and must wrap the operand in
	 a GENERIC tree of the same code.  */
      if (code == REALPART_EXPR
	  || code == IMAGPART_EXPR
	  || code == VIEW_CONVERT_EXPR)
	stmt = gimple_build_assign (res, code, build1 (code, type, op0));
      else
	stmt = gimple_build_assign (res, code, op0);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
8617
/* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum tree_code code, tree type, tree op0, tree op1)
{
  tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
  if (!res)
    {
      res = create_tmp_reg_or_ssa_name (type);
      gimple *stmt = gimple_build_assign (res, code, op0, op1);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
8637
/* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum tree_code code, tree type, tree op0, tree op1, tree op2)
{
  tree res = gimple_simplify (code, type, op0, op1, op2,
			      seq, gimple_build_valueize);
  if (!res)
    {
      res = create_tmp_reg_or_ssa_name (type);
      gimple *stmt;
      /* BIT_FIELD_REF is GIMPLE_SINGLE_RHS and must be a GENERIC tree.  */
      if (code == BIT_FIELD_REF)
	stmt = gimple_build_assign (res, code,
				    build3 (code, type, op0, op1, op2));
      else
	stmt = gimple_build_assign (res, code, op0, op1, op2);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
8663
93a73251
MM
/* Build the call FN () with a result of type TYPE (or no result if TYPE is
   void) with a location LOC.  Returns the built expression value (or NULL_TREE
   if TYPE is void) and appends statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc, combined_fn fn, tree type)
{
  tree res = NULL_TREE;
  gcall *stmt;
  if (internal_fn_p (fn))
    stmt = gimple_build_call_internal (as_internal_fn (fn), 0);
  else
    {
      tree decl = builtin_decl_implicit (as_builtin_fn (fn));
      stmt = gimple_build_call (decl, 0);
    }
  if (!VOID_TYPE_P (type))
    {
      res = create_tmp_reg_or_ssa_name (type);
      gimple_call_set_lhs (stmt, res);
    }
  gimple_set_location (stmt, loc);
  gimple_seq_add_stmt_without_update (seq, stmt);
  return res;
}
8689
3d2cf79f
RB
/* Build the call FN (ARG0) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
	      tree type, tree arg0)
{
  tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
  if (!res)
    {
      gcall *stmt;
      if (internal_fn_p (fn))
	stmt = gimple_build_call_internal (as_internal_fn (fn), 1, arg0);
      else
	{
	  tree decl = builtin_decl_implicit (as_builtin_fn (fn));
	  stmt = gimple_build_call (decl, 1, arg0);
	}
      if (!VOID_TYPE_P (type))
	{
	  res = create_tmp_reg_or_ssa_name (type);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
8721
/* Build the call FN (ARG0, ARG1) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
	      tree type, tree arg0, tree arg1)
{
  tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
  if (!res)
    {
      gcall *stmt;
      if (internal_fn_p (fn))
	stmt = gimple_build_call_internal (as_internal_fn (fn), 2, arg0, arg1);
      else
	{
	  tree decl = builtin_decl_implicit (as_builtin_fn (fn));
	  stmt = gimple_build_call (decl, 2, arg0, arg1);
	}
      if (!VOID_TYPE_P (type))
	{
	  res = create_tmp_reg_or_ssa_name (type);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
8753
/* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
	      tree type, tree arg0, tree arg1, tree arg2)
{
  tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
			      seq, gimple_build_valueize);
  if (!res)
    {
      gcall *stmt;
      if (internal_fn_p (fn))
	stmt = gimple_build_call_internal (as_internal_fn (fn),
					   3, arg0, arg1, arg2);
      else
	{
	  tree decl = builtin_decl_implicit (as_builtin_fn (fn));
	  stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
	}
      if (!VOID_TYPE_P (type))
	{
	  res = create_tmp_reg_or_ssa_name (type);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
8787
/* Build the conversion (TYPE) OP with a result of type TYPE
   with location LOC if such conversion is necessary in GIMPLE,
   simplifying it first.
   Returns the built expression value and appends
   statements possibly defining it to SEQ.  */

tree
gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
{
  /* No statement is needed when the types are already compatible.  */
  if (useless_type_conversion_p (type, TREE_TYPE (op)))
    return op;
  return gimple_build (seq, loc, NOP_EXPR, type, op);
}
68e57f04 8801
74e3c262
RB
/* Build the conversion (ptrofftype) OP with a result of a type
   compatible with ptrofftype with location LOC if such conversion
   is necessary in GIMPLE, simplifying it first.
   Returns the built expression value and appends
   statements possibly defining it to SEQ.  */

tree
gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
{
  if (ptrofftype_p (TREE_TYPE (op)))
    return op;
  return gimple_convert (seq, loc, sizetype, op);
}
8815
e7c45b66
RS
/* Build a vector of type TYPE in which each element has the value OP.
   Return a gimple value for the result, appending any new statements
   to SEQ.  */

tree
gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
			      tree op)
{
  /* A variable-length vector with a non-constant element cannot be
     expressed as a VECTOR_CST; use VEC_DUPLICATE_EXPR instead.  */
  if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
      && !CONSTANT_CLASS_P (op))
    return gimple_build (seq, loc, VEC_DUPLICATE_EXPR, type, op);

  tree res, vec = build_vector_from_val (type, op);
  if (is_gimple_val (vec))
    return vec;
  if (gimple_in_ssa_p (cfun))
    res = make_ssa_name (type);
  else
    res = create_tmp_reg (type);
  gimple *stmt = gimple_build_assign (res, vec);
  gimple_set_location (stmt, loc);
  gimple_seq_add_stmt_without_update (seq, stmt);
  return res;
}
8840
abe73c3d
RS
/* Build a vector from BUILDER, handling the case in which some elements
   are non-constant.  Return a gimple value for the result, appending any
   new instructions to SEQ.

   BUILDER must not have a stepped encoding on entry.  This is because
   the function is not geared up to handle the arithmetic that would
   be needed in the variable case, and any code building a vector that
   is known to be constant should use BUILDER->build () directly.  */

tree
gimple_build_vector (gimple_seq *seq, location_t loc,
		     tree_vector_builder *builder)
{
  gcc_assert (builder->nelts_per_pattern () <= 2);
  unsigned int encoded_nelts = builder->encoded_nelts ();
  /* If any encoded element is non-constant, fall back to building an
     explicit CONSTRUCTOR assigned to a temporary.  */
  for (unsigned int i = 0; i < encoded_nelts; ++i)
    if (!CONSTANT_CLASS_P ((*builder)[i]))
      {
	tree type = builder->type ();
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
	vec<constructor_elt, va_gc> *v;
	vec_alloc (v, nelts);
	for (i = 0; i < nelts; ++i)
	  CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));

	tree res;
	if (gimple_in_ssa_p (cfun))
	  res = make_ssa_name (type);
	else
	  res = create_tmp_reg (type);
	gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
	gimple_set_location (stmt, loc);
	gimple_seq_add_stmt_without_update (seq, stmt);
	return res;
      }
  /* All elements are constant: emit a VECTOR_CST directly.  */
  return builder->build ();
}
8878
93a73251
MM
/* Emit gimple statements into &stmts that take a value given in OLD_SIZE
   and generate a value guaranteed to be rounded upwards to ALIGN.
   ALIGN is expected to be a power of two (the mask arithmetic below
   relies on it).

   Return the tree node representing this size, it is of TREE_TYPE TYPE.  */

tree
gimple_build_round_up (gimple_seq *seq, location_t loc, tree type,
		       tree old_size, unsigned HOST_WIDE_INT align)
{
  unsigned HOST_WIDE_INT tg_mask = align - 1;
  /* tree new_size = (old_size + tg_mask) & ~tg_mask;  */
  gcc_assert (INTEGRAL_TYPE_P (type));
  tree tree_mask = build_int_cst (type, tg_mask);
  tree oversize = gimple_build (seq, loc, PLUS_EXPR, type, old_size,
				tree_mask);

  /* -align == ~tg_mask for a power-of-two ALIGN.  */
  tree mask = build_int_cst (type, -align);
  return gimple_build (seq, loc, BIT_AND_EXPR, type, oversize, mask);
}
8898
68e57f04
RS
/* Return true if the result of assignment STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				   int depth)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  tree type = TREE_TYPE (gimple_assign_lhs (stmt));
  /* Dispatch on the RHS class to the matching fold-const.c helper.  */
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
					     type,
					     gimple_assign_rhs1 (stmt),
					     strict_overflow_p, depth);
    case GIMPLE_BINARY_RHS:
      return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
					      type,
					      gimple_assign_rhs1 (stmt),
					      gimple_assign_rhs2 (stmt),
					      strict_overflow_p, depth);
    case GIMPLE_TERNARY_RHS:
      return false;
    case GIMPLE_SINGLE_RHS:
      return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
					      strict_overflow_p, depth);
    case GIMPLE_INVALID_RHS:
      break;
    }
  gcc_unreachable ();
}
8933
/* Return true if return value of call STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				 int depth)
{
  tree arg0 = gimple_call_num_args (stmt) > 0 ?
    gimple_call_arg (stmt, 0) : NULL_TREE;
  tree arg1 = gimple_call_num_args (stmt) > 1 ?
    gimple_call_arg (stmt, 1) : NULL_TREE;
  tree lhs = gimple_call_lhs (stmt);
  /* Without an LHS there is no value to reason about.  */
  return (lhs
	  && tree_call_nonnegative_warnv_p (TREE_TYPE (lhs),
					    gimple_call_combined_fn (stmt),
					    arg0, arg1,
					    strict_overflow_p, depth));
}
8954
4534c203
RB
/* Return true if the result of PHI STMT is known to be non-negative,
   i.e. if every incoming argument is known non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				int depth)
{
  for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
    {
      tree arg = gimple_phi_arg_def (stmt, i);
      if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
	return false;
    }
  return true;
}
8972
68e57f04
RS
/* Return true if STMT is known to compute a non-negative value.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

bool
gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				 int depth)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
						depth);
    case GIMPLE_CALL:
      return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
					      depth);
    case GIMPLE_PHI:
      return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
					     depth);
    default:
      /* Conservatively assume anything else may be negative.  */
      return false;
    }
}
67dbe582
RS
8997
/* Return true if the floating-point value computed by assignment STMT
   is known to have an integer value.  We also allow +Inf, -Inf and NaN
   to be considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  /* Dispatch on the RHS class to the matching fold-const.c helper.  */
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
					  gimple_assign_rhs1 (stmt), depth);
    case GIMPLE_BINARY_RHS:
      return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
					   gimple_assign_rhs1 (stmt),
					   gimple_assign_rhs2 (stmt), depth);
    case GIMPLE_TERNARY_RHS:
      return false;
    case GIMPLE_SINGLE_RHS:
      return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
    case GIMPLE_INVALID_RHS:
      break;
    }
  gcc_unreachable ();
}
9026
/* Return true if the floating-point value computed by call STMT is known
   to have an integer value.  We also allow +Inf, -Inf and NaN to be
   considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_call_integer_valued_real_p (gimple *stmt, int depth)
{
  tree arg0 = (gimple_call_num_args (stmt) > 0
	       ? gimple_call_arg (stmt, 0)
	       : NULL_TREE);
  tree arg1 = (gimple_call_num_args (stmt) > 1
	       ? gimple_call_arg (stmt, 1)
	       : NULL_TREE);
  return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
				     arg0, arg1, depth);
}
9045
/* Return true if the floating-point result of phi STMT is known to have
   an integer value.  We also allow +Inf, -Inf and NaN to be considered
   integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
{
  /* The PHI result is integer-valued only if every incoming argument is.  */
  for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
    {
      tree arg = gimple_phi_arg_def (stmt, i);
      if (!integer_valued_real_single_p (arg, depth + 1))
	return false;
    }
  return true;
}
9063
/* Return true if the floating-point value computed by STMT is known
   to have an integer value.  We also allow +Inf, -Inf and NaN to be
   considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

bool
gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      return gimple_assign_integer_valued_real_p (stmt, depth);
    case GIMPLE_CALL:
      return gimple_call_integer_valued_real_p (stmt, depth);
    case GIMPLE_PHI:
      return gimple_phi_integer_valued_real_p (stmt, depth);
    default:
      /* Conservatively assume any other statement may produce a
	 non-integer value.  */
      return false;
    }
}