]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/gimple-fold.c
PR c/100719 - missing -Wvla-parameter on a mismatch in second parameter
[thirdparty/gcc.git] / gcc / gimple-fold.c
CommitLineData
cbdd87d4 1/* Statement simplification on GIMPLE.
99dee823 2 Copyright (C) 2010-2021 Free Software Foundation, Inc.
cbdd87d4
RG
3 Split out from tree-ssa-ccp.c.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it
8under the terms of the GNU General Public License as published by the
9Free Software Foundation; either version 3, or (at your option) any
10later version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT
13ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
c7131fb2 24#include "backend.h"
957060b5
AM
25#include "target.h"
26#include "rtl.h"
cbdd87d4 27#include "tree.h"
c7131fb2 28#include "gimple.h"
957060b5 29#include "predict.h"
c7131fb2 30#include "ssa.h"
957060b5
AM
31#include "cgraph.h"
32#include "gimple-pretty-print.h"
cc8bea0a 33#include "gimple-ssa-warn-restrict.h"
c7131fb2 34#include "fold-const.h"
36566b39
PK
35#include "stmt.h"
36#include "expr.h"
37#include "stor-layout.h"
7ee2468b 38#include "dumpfile.h"
2fb9a547 39#include "gimple-fold.h"
45b0be94 40#include "gimplify.h"
5be5c238 41#include "gimple-iterator.h"
442b4905
AM
42#include "tree-into-ssa.h"
43#include "tree-dfa.h"
025d57f0 44#include "tree-object-size.h"
7a300452 45#include "tree-ssa.h"
cbdd87d4 46#include "tree-ssa-propagate.h"
450ad0cd 47#include "ipa-utils.h"
4484a35a 48#include "tree-ssa-address.h"
862d0b35 49#include "langhooks.h"
19e51b40 50#include "gimplify-me.h"
2b5f0895 51#include "dbgcnt.h"
9b2b7279 52#include "builtins.h"
e0ee10ed
RB
53#include "tree-eh.h"
54#include "gimple-match.h"
48126138 55#include "gomp-constants.h"
f869c12f 56#include "optabs-query.h"
629b3d75 57#include "omp-general.h"
abd3a68c 58#include "tree-cfg.h"
a918bfbf 59#include "fold-const-call.h"
314e6352
ML
60#include "stringpool.h"
61#include "attribs.h"
45b2222a 62#include "asan.h"
025d57f0
MS
63#include "diagnostic-core.h"
64#include "intl.h"
6a33d0ff 65#include "calls.h"
5ebaa477 66#include "tree-vector-builder.h"
5d0d5d68 67#include "tree-ssa-strlen.h"
e7868dc6 68#include "varasm.h"
cbdd87d4 69
598f7235
MS
70enum strlen_range_kind {
71 /* Compute the exact constant string length. */
72 SRK_STRLEN,
73 /* Compute the maximum constant string length. */
74 SRK_STRLENMAX,
75 /* Compute a range of string lengths bounded by object sizes. When
76 the length of a string cannot be determined, consider as the upper
77 bound the size of the enclosing object the string may be a member
78 or element of. Also determine the size of the largest character
79 array the string may refer to. */
80 SRK_LENRANGE,
598f7235
MS
81 /* Determine the integer value of the argument (not string length). */
82 SRK_INT_VALUE
83};
84
03c4a945
MS
85static bool
86get_range_strlen (tree, bitmap *, strlen_range_kind, c_strlen_data *, unsigned);
fb471a13 87
b3b9f3d0 88/* Return true when DECL can be referenced from current unit.
c44c2088
JH
89 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
90 We can get declarations that are not possible to reference for various
91 reasons:
1389294c 92
1389294c
JH
93 1) When analyzing C++ virtual tables.
94 C++ virtual tables do have known constructors even
95 when they are keyed to other compilation unit.
96 Those tables can contain pointers to methods and vars
97 in other units. Those methods have both STATIC and EXTERNAL
98 set.
99 2) In WHOPR mode devirtualization might lead to reference
100 to method that was partitioned elsehwere.
101 In this case we have static VAR_DECL or FUNCTION_DECL
102 that has no corresponding callgraph/varpool node
b3b9f3d0
JH
103 declaring the body.
104 3) COMDAT functions referred by external vtables that
3e89949e 105 we devirtualize only during final compilation stage.
b3b9f3d0
JH
106 At this time we already decided that we will not output
107 the function body and thus we can't reference the symbol
108 directly. */
109
1389294c 110static bool
c44c2088 111can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
1389294c 112{
2c8326a5 113 varpool_node *vnode;
1389294c 114 struct cgraph_node *node;
5e20cdc9 115 symtab_node *snode;
c44c2088 116
00de328a 117 if (DECL_ABSTRACT_P (decl))
1632a686
JH
118 return false;
119
120 /* We are concerned only about static/external vars and functions. */
121 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
8813a647 122 || !VAR_OR_FUNCTION_DECL_P (decl))
1632a686
JH
123 return true;
124
d4babd37
JM
125 /* Static objects can be referred only if they are defined and not optimized
126 out yet. */
127 if (!TREE_PUBLIC (decl))
1632a686 128 {
d4babd37
JM
129 if (DECL_EXTERNAL (decl))
130 return false;
3aaf0529
JH
131 /* Before we start optimizing unreachable code we can be sure all
132 static objects are defined. */
3dafb85c 133 if (symtab->function_flags_ready)
3aaf0529 134 return true;
d52f5295 135 snode = symtab_node::get (decl);
3aaf0529 136 if (!snode || !snode->definition)
1632a686 137 return false;
7de90a6c 138 node = dyn_cast <cgraph_node *> (snode);
a62bfab5 139 return !node || !node->inlined_to;
1632a686
JH
140 }
141
6da8be89 142 /* We will later output the initializer, so we can refer to it.
c44c2088 143 So we are concerned only when DECL comes from initializer of
3aaf0529 144 external var or var that has been optimized out. */
c44c2088 145 if (!from_decl
8813a647 146 || !VAR_P (from_decl)
3aaf0529 147 || (!DECL_EXTERNAL (from_decl)
9041d2e6 148 && (vnode = varpool_node::get (from_decl)) != NULL
3aaf0529 149 && vnode->definition)
6da8be89 150 || (flag_ltrans
9041d2e6 151 && (vnode = varpool_node::get (from_decl)) != NULL
6adda80b 152 && vnode->in_other_partition))
c44c2088 153 return true;
c44c2088
JH
154 /* We are folding reference from external vtable. The vtable may reffer
155 to a symbol keyed to other compilation unit. The other compilation
156 unit may be in separate DSO and the symbol may be hidden. */
157 if (DECL_VISIBILITY_SPECIFIED (decl)
158 && DECL_EXTERNAL (decl)
a33a931b 159 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
d52f5295 160 && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
c44c2088 161 return false;
b3b9f3d0
JH
162 /* When function is public, we always can introduce new reference.
163 Exception are the COMDAT functions where introducing a direct
164 reference imply need to include function body in the curren tunit. */
165 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
166 return true;
3aaf0529
JH
167 /* We have COMDAT. We are going to check if we still have definition
168 or if the definition is going to be output in other partition.
169 Bypass this when gimplifying; all needed functions will be produced.
c44c2088
JH
170
171 As observed in PR20991 for already optimized out comdat virtual functions
073a8998 172 it may be tempting to not necessarily give up because the copy will be
c44c2088
JH
173 output elsewhere when corresponding vtable is output.
174 This is however not possible - ABI specify that COMDATs are output in
175 units where they are used and when the other unit was compiled with LTO
176 it is possible that vtable was kept public while the function itself
177 was privatized. */
3dafb85c 178 if (!symtab->function_flags_ready)
b3b9f3d0 179 return true;
c44c2088 180
d52f5295 181 snode = symtab_node::get (decl);
3aaf0529
JH
182 if (!snode
183 || ((!snode->definition || DECL_EXTERNAL (decl))
184 && (!snode->in_other_partition
185 || (!snode->forced_by_abi && !snode->force_output))))
186 return false;
187 node = dyn_cast <cgraph_node *> (snode);
a62bfab5 188 return !node || !node->inlined_to;
1389294c
JH
189}
190
a15ebbcd
ML
191/* Create a temporary for TYPE for a statement STMT. If the current function
192 is in SSA form, a SSA name is created. Otherwise a temporary register
193 is made. */
194
edc19e03
WS
195tree
196create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
a15ebbcd
ML
197{
198 if (gimple_in_ssa_p (cfun))
199 return make_ssa_name (type, stmt);
200 else
201 return create_tmp_reg (type);
202}
203
0038d4e0 204/* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
c44c2088
JH
205 acceptable form for is_gimple_min_invariant.
206 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
17f39a39
JH
207
208tree
c44c2088 209canonicalize_constructor_val (tree cval, tree from_decl)
17f39a39 210{
37f808c4
RB
211 if (CONSTANT_CLASS_P (cval))
212 return cval;
213
50619002
EB
214 tree orig_cval = cval;
215 STRIP_NOPS (cval);
315f5f1b
RG
216 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
217 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
17f39a39 218 {
315f5f1b
RG
219 tree ptr = TREE_OPERAND (cval, 0);
220 if (is_gimple_min_invariant (ptr))
221 cval = build1_loc (EXPR_LOCATION (cval),
222 ADDR_EXPR, TREE_TYPE (ptr),
223 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
224 ptr,
225 fold_convert (ptr_type_node,
226 TREE_OPERAND (cval, 1))));
17f39a39
JH
227 }
228 if (TREE_CODE (cval) == ADDR_EXPR)
229 {
5a27a197
RG
230 tree base = NULL_TREE;
231 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
ca5f4331
MM
232 {
233 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
234 if (base)
235 TREE_OPERAND (cval, 0) = base;
236 }
5a27a197
RG
237 else
238 base = get_base_address (TREE_OPERAND (cval, 0));
7501ca28
RG
239 if (!base)
240 return NULL_TREE;
b3b9f3d0 241
8813a647 242 if (VAR_OR_FUNCTION_DECL_P (base)
c44c2088 243 && !can_refer_decl_in_current_unit_p (base, from_decl))
1389294c 244 return NULL_TREE;
13f92e8d
JJ
245 if (TREE_TYPE (base) == error_mark_node)
246 return NULL_TREE;
8813a647 247 if (VAR_P (base))
a076632e
RB
248 /* ??? We should be able to assert that TREE_ADDRESSABLE is set,
249 but since the use can be in a debug stmt we can't. */
250 ;
7501ca28
RG
251 else if (TREE_CODE (base) == FUNCTION_DECL)
252 {
253 /* Make sure we create a cgraph node for functions we'll reference.
254 They can be non-existent if the reference comes from an entry
255 of an external vtable for example. */
d52f5295 256 cgraph_node::get_create (base);
7501ca28 257 }
0038d4e0 258 /* Fixup types in global initializers. */
73aef89e
RG
259 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
260 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
50619002
EB
261
262 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
263 cval = fold_convert (TREE_TYPE (orig_cval), cval);
264 return cval;
17f39a39 265 }
37f808c4
RB
266 /* In CONSTRUCTORs we may see unfolded constants like (int (*) ()) 0. */
267 if (TREE_CODE (cval) == INTEGER_CST)
268 {
269 if (TREE_OVERFLOW_P (cval))
270 cval = drop_tree_overflow (cval);
271 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
272 cval = fold_convert (TREE_TYPE (orig_cval), cval);
273 return cval;
274 }
50619002 275 return orig_cval;
17f39a39 276}
cbdd87d4
RG
277
278/* If SYM is a constant variable with known value, return the value.
279 NULL_TREE is returned otherwise. */
280
281tree
282get_symbol_constant_value (tree sym)
283{
6a6dac52
JH
284 tree val = ctor_for_folding (sym);
285 if (val != error_mark_node)
cbdd87d4 286 {
cbdd87d4
RG
287 if (val)
288 {
9d60be38 289 val = canonicalize_constructor_val (unshare_expr (val), sym);
1389294c 290 if (val && is_gimple_min_invariant (val))
17f39a39 291 return val;
1389294c
JH
292 else
293 return NULL_TREE;
cbdd87d4
RG
294 }
295 /* Variables declared 'const' without an initializer
296 have zero as the initializer if they may not be
297 overridden at link or run time. */
298 if (!val
b8a8c472 299 && is_gimple_reg_type (TREE_TYPE (sym)))
e8160c9a 300 return build_zero_cst (TREE_TYPE (sym));
cbdd87d4
RG
301 }
302
303 return NULL_TREE;
304}
305
306
cbdd87d4 307
0bf8cd9d
RB
308/* Subroutine of fold_stmt. We perform constant folding of the
309 memory reference tree EXPR. */
cbdd87d4
RG
310
311static tree
0bf8cd9d 312maybe_fold_reference (tree expr)
cbdd87d4 313{
2301a394 314 tree result = NULL_TREE;
cbdd87d4 315
f0eddb90
RG
316 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
317 || TREE_CODE (expr) == REALPART_EXPR
318 || TREE_CODE (expr) == IMAGPART_EXPR)
319 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
2301a394 320 result = fold_unary_loc (EXPR_LOCATION (expr),
f0eddb90
RG
321 TREE_CODE (expr),
322 TREE_TYPE (expr),
2301a394
RB
323 TREE_OPERAND (expr, 0));
324 else if (TREE_CODE (expr) == BIT_FIELD_REF
325 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
326 result = fold_ternary_loc (EXPR_LOCATION (expr),
327 TREE_CODE (expr),
328 TREE_TYPE (expr),
329 TREE_OPERAND (expr, 0),
330 TREE_OPERAND (expr, 1),
331 TREE_OPERAND (expr, 2));
332 else
333 result = fold_const_aggregate_ref (expr);
f0eddb90 334
2301a394 335 if (result && is_gimple_min_invariant (result))
f0eddb90 336 return result;
cbdd87d4 337
cbdd87d4
RG
338 return NULL_TREE;
339}
340
52a5515e
RB
341/* Return true if EXPR is an acceptable right-hand-side for a
342 GIMPLE assignment. We validate the entire tree, not just
343 the root node, thus catching expressions that embed complex
344 operands that are not permitted in GIMPLE. This function
345 is needed because the folding routines in fold-const.c
346 may return such expressions in some cases, e.g., an array
347 access with an embedded index addition. It may make more
348 sense to have folding routines that are sensitive to the
349 constraints on GIMPLE operands, rather than abandoning any
350 any attempt to fold if the usual folding turns out to be too
351 aggressive. */
352
353bool
354valid_gimple_rhs_p (tree expr)
355{
356 enum tree_code code = TREE_CODE (expr);
357
358 switch (TREE_CODE_CLASS (code))
359 {
360 case tcc_declaration:
361 if (!is_gimple_variable (expr))
362 return false;
363 break;
364
365 case tcc_constant:
366 /* All constants are ok. */
367 break;
368
369 case tcc_comparison:
370 /* GENERIC allows comparisons with non-boolean types, reject
371 those for GIMPLE. Let vector-typed comparisons pass - rules
372 for GENERIC and GIMPLE are the same here. */
373 if (!(INTEGRAL_TYPE_P (TREE_TYPE (expr))
374 && (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE
375 || TYPE_PRECISION (TREE_TYPE (expr)) == 1))
376 && ! VECTOR_TYPE_P (TREE_TYPE (expr)))
377 return false;
378
379 /* Fallthru. */
380 case tcc_binary:
381 if (!is_gimple_val (TREE_OPERAND (expr, 0))
382 || !is_gimple_val (TREE_OPERAND (expr, 1)))
383 return false;
384 break;
385
386 case tcc_unary:
387 if (!is_gimple_val (TREE_OPERAND (expr, 0)))
388 return false;
389 break;
390
391 case tcc_expression:
392 switch (code)
393 {
394 case ADDR_EXPR:
395 {
396 tree t;
397 if (is_gimple_min_invariant (expr))
398 return true;
399 t = TREE_OPERAND (expr, 0);
400 while (handled_component_p (t))
401 {
402 /* ??? More checks needed, see the GIMPLE verifier. */
403 if ((TREE_CODE (t) == ARRAY_REF
404 || TREE_CODE (t) == ARRAY_RANGE_REF)
405 && !is_gimple_val (TREE_OPERAND (t, 1)))
406 return false;
407 t = TREE_OPERAND (t, 0);
408 }
409 if (!is_gimple_id (t))
410 return false;
411 }
412 break;
413
414 default:
415 if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
416 {
417 if ((code == COND_EXPR
418 ? !is_gimple_condexpr (TREE_OPERAND (expr, 0))
419 : !is_gimple_val (TREE_OPERAND (expr, 0)))
420 || !is_gimple_val (TREE_OPERAND (expr, 1))
421 || !is_gimple_val (TREE_OPERAND (expr, 2)))
422 return false;
423 break;
424 }
425 return false;
426 }
427 break;
428
429 case tcc_vl_exp:
430 return false;
431
432 case tcc_exceptional:
433 if (code == CONSTRUCTOR)
434 {
435 unsigned i;
436 tree elt;
437 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (expr), i, elt)
438 if (!is_gimple_val (elt))
439 return false;
440 return true;
441 }
442 if (code != SSA_NAME)
443 return false;
444 break;
445
446 case tcc_reference:
447 if (code == BIT_FIELD_REF)
448 return is_gimple_val (TREE_OPERAND (expr, 0));
449 return false;
450
451 default:
452 return false;
453 }
454
455 return true;
456}
457
cbdd87d4
RG
458
459/* Attempt to fold an assignment statement pointed-to by SI. Returns a
460 replacement rhs for the statement or NULL_TREE if no simplification
461 could be made. It is assumed that the operands have been previously
462 folded. */
463
464static tree
465fold_gimple_assign (gimple_stmt_iterator *si)
466{
355fe088 467 gimple *stmt = gsi_stmt (*si);
cbdd87d4
RG
468 enum tree_code subcode = gimple_assign_rhs_code (stmt);
469 location_t loc = gimple_location (stmt);
470
471 tree result = NULL_TREE;
472
473 switch (get_gimple_rhs_class (subcode))
474 {
475 case GIMPLE_SINGLE_RHS:
476 {
477 tree rhs = gimple_assign_rhs1 (stmt);
478
8c00ba08
JW
479 if (TREE_CLOBBER_P (rhs))
480 return NULL_TREE;
481
4e71066d 482 if (REFERENCE_CLASS_P (rhs))
0bf8cd9d 483 return maybe_fold_reference (rhs);
cbdd87d4 484
bdf37f7a
JH
485 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
486 {
487 tree val = OBJ_TYPE_REF_EXPR (rhs);
488 if (is_gimple_min_invariant (val))
489 return val;
f8a39967 490 else if (flag_devirtualize && virtual_method_call_p (rhs))
bdf37f7a
JH
491 {
492 bool final;
493 vec <cgraph_node *>targets
f8a39967 494 = possible_polymorphic_call_targets (rhs, stmt, &final);
2b5f0895 495 if (final && targets.length () <= 1 && dbg_cnt (devirt))
bdf37f7a 496 {
2b5f0895
XDL
497 if (dump_enabled_p ())
498 {
4f5b9c80 499 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
2b5f0895
XDL
500 "resolving virtual function address "
501 "reference to function %s\n",
502 targets.length () == 1
503 ? targets[0]->name ()
3ef276e4 504 : "NULL");
2b5f0895 505 }
3ef276e4
RB
506 if (targets.length () == 1)
507 {
508 val = fold_convert (TREE_TYPE (val),
509 build_fold_addr_expr_loc
510 (loc, targets[0]->decl));
511 STRIP_USELESS_TYPE_CONVERSION (val);
512 }
513 else
67914693
SL
514 /* We cannot use __builtin_unreachable here because it
515 cannot have address taken. */
3ef276e4 516 val = build_int_cst (TREE_TYPE (val), 0);
bdf37f7a
JH
517 return val;
518 }
519 }
bdf37f7a 520 }
7524f419 521
cbdd87d4
RG
522 else if (TREE_CODE (rhs) == ADDR_EXPR)
523 {
70f34814 524 tree ref = TREE_OPERAND (rhs, 0);
0bf8cd9d
RB
525 if (TREE_CODE (ref) == MEM_REF
526 && integer_zerop (TREE_OPERAND (ref, 1)))
7524f419 527 {
0bf8cd9d
RB
528 result = TREE_OPERAND (ref, 0);
529 if (!useless_type_conversion_p (TREE_TYPE (rhs),
530 TREE_TYPE (result)))
531 result = build1 (NOP_EXPR, TREE_TYPE (rhs), result);
532 return result;
7524f419 533 }
cbdd87d4
RG
534 }
535
536 else if (TREE_CODE (rhs) == CONSTRUCTOR
7524f419 537 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
cbdd87d4
RG
538 {
539 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
540 unsigned i;
541 tree val;
542
543 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
7524f419 544 if (! CONSTANT_CLASS_P (val))
cbdd87d4
RG
545 return NULL_TREE;
546
547 return build_vector_from_ctor (TREE_TYPE (rhs),
548 CONSTRUCTOR_ELTS (rhs));
549 }
550
ca8e8301
RB
551 else if (DECL_P (rhs)
552 && is_gimple_reg_type (TREE_TYPE (rhs)))
9d60be38 553 return get_symbol_constant_value (rhs);
cbdd87d4
RG
554 }
555 break;
556
557 case GIMPLE_UNARY_RHS:
cbdd87d4
RG
558 break;
559
560 case GIMPLE_BINARY_RHS:
cbdd87d4
RG
561 break;
562
0354c0c7 563 case GIMPLE_TERNARY_RHS:
5c099d40
RB
564 result = fold_ternary_loc (loc, subcode,
565 TREE_TYPE (gimple_assign_lhs (stmt)),
566 gimple_assign_rhs1 (stmt),
567 gimple_assign_rhs2 (stmt),
568 gimple_assign_rhs3 (stmt));
0354c0c7
BS
569
570 if (result)
571 {
572 STRIP_USELESS_TYPE_CONVERSION (result);
573 if (valid_gimple_rhs_p (result))
574 return result;
0354c0c7
BS
575 }
576 break;
577
cbdd87d4
RG
578 case GIMPLE_INVALID_RHS:
579 gcc_unreachable ();
580 }
581
582 return NULL_TREE;
583}
584
fef5a0d9
RB
585
586/* Replace a statement at *SI_P with a sequence of statements in STMTS,
587 adjusting the replacement stmts location and virtual operands.
588 If the statement has a lhs the last stmt in the sequence is expected
589 to assign to that lhs. */
590
591static void
592gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
593{
355fe088 594 gimple *stmt = gsi_stmt (*si_p);
fef5a0d9
RB
595
596 if (gimple_has_location (stmt))
597 annotate_all_with_location (stmts, gimple_location (stmt));
598
599 /* First iterate over the replacement statements backward, assigning
600 virtual operands to their defining statements. */
355fe088 601 gimple *laststore = NULL;
fef5a0d9
RB
602 for (gimple_stmt_iterator i = gsi_last (stmts);
603 !gsi_end_p (i); gsi_prev (&i))
604 {
355fe088 605 gimple *new_stmt = gsi_stmt (i);
fef5a0d9
RB
606 if ((gimple_assign_single_p (new_stmt)
607 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
608 || (is_gimple_call (new_stmt)
609 && (gimple_call_flags (new_stmt)
610 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
611 {
612 tree vdef;
613 if (!laststore)
614 vdef = gimple_vdef (stmt);
615 else
616 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
617 gimple_set_vdef (new_stmt, vdef);
618 if (vdef && TREE_CODE (vdef) == SSA_NAME)
619 SSA_NAME_DEF_STMT (vdef) = new_stmt;
620 laststore = new_stmt;
621 }
622 }
623
624 /* Second iterate over the statements forward, assigning virtual
625 operands to their uses. */
626 tree reaching_vuse = gimple_vuse (stmt);
627 for (gimple_stmt_iterator i = gsi_start (stmts);
628 !gsi_end_p (i); gsi_next (&i))
629 {
355fe088 630 gimple *new_stmt = gsi_stmt (i);
fef5a0d9
RB
631 /* If the new statement possibly has a VUSE, update it with exact SSA
632 name we know will reach this one. */
633 if (gimple_has_mem_ops (new_stmt))
634 gimple_set_vuse (new_stmt, reaching_vuse);
635 gimple_set_modified (new_stmt, true);
636 if (gimple_vdef (new_stmt))
637 reaching_vuse = gimple_vdef (new_stmt);
638 }
639
640 /* If the new sequence does not do a store release the virtual
641 definition of the original statement. */
642 if (reaching_vuse
643 && reaching_vuse == gimple_vuse (stmt))
644 {
645 tree vdef = gimple_vdef (stmt);
646 if (vdef
647 && TREE_CODE (vdef) == SSA_NAME)
648 {
649 unlink_stmt_vdef (stmt);
650 release_ssa_name (vdef);
651 }
652 }
653
654 /* Finally replace the original statement with the sequence. */
655 gsi_replace_with_seq (si_p, stmts, false);
656}
657
52a5515e
RB
658/* Helper function for update_gimple_call and
659 gimplify_and_update_call_from_tree. A GIMPLE_CALL STMT is being replaced
660 with GIMPLE_CALL NEW_STMT. */
661
662static void
663finish_update_gimple_call (gimple_stmt_iterator *si_p, gimple *new_stmt,
664 gimple *stmt)
665{
666 tree lhs = gimple_call_lhs (stmt);
667 gimple_call_set_lhs (new_stmt, lhs);
668 if (lhs && TREE_CODE (lhs) == SSA_NAME)
669 SSA_NAME_DEF_STMT (lhs) = new_stmt;
670 gimple_move_vops (new_stmt, stmt);
671 gimple_set_location (new_stmt, gimple_location (stmt));
672 if (gimple_block (new_stmt) == NULL_TREE)
673 gimple_set_block (new_stmt, gimple_block (stmt));
674 gsi_replace (si_p, new_stmt, false);
675}
676
677/* Update a GIMPLE_CALL statement at iterator *SI_P to call to FN
678 with number of arguments NARGS, where the arguments in GIMPLE form
679 follow NARGS argument. */
680
681bool
682update_gimple_call (gimple_stmt_iterator *si_p, tree fn, int nargs, ...)
683{
684 va_list ap;
685 gcall *new_stmt, *stmt = as_a <gcall *> (gsi_stmt (*si_p));
686
687 gcc_assert (is_gimple_call (stmt));
688 va_start (ap, nargs);
689 new_stmt = gimple_build_call_valist (fn, nargs, ap);
690 finish_update_gimple_call (si_p, new_stmt, stmt);
691 va_end (ap);
692 return true;
693}
694
695/* Return true if EXPR is a CALL_EXPR suitable for representation
696 as a single GIMPLE_CALL statement. If the arguments require
697 further gimplification, return false. */
698
699static bool
700valid_gimple_call_p (tree expr)
701{
702 unsigned i, nargs;
703
704 if (TREE_CODE (expr) != CALL_EXPR)
705 return false;
706
707 nargs = call_expr_nargs (expr);
708 for (i = 0; i < nargs; i++)
709 {
710 tree arg = CALL_EXPR_ARG (expr, i);
711 if (is_gimple_reg_type (TREE_TYPE (arg)))
712 {
713 if (!is_gimple_val (arg))
714 return false;
715 }
716 else
717 if (!is_gimple_lvalue (arg))
718 return false;
719 }
720
721 return true;
722}
723
cbdd87d4
RG
724/* Convert EXPR into a GIMPLE value suitable for substitution on the
725 RHS of an assignment. Insert the necessary statements before
726 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
727 is replaced. If the call is expected to produces a result, then it
728 is replaced by an assignment of the new RHS to the result variable.
729 If the result is to be ignored, then the call is replaced by a
fe2ef088
MM
730 GIMPLE_NOP. A proper VDEF chain is retained by making the first
731 VUSE and the last VDEF of the whole sequence be the same as the replaced
732 statement and using new SSA names for stores in between. */
cbdd87d4
RG
733
734void
735gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
736{
737 tree lhs;
355fe088 738 gimple *stmt, *new_stmt;
cbdd87d4 739 gimple_stmt_iterator i;
355a7673 740 gimple_seq stmts = NULL;
cbdd87d4
RG
741
742 stmt = gsi_stmt (*si_p);
743
744 gcc_assert (is_gimple_call (stmt));
745
52a5515e
RB
746 if (valid_gimple_call_p (expr))
747 {
748 /* The call has simplified to another call. */
749 tree fn = CALL_EXPR_FN (expr);
750 unsigned i;
751 unsigned nargs = call_expr_nargs (expr);
752 vec<tree> args = vNULL;
753 gcall *new_stmt;
754
755 if (nargs > 0)
756 {
757 args.create (nargs);
758 args.safe_grow_cleared (nargs, true);
759
760 for (i = 0; i < nargs; i++)
761 args[i] = CALL_EXPR_ARG (expr, i);
762 }
763
764 new_stmt = gimple_build_call_vec (fn, args);
765 finish_update_gimple_call (si_p, new_stmt, stmt);
766 args.release ();
767 return;
768 }
cbdd87d4 769
e256dfce 770 lhs = gimple_call_lhs (stmt);
cbdd87d4 771 if (lhs == NULL_TREE)
6e572326 772 {
52a5515e 773 push_gimplify_context (gimple_in_ssa_p (cfun));
6e572326 774 gimplify_and_add (expr, &stmts);
52a5515e
RB
775 pop_gimplify_context (NULL);
776
6e572326
RG
777 /* We can end up with folding a memcpy of an empty class assignment
778 which gets optimized away by C++ gimplification. */
779 if (gimple_seq_empty_p (stmts))
780 {
781 if (gimple_in_ssa_p (cfun))
782 {
783 unlink_stmt_vdef (stmt);
784 release_defs (stmt);
785 }
f6b4dc28 786 gsi_replace (si_p, gimple_build_nop (), false);
6e572326
RG
787 return;
788 }
789 }
cbdd87d4 790 else
e256dfce 791 {
381cdae4 792 tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
e256dfce
RG
793 new_stmt = gimple_build_assign (lhs, tmp);
794 i = gsi_last (stmts);
795 gsi_insert_after_without_update (&i, new_stmt,
796 GSI_CONTINUE_LINKING);
797 }
cbdd87d4 798
fef5a0d9
RB
799 gsi_replace_with_seq_vops (si_p, stmts);
800}
cbdd87d4 801
fef5a0d9
RB
802
803/* Replace the call at *GSI with the gimple value VAL. */
804
e3174bdf 805void
fef5a0d9
RB
806replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
807{
355fe088 808 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9 809 tree lhs = gimple_call_lhs (stmt);
355fe088 810 gimple *repl;
fef5a0d9 811 if (lhs)
e256dfce 812 {
fef5a0d9
RB
813 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
814 val = fold_convert (TREE_TYPE (lhs), val);
815 repl = gimple_build_assign (lhs, val);
816 }
817 else
818 repl = gimple_build_nop ();
819 tree vdef = gimple_vdef (stmt);
820 if (vdef && TREE_CODE (vdef) == SSA_NAME)
821 {
822 unlink_stmt_vdef (stmt);
823 release_ssa_name (vdef);
824 }
f6b4dc28 825 gsi_replace (gsi, repl, false);
fef5a0d9
RB
826}
827
828/* Replace the call at *GSI with the new call REPL and fold that
829 again. */
830
831static void
355fe088 832replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
fef5a0d9 833{
355fe088 834 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9
RB
835 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
836 gimple_set_location (repl, gimple_location (stmt));
779724a5 837 gimple_move_vops (repl, stmt);
f6b4dc28 838 gsi_replace (gsi, repl, false);
fef5a0d9
RB
839 fold_stmt (gsi);
840}
841
842/* Return true if VAR is a VAR_DECL or a component thereof. */
843
844static bool
845var_decl_component_p (tree var)
846{
847 tree inner = var;
848 while (handled_component_p (inner))
849 inner = TREE_OPERAND (inner, 0);
47cac108
RB
850 return (DECL_P (inner)
851 || (TREE_CODE (inner) == MEM_REF
852 && TREE_CODE (TREE_OPERAND (inner, 0)) == ADDR_EXPR));
fef5a0d9
RB
853}
854
c89af696
AH
855/* Return TRUE if the SIZE argument, representing the size of an
856 object, is in a range of values of which exactly zero is valid. */
6512c0f1
MS
857
858static bool
859size_must_be_zero_p (tree size)
860{
861 if (integer_zerop (size))
862 return true;
863
3f27391f 864 if (TREE_CODE (size) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (size)))
6512c0f1
MS
865 return false;
866
6512c0f1
MS
867 tree type = TREE_TYPE (size);
868 int prec = TYPE_PRECISION (type);
869
6512c0f1
MS
870 /* Compute the value of SSIZE_MAX, the largest positive value that
871 can be stored in ssize_t, the signed counterpart of size_t. */
872 wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
5d462877 873 value_range valid_range (build_int_cst (type, 0),
028d81b1
AH
874 wide_int_to_tree (type, ssize_max));
875 value_range vr;
45f4e2b0
AH
876 if (cfun)
877 get_range_query (cfun)->range_of_expr (vr, size);
878 else
879 get_global_range_query ()->range_of_expr (vr, size);
880 if (vr.undefined_p ())
881 vr.set_varying (TREE_TYPE (size));
c89af696
AH
882 vr.intersect (&valid_range);
883 return vr.zero_p ();
6512c0f1
MS
884}
885
cc8bea0a
MS
886/* Fold function call to builtin mem{{,p}cpy,move}. Try to detect and
887 diagnose (otherwise undefined) overlapping copies without preventing
888 folding. When folded, GCC guarantees that overlapping memcpy has
889 the same semantics as memmove. Call to the library memcpy need not
890 provide the same guarantee. Return false if no simplification can
891 be made. */
fef5a0d9
RB
892
static bool
gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
			       tree dest, tree src, enum built_in_function code)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  /* All three handled builtins (memcpy/mempcpy/memmove) take the byte
     count as their third argument.  */
  tree len = gimple_call_arg (stmt, 2);
  location_t loc = gimple_location (stmt);

  /* If the LEN parameter is a constant zero or in range where
     the only valid value is zero, return DEST.  */
  if (size_must_be_zero_p (len))
    {
      gimple *repl;
      if (gimple_call_lhs (stmt))
	repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
      else
	repl = gimple_build_nop ();
      /* The call had a virtual definition; drop it since the
	 replacement no longer writes memory.  */
      tree vdef = gimple_vdef (stmt);
      if (vdef && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
      gsi_replace (gsi, repl, false);
      return true;
    }

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    {
      /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
	 It's safe and may even be emitted by GCC itself (see bug
	 32667).  */
      unlink_stmt_vdef (stmt);
      if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	release_ssa_name (gimple_vdef (stmt));
      if (!lhs)
	{
	  gsi_replace (gsi, gimple_build_nop (), false);
	  return true;
	}
      /* The LHS still needs DEST (possibly adjusted by LEN for
	 mempcpy); fall through to the common epilogue.  */
      goto done;
    }
  else
    {
      /* We cannot (easily) change the type of the copy if it is a storage
	 order barrier, i.e. is equivalent to a VIEW_CONVERT_EXPR that can
	 modify the storage order of objects (see storage_order_barrier_p).  */
      tree srctype
	= POINTER_TYPE_P (TREE_TYPE (src))
	  ? TREE_TYPE (TREE_TYPE (src)) : NULL_TREE;
      tree desttype
	= POINTER_TYPE_P (TREE_TYPE (dest))
	  ? TREE_TYPE (TREE_TYPE (dest)) : NULL_TREE;
      tree destvar, srcvar, srcoff;
      unsigned int src_align, dest_align;
      unsigned HOST_WIDE_INT tmp_len;
      const char *tmp_str;

      /* Build accesses at offset zero with a ref-all character type.  */
      tree off0
	= build_int_cst (build_pointer_type_for_mode (char_type_node,
						      ptr_mode, true), 0);

      /* If we can perform the copy efficiently with first doing all loads
	 and then all stores inline it that way.  Currently efficiently
	 means that we can load all the memory into a single integer
	 register which is what MOVE_MAX gives us.  */
      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (tree_fits_uhwi_p (len)
	  && compare_tree_int (len, MOVE_MAX) <= 0
	  /* FIXME: Don't transform copies from strings with known length.
	     Until GCC 9 this prevented a case in gcc.dg/strlenopt-8.c
	     from being handled, and the case was XFAILed for that reason.
	     Now that it is handled and the XFAIL removed, as soon as other
	     strlenopt tests that rely on it for passing are adjusted, this
	     hack can be removed.  */
	  && !c_strlen (src, 1)
	  && !((tmp_str = getbyterep (src, &tmp_len)) != NULL
	       && memchr (tmp_str, 0, tmp_len) == NULL)
	  && !(srctype
	       && AGGREGATE_TYPE_P (srctype)
	       && TYPE_REVERSE_STORAGE_ORDER (srctype))
	  && !(desttype
	       && AGGREGATE_TYPE_P (desttype)
	       && TYPE_REVERSE_STORAGE_ORDER (desttype)))
	{
	  unsigned ilen = tree_to_uhwi (len);
	  if (pow2p_hwi (ilen))
	    {
	      /* Detect out-of-bounds accesses without issuing warnings.
		 Avoid folding out-of-bounds copies but to avoid false
		 positives for unreachable code defer warning until after
		 DCE has worked its magic.
		 -Wrestrict is still diagnosed.  */
	      if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
							 dest, src, len, len,
							 false, false))
		if (warning != OPT_Wrestrict)
		  return false;

	      scalar_int_mode mode;
	      tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
	      if (type
		  && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
		  && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
		  /* If the destination pointer is not aligned we must be able
		     to emit an unaligned store.  */
		  && (dest_align >= GET_MODE_ALIGNMENT (mode)
		      || !targetm.slow_unaligned_access (mode, dest_align)
		      || (optab_handler (movmisalign_optab, mode)
			  != CODE_FOR_nothing)))
		{
		  tree srctype = type;
		  tree desttype = type;
		  if (src_align < GET_MODE_ALIGNMENT (mode))
		    srctype = build_aligned_type (type, src_align);
		  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
		  /* Try to fold the load from a constant aggregate so
		     the copy becomes a store of a constant.  */
		  tree tem = fold_const_aggregate_ref (srcmem);
		  if (tem)
		    srcmem = tem;
		  else if (src_align < GET_MODE_ALIGNMENT (mode)
			   && targetm.slow_unaligned_access (mode, src_align)
			   && (optab_handler (movmisalign_optab, mode)
			       == CODE_FOR_nothing))
		    srcmem = NULL_TREE;
		  if (srcmem)
		    {
		      gimple *new_stmt;
		      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
			{
			  /* Load into a temporary first so the store
			     below is a plain register store.  */
			  new_stmt = gimple_build_assign (NULL_TREE, srcmem);
			  srcmem
			    = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
							  new_stmt);
			  gimple_assign_set_lhs (new_stmt, srcmem);
			  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      if (dest_align < GET_MODE_ALIGNMENT (mode))
			desttype = build_aligned_type (type, dest_align);
		      new_stmt
			= gimple_build_assign (fold_build2 (MEM_REF, desttype,
							    dest, off0),
					       srcmem);
		      gimple_move_vops (new_stmt, stmt);
		      if (!lhs)
			{
			  gsi_replace (gsi, new_stmt, false);
			  return true;
			}
		      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
		      goto done;
		    }
		}
	    }
	}

      if (code == BUILT_IN_MEMMOVE)
	{
	  /* Both DEST and SRC must be pointer types.
	     ??? This is what old code did.  Is the testing for pointer types
	     really mandatory?

	     If either SRC is readonly or length is 1, we can use memcpy.  */
	  if (!dest_align || !src_align)
	    return false;
	  if (readonly_data_expr (src)
	      || (tree_fits_uhwi_p (len)
		  && (MIN (src_align, dest_align) / BITS_PER_UNIT
		      >= tree_to_uhwi (len))))
	    {
	      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If *src and *dest can't overlap, optimize into memcpy as well.  */
	  if (TREE_CODE (src) == ADDR_EXPR
	      && TREE_CODE (dest) == ADDR_EXPR)
	    {
	      tree src_base, dest_base, fn;
	      poly_int64 src_offset = 0, dest_offset = 0;
	      poly_uint64 maxsize;

	      srcvar = TREE_OPERAND (src, 0);
	      src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
	      if (src_base == NULL)
		src_base = srcvar;
	      destvar = TREE_OPERAND (dest, 0);
	      dest_base = get_addr_base_and_unit_offset (destvar,
							 &dest_offset);
	      if (dest_base == NULL)
		dest_base = destvar;
	      if (!poly_int_tree_p (len, &maxsize))
		maxsize = -1;
	      if (SSA_VAR_P (src_base)
		  && SSA_VAR_P (dest_base))
		{
		  if (operand_equal_p (src_base, dest_base, 0)
		      && ranges_maybe_overlap_p (src_offset, maxsize,
						 dest_offset, maxsize))
		    return false;
		}
	      else if (TREE_CODE (src_base) == MEM_REF
		       && TREE_CODE (dest_base) == MEM_REF)
		{
		  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
					 TREE_OPERAND (dest_base, 0), 0))
		    return false;
		  poly_offset_int full_src_offset
		    = mem_ref_offset (src_base) + src_offset;
		  poly_offset_int full_dest_offset
		    = mem_ref_offset (dest_base) + dest_offset;
		  if (ranges_maybe_overlap_p (full_src_offset, maxsize,
					      full_dest_offset, maxsize))
		    return false;
		}
	      else
		return false;

	      fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If the destination and source do not alias optimize into
	     memcpy as well.  */
	  if ((is_gimple_min_invariant (dest)
	       || TREE_CODE (dest) == SSA_NAME)
	      && (is_gimple_min_invariant (src)
		  || TREE_CODE (src) == SSA_NAME))
	    {
	      ao_ref destr, srcr;
	      ao_ref_init_from_ptr_and_size (&destr, dest, len);
	      ao_ref_init_from_ptr_and_size (&srcr, src, len);
	      if (!refs_may_alias_p_1 (&destr, &srcr, false))
		{
		  tree fn;
		  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
		  if (!fn)
		    return false;
		  gimple_call_set_fndecl (stmt, fn);
		  gimple_call_set_arg (stmt, 0, dest);
		  gimple_call_set_arg (stmt, 1, src);
		  fold_stmt (gsi);
		  return true;
		}
	    }

	  return false;
	}

      if (!tree_fits_shwi_p (len))
	return false;
      if (!srctype
	  || (AGGREGATE_TYPE_P (srctype)
	      && TYPE_REVERSE_STORAGE_ORDER (srctype)))
	return false;
      if (!desttype
	  || (AGGREGATE_TYPE_P (desttype)
	      && TYPE_REVERSE_STORAGE_ORDER (desttype)))
	return false;
      /* In the following try to find a type that is most natural to be
	 used for the memcpy source and destination and that allows
	 the most optimization when memcpy is turned into a plain assignment
	 using that type.  In theory we could always use a char[len] type
	 but that only gains us that the destination and source possibly
	 no longer will have their address taken.  */
      if (TREE_CODE (srctype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	srctype = TREE_TYPE (srctype);
      if (TREE_CODE (desttype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	desttype = TREE_TYPE (desttype);
      if (TREE_ADDRESSABLE (srctype)
	  || TREE_ADDRESSABLE (desttype))
	return false;

      /* Make sure we are not copying using a floating-point mode or
	 a type whose size possibly does not match its precision.  */
      if (FLOAT_MODE_P (TYPE_MODE (desttype))
	  || TREE_CODE (desttype) == BOOLEAN_TYPE
	  || TREE_CODE (desttype) == ENUMERAL_TYPE)
	desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
      if (FLOAT_MODE_P (TYPE_MODE (srctype))
	  || TREE_CODE (srctype) == BOOLEAN_TYPE
	  || TREE_CODE (srctype) == ENUMERAL_TYPE)
	srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
      /* bitwise_type_for_mode may fail; fall back to the other side's
	 type, and give up if neither survived.  */
      if (!srctype)
	srctype = desttype;
      if (!desttype)
	desttype = srctype;
      if (!srctype)
	return false;

      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);

      /* Choose between src and destination type for the access based
	 on alignment, whether the access constitutes a register access
	 and whether it may actually expose a declaration for SSA rewrite
	 or SRA decomposition.  Also try to expose a string constant, we
	 might be able to concatenate several of them later into a single
	 string store.  */
      destvar = NULL_TREE;
      srcvar = NULL_TREE;
      if (TREE_CODE (dest) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (dest, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len)
	  && dest_align >= TYPE_ALIGN (desttype)
	  && (is_gimple_reg_type (desttype)
	      || src_align >= TYPE_ALIGN (desttype)))
	destvar = fold_build2 (MEM_REF, desttype, dest, off0);
      else if (TREE_CODE (src) == ADDR_EXPR
	       && var_decl_component_p (TREE_OPERAND (src, 0))
	       && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
	       && src_align >= TYPE_ALIGN (srctype)
	       && (is_gimple_reg_type (srctype)
		   || dest_align >= TYPE_ALIGN (srctype)))
	srcvar = fold_build2 (MEM_REF, srctype, src, off0);
      /* FIXME: Don't transform copies from strings with known original length.
	 As soon as strlenopt tests that rely on it for passing are adjusted,
	 this hack can be removed.  */
      else if (gimple_call_alloca_for_var_p (stmt)
	       && (srcvar = string_constant (src, &srcoff, NULL, NULL))
	       && integer_zerop (srcoff)
	       && tree_int_cst_equal (TYPE_SIZE_UNIT (TREE_TYPE (srcvar)), len)
	       && dest_align >= TYPE_ALIGN (TREE_TYPE (srcvar)))
	srctype = TREE_TYPE (srcvar);
      else
	return false;

      /* Now that we chose an access type express the other side in
	 terms of it if the target allows that with respect to alignment
	 constraints.  */
      if (srcvar == NULL_TREE)
	{
	  if (src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, desttype, src, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return false;
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	    }
	}
      else if (destvar == NULL_TREE)
	{
	  if (dest_align >= TYPE_ALIGN (srctype))
	    destvar = fold_build2 (MEM_REF, srctype, dest, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return false;
	      desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
					     dest_align);
	      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
	    }
	}

      /* Same as above, detect out-of-bounds accesses without issuing
	 warnings.  Avoid folding out-of-bounds copies but to avoid
	 false positives for unreachable code defer warning until
	 after DCE has worked its magic.
	 -Wrestrict is still diagnosed.  */
      if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
						 dest, src, len, len,
						 false, false))
	if (warning != OPT_Wrestrict)
	  return false;

      gimple *new_stmt;
      if (is_gimple_reg_type (TREE_TYPE (srcvar)))
	{
	  /* Register-type copy: load into a temporary, then store.  */
	  tree tem = fold_const_aggregate_ref (srcvar);
	  if (tem)
	    srcvar = tem;
	  if (! is_gimple_min_invariant (srcvar))
	    {
	      new_stmt = gimple_build_assign (NULL_TREE, srcvar);
	      srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
						   new_stmt);
	      gimple_assign_set_lhs (new_stmt, srcvar);
	      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
	    }
	  new_stmt = gimple_build_assign (destvar, srcvar);
	  goto set_vop_and_replace;
	}

      /* We get an aggregate copy.  If the source is a STRING_CST, then
	 directly use its type to perform the copy.  */
      if (TREE_CODE (srcvar) == STRING_CST)
	desttype = srctype;

      /* Or else, use an unsigned char[] type to perform the copy in order
	 to preserve padding and to avoid any issues with TREE_ADDRESSABLE
	 types or float modes behavior on copying.  */
      else
	{
	  desttype = build_array_type_nelts (unsigned_char_type_node,
					     tree_to_uhwi (len));
	  srctype = desttype;
	  if (src_align > TYPE_ALIGN (srctype))
	    srctype = build_aligned_type (srctype, src_align);
	  srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	}

      if (dest_align > TYPE_ALIGN (desttype))
	desttype = build_aligned_type (desttype, dest_align);
      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
      new_stmt = gimple_build_assign (destvar, srcvar);

set_vop_and_replace:
      /* Transfer the call's virtual operands to the assignment that
	 replaces it.  */
      gimple_move_vops (new_stmt, stmt);
      if (!lhs)
	{
	  gsi_replace (gsi, new_stmt, false);
	  return true;
	}
      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
    }

done:
  /* Common epilogue: compute the value of the LHS.  For memcpy/memmove
     that is DEST; for mempcpy it is DEST + LEN.  */
  gimple_seq stmts = NULL;
  if (code == BUILT_IN_MEMCPY || code == BUILT_IN_MEMMOVE)
    len = NULL_TREE;
  else if (code == BUILT_IN_MEMPCPY)
    {
      len = gimple_convert_to_ptrofftype (&stmts, loc, len);
      dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
			   TREE_TYPE (dest), dest, len);
    }
  else
    gcc_unreachable ();

  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gimple *repl = gimple_build_assign (lhs, dest);
  gsi_replace (gsi, repl, false);
  return true;
}
1350
b3d8d88e
MS
1351/* Transform a call to built-in bcmp(a, b, len) at *GSI into one
1352 to built-in memcmp (a, b, len). */
1353
1354static bool
1355gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
1356{
1357 tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);
1358
1359 if (!fn)
1360 return false;
1361
1362 /* Transform bcmp (a, b, len) into memcmp (a, b, len). */
1363
1364 gimple *stmt = gsi_stmt (*gsi);
1365 tree a = gimple_call_arg (stmt, 0);
1366 tree b = gimple_call_arg (stmt, 1);
1367 tree len = gimple_call_arg (stmt, 2);
1368
1369 gimple *repl = gimple_build_call (fn, 3, a, b, len);
1370 replace_call_with_call_and_fold (gsi, repl);
1371
1372 return true;
1373}
1374
1375/* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
1376 to built-in memmove (dest, src, len). */
1377
1378static bool
1379gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
1380{
1381 tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);
1382
1383 if (!fn)
1384 return false;
1385
1386 /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
1387 it's quivalent to memmove (not memcpy). Transform bcopy (src, dest,
1388 len) into memmove (dest, src, len). */
1389
1390 gimple *stmt = gsi_stmt (*gsi);
1391 tree src = gimple_call_arg (stmt, 0);
1392 tree dest = gimple_call_arg (stmt, 1);
1393 tree len = gimple_call_arg (stmt, 2);
1394
1395 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1396 gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
1397 replace_call_with_call_and_fold (gsi, repl);
1398
1399 return true;
1400}
1401
1402/* Transform a call to built-in bzero (dest, len) at *GSI into one
1403 to built-in memset (dest, 0, len). */
1404
1405static bool
1406gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
1407{
1408 tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
1409
1410 if (!fn)
1411 return false;
1412
1413 /* Transform bzero (dest, len) into memset (dest, 0, len). */
1414
1415 gimple *stmt = gsi_stmt (*gsi);
1416 tree dest = gimple_call_arg (stmt, 0);
1417 tree len = gimple_call_arg (stmt, 1);
1418
1419 gimple_seq seq = NULL;
1420 gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
1421 gimple_seq_add_stmt_without_update (&seq, repl);
1422 gsi_replace_with_seq_vops (gsi, seq);
1423 fold_stmt (gsi);
1424
1425 return true;
1426}
1427
fef5a0d9
RB
1428/* Fold function call to builtin memset or bzero at *GSI setting the
1429 memory of size LEN to VAL. Return whether a simplification was made. */
1430
1431static bool
1432gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1433{
355fe088 1434 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9
RB
1435 tree etype;
1436 unsigned HOST_WIDE_INT length, cval;
1437
1438 /* If the LEN parameter is zero, return DEST. */
1439 if (integer_zerop (len))
1440 {
1441 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1442 return true;
1443 }
1444
1445 if (! tree_fits_uhwi_p (len))
1446 return false;
1447
1448 if (TREE_CODE (c) != INTEGER_CST)
1449 return false;
1450
1451 tree dest = gimple_call_arg (stmt, 0);
1452 tree var = dest;
1453 if (TREE_CODE (var) != ADDR_EXPR)
1454 return false;
1455
1456 var = TREE_OPERAND (var, 0);
1457 if (TREE_THIS_VOLATILE (var))
1458 return false;
1459
1460 etype = TREE_TYPE (var);
1461 if (TREE_CODE (etype) == ARRAY_TYPE)
1462 etype = TREE_TYPE (etype);
1463
1464 if (!INTEGRAL_TYPE_P (etype)
1465 && !POINTER_TYPE_P (etype))
1466 return NULL_TREE;
1467
1468 if (! var_decl_component_p (var))
1469 return NULL_TREE;
1470
1471 length = tree_to_uhwi (len);
7a504f33 1472 if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
1ba9acb1
RB
1473 || (GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (etype))
1474 != GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (etype)))
fef5a0d9
RB
1475 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1476 return NULL_TREE;
1477
1478 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1479 return NULL_TREE;
1480
1ba9acb1
RB
1481 if (!type_has_mode_precision_p (etype))
1482 etype = lang_hooks.types.type_for_mode (SCALAR_INT_TYPE_MODE (etype),
1483 TYPE_UNSIGNED (etype));
1484
fef5a0d9
RB
1485 if (integer_zerop (c))
1486 cval = 0;
1487 else
1488 {
1489 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1490 return NULL_TREE;
1491
1492 cval = TREE_INT_CST_LOW (c);
1493 cval &= 0xff;
1494 cval |= cval << 8;
1495 cval |= cval << 16;
1496 cval |= (cval << 31) << 1;
1497 }
1498
1499 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
355fe088 1500 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
779724a5 1501 gimple_move_vops (store, stmt);
fef5a0d9
RB
1502 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1503 if (gimple_call_lhs (stmt))
1504 {
355fe088 1505 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
f6b4dc28 1506 gsi_replace (gsi, asgn, false);
fef5a0d9
RB
1507 }
1508 else
1509 {
1510 gimple_stmt_iterator gsi2 = *gsi;
1511 gsi_prev (gsi);
1512 gsi_remove (&gsi2, true);
1513 }
1514
1515 return true;
1516}
1517
/* Helper of get_range_strlen for ARG that is not an SSA_NAME.  Updates
   the length/bound members of *PDATA according to RKIND (see
   strlen_range_kind).  ELTSIZE is the character size in bytes (1 for
   narrow strings, 2 or 4 for wide strings).  VISITED is the bitmap of
   SSA names already examined by the mutually recursive
   get_range_strlen.  Returns true on success.  */

static bool
get_range_strlen_tree (tree arg, bitmap *visited, strlen_range_kind rkind,
		       c_strlen_data *pdata, unsigned eltsize)
{
  gcc_assert (TREE_CODE (arg) != SSA_NAME);

  /* The length computed by this invocation of the function.  */
  tree val = NULL_TREE;

  /* True if VAL is an optimistic (tight) bound determined from
     the size of the character array in which the string may be
     stored.  In that case, the computed VAL is used to set
     PDATA->MAXBOUND.  */
  bool tight_bound = false;

  /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
    {
      tree op = TREE_OPERAND (arg, 0);
      if (integer_zerop (TREE_OPERAND (op, 1)))
	{
	  /* &(*p)[0]: recurse on the pointer P itself.  */
	  tree aop0 = TREE_OPERAND (op, 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_range_strlen (TREE_OPERAND (aop0, 0), visited, rkind,
				     pdata, eltsize);
	}
      else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF
	       && rkind == SRK_LENRANGE)
	{
	  /* Fail if an array is the last member of a struct object
	     since it could be treated as a (fake) flexible array
	     member.  */
	  tree idx = TREE_OPERAND (op, 1);

	  arg = TREE_OPERAND (op, 0);
	  tree optype = TREE_TYPE (arg);
	  if (tree dom = TYPE_DOMAIN (optype))
	    if (tree bound = TYPE_MAX_VALUE (dom))
	      if (TREE_CODE (bound) == INTEGER_CST
		  && TREE_CODE (idx) == INTEGER_CST
		  && tree_int_cst_lt (bound, idx))
		return false;
	}
    }

  if (rkind == SRK_INT_VALUE)
    {
      /* We are computing the maximum value (not string length).  */
      val = arg;
      if (TREE_CODE (val) != INTEGER_CST
	  || tree_int_cst_sgn (val) < 0)
	return false;
    }
  else
    {
      c_strlen_data lendata = { };
      val = c_strlen (arg, 1, &lendata, eltsize);

      if (!val && lendata.decl)
	{
	  /* ARG refers to an unterminated const character array.
	     DATA.DECL with size DATA.LEN.  */
	  val = lendata.minlen;
	  pdata->decl = lendata.decl;
	}
    }

  /* Set if VAL represents the maximum length based on array size (set
     when exact length cannot be determined).  */
  bool maxbound = false;

  if (!val && rkind == SRK_LENRANGE)
    {
      /* No exact length: fall back to bounds derived from the type or
	 size of the referenced object, per the shape of ARG below.  */
      if (TREE_CODE (arg) == ADDR_EXPR)
	return get_range_strlen (TREE_OPERAND (arg, 0), visited, rkind,
				 pdata, eltsize);

      if (TREE_CODE (arg) == ARRAY_REF)
	{
	  tree optype = TREE_TYPE (TREE_OPERAND (arg, 0));

	  /* Determine the "innermost" array type.  */
	  while (TREE_CODE (optype) == ARRAY_TYPE
		 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
	    optype = TREE_TYPE (optype);

	  /* Avoid arrays of pointers.  */
	  tree eltype = TREE_TYPE (optype);
	  if (TREE_CODE (optype) != ARRAY_TYPE
	      || !INTEGRAL_TYPE_P (eltype))
	    return false;

	  /* Fail when the array bound is unknown or zero.  */
	  val = TYPE_SIZE_UNIT (optype);
	  if (!val
	      || TREE_CODE (val) != INTEGER_CST
	      || integer_zerop (val))
	    return false;

	  /* Subtract one for the terminating nul.  */
	  val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
			     integer_one_node);

	  /* Set the minimum size to zero since the string in
	     the array could have zero length.  */
	  pdata->minlen = ssize_int (0);

	  tight_bound = true;
	}
      else if (TREE_CODE (arg) == COMPONENT_REF
	       && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
		   == ARRAY_TYPE))
	{
	  /* Use the type of the member array to determine the upper
	     bound on the length of the array.  This may be overly
	     optimistic if the array itself isn't NUL-terminated and
	     the caller relies on the subsequent member to contain
	     the NUL but that would only be considered valid if
	     the array were the last member of a struct.  */

	  tree fld = TREE_OPERAND (arg, 1);

	  tree optype = TREE_TYPE (fld);

	  /* Determine the "innermost" array type.  */
	  while (TREE_CODE (optype) == ARRAY_TYPE
		 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
	    optype = TREE_TYPE (optype);

	  /* Fail when the array bound is unknown or zero.  */
	  val = TYPE_SIZE_UNIT (optype);
	  if (!val
	      || TREE_CODE (val) != INTEGER_CST
	      || integer_zerop (val))
	    return false;
	  val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
			     integer_one_node);

	  /* Set the minimum size to zero since the string in
	     the array could have zero length.  */
	  pdata->minlen = ssize_int (0);

	  /* The array size determined above is an optimistic bound
	     on the length.  If the array isn't nul-terminated the
	     length computed by the library function would be greater.
	     Even though using strlen to cross the subobject boundary
	     is undefined, avoid drawing conclusions from the member
	     type about the length here.  */
	  tight_bound = true;
	}
      else if (TREE_CODE (arg) == MEM_REF
	       && TREE_CODE (TREE_TYPE (arg)) == ARRAY_TYPE
	       && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == INTEGER_TYPE
	       && TREE_CODE (TREE_OPERAND (arg, 0)) == ADDR_EXPR)
	{
	  /* Handle a MEM_REF into a DECL accessing an array of integers,
	     being conservative about references to extern structures with
	     flexible array members that can be initialized to arbitrary
	     numbers of elements as an extension (static structs are okay).
	     FIXME: Make this less conservative -- see
	     component_ref_size in tree.c.  */
	  tree ref = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if ((TREE_CODE (ref) == PARM_DECL || VAR_P (ref))
	      && (decl_binds_to_current_def_p (ref)
		  || !array_at_struct_end_p (arg)))
	    {
	      /* Fail if the offset is out of bounds.  Such accesses
		 should be diagnosed at some point.  */
	      val = DECL_SIZE_UNIT (ref);
	      if (!val
		  || TREE_CODE (val) != INTEGER_CST
		  || integer_zerop (val))
		return false;

	      poly_offset_int psiz = wi::to_offset (val);
	      poly_offset_int poff = mem_ref_offset (arg);
	      if (known_le (psiz, poff))
		return false;

	      pdata->minlen = ssize_int (0);

	      /* Subtract the offset and one for the terminating nul.  */
	      psiz -= poff;
	      psiz -= 1;
	      val = wide_int_to_tree (TREE_TYPE (val), psiz);
	      /* Since VAL reflects the size of a declared object
		 rather the type of the access it is not a tight bound.  */
	    }
	}
      else if (TREE_CODE (arg) == PARM_DECL || VAR_P (arg))
	{
	  /* Avoid handling pointers to arrays.  GCC might misuse
	     a pointer to an array of one bound to point to an array
	     object of a greater bound.  */
	  tree argtype = TREE_TYPE (arg);
	  if (TREE_CODE (argtype) == ARRAY_TYPE)
	    {
	      val = TYPE_SIZE_UNIT (argtype);
	      if (!val
		  || TREE_CODE (val) != INTEGER_CST
		  || integer_zerop (val))
		return false;
	      val = wide_int_to_tree (TREE_TYPE (val),
				      wi::sub (wi::to_wide (val), 1));

	      /* Set the minimum size to zero since the string in
		 the array could have zero length.  */
	      pdata->minlen = ssize_int (0);
	    }
	}
      maxbound = true;
    }

  if (!val)
    return false;

  /* Adjust the lower bound on the string length as necessary.  */
  if (!pdata->minlen
      || (rkind != SRK_STRLEN
	  && TREE_CODE (pdata->minlen) == INTEGER_CST
	  && TREE_CODE (val) == INTEGER_CST
	  && tree_int_cst_lt (val, pdata->minlen)))
    pdata->minlen = val;

  if (pdata->maxbound && TREE_CODE (pdata->maxbound) == INTEGER_CST)
    {
      /* Adjust the tighter (more optimistic) string length bound
	 if necessary and proceed to adjust the more conservative
	 bound.  */
      if (TREE_CODE (val) == INTEGER_CST)
	{
	  if (tree_int_cst_lt (pdata->maxbound, val))
	    pdata->maxbound = val;
	}
      else
	pdata->maxbound = val;
    }
  else if (pdata->maxbound || maxbound)
    /* Set PDATA->MAXBOUND only if it either isn't INTEGER_CST or
       if VAL corresponds to the maximum length determined based
       on the type of the object.  */
    pdata->maxbound = val;

  if (tight_bound)
    {
      /* VAL computed above represents an optimistically tight bound
	 on the length of the string based on the referenced object's
	 or subobject's type.  Determine the conservative upper bound
	 based on the enclosing object's size if possible.  */
      if (rkind == SRK_LENRANGE)
	{
	  poly_int64 offset;
	  tree base = get_addr_base_and_unit_offset (arg, &offset);
	  if (!base)
	    {
	      /* When the call above fails due to a non-constant offset
		 assume the offset is zero and use the size of the whole
		 enclosing object instead.  */
	      base = get_base_address (arg);
	      offset = 0;
	    }
	  /* If the base object is a pointer no upper bound on the length
	     can be determined.  Otherwise the maximum length is equal to
	     the size of the enclosing object minus the offset of
	     the referenced subobject minus 1 (for the terminating nul).  */
	  tree type = TREE_TYPE (base);
	  if (TREE_CODE (type) == POINTER_TYPE
	      || (TREE_CODE (base) != PARM_DECL && !VAR_P (base))
	      || !(val = DECL_SIZE_UNIT (base)))
	    val = build_all_ones_cst (size_type_node);
	  else
	    {
	      val = DECL_SIZE_UNIT (base);
	      val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
				 size_int (offset + 1));
	    }
	}
      else
	return false;
    }

  if (pdata->maxlen)
    {
      /* Adjust the more conservative bound if possible/necessary
	 and fail otherwise.  */
      if (rkind != SRK_STRLEN)
	{
	  if (TREE_CODE (pdata->maxlen) != INTEGER_CST
	      || TREE_CODE (val) != INTEGER_CST)
	    return false;

	  if (tree_int_cst_lt (pdata->maxlen, val))
	    pdata->maxlen = val;
	  return true;
	}
      else if (simple_cst_equal (val, pdata->maxlen) != 1)
	{
	  /* Fail if the length of this ARG is different from that
	     previously determined from another ARG.  */
	  return false;
	}
    }

  pdata->maxlen = val;
  return rkind == SRK_LENRANGE || !integer_all_onesp (val);
}
1827
5d6655eb
MS
1828/* For an ARG referencing one or more strings, try to obtain the range
1829 of their lengths, or the size of the largest array ARG referes to if
1830 the range of lengths cannot be determined, and store all in *PDATA.
1831 For an integer ARG (when RKIND == SRK_INT_VALUE), try to determine
1832 the maximum constant value.
1833 If ARG is an SSA_NAME, follow its use-def chains. When RKIND ==
1834 SRK_STRLEN, then if PDATA->MAXLEN is not equal to the determined
1835 length or if we are unable to determine the length, return false.
fb471a13 1836 VISITED is a bitmap of visited variables.
598f7235
MS
1837 RKIND determines the kind of value or range to obtain (see
1838 strlen_range_kind).
1839 Set PDATA->DECL if ARG refers to an unterminated constant array.
1840 On input, set ELTSIZE to 1 for normal single byte character strings,
1841 and either 2 or 4 for wide characer strings (the size of wchar_t).
1842 Return true if *PDATA was successfully populated and false otherwise. */
fb471a13
MS
1843
1844static bool
03c4a945
MS
1845get_range_strlen (tree arg, bitmap *visited,
1846 strlen_range_kind rkind,
1847 c_strlen_data *pdata, unsigned eltsize)
fb471a13
MS
1848{
1849
1850 if (TREE_CODE (arg) != SSA_NAME)
03c4a945 1851 return get_range_strlen_tree (arg, visited, rkind, pdata, eltsize);
fb471a13 1852
fef5a0d9
RB
1853 /* If ARG is registered for SSA update we cannot look at its defining
1854 statement. */
1855 if (name_registered_for_update_p (arg))
1856 return false;
1857
1858 /* If we were already here, break the infinite cycle. */
dcb7fae2
RB
1859 if (!*visited)
1860 *visited = BITMAP_ALLOC (NULL);
1861 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
fef5a0d9
RB
1862 return true;
1863
fb471a13
MS
1864 tree var = arg;
1865 gimple *def_stmt = SSA_NAME_DEF_STMT (var);
1866
fef5a0d9
RB
1867 switch (gimple_code (def_stmt))
1868 {
1869 case GIMPLE_ASSIGN:
598f7235
MS
1870 /* The RHS of the statement defining VAR must either have a
1871 constant length or come from another SSA_NAME with a constant
1872 length. */
fef5a0d9
RB
1873 if (gimple_assign_single_p (def_stmt)
1874 || gimple_assign_unary_nop_p (def_stmt))
1875 {
598f7235 1876 tree rhs = gimple_assign_rhs1 (def_stmt);
03c4a945 1877 return get_range_strlen (rhs, visited, rkind, pdata, eltsize);
fef5a0d9
RB
1878 }
1879 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1880 {
c8602fe6
JJ
1881 tree ops[2] = { gimple_assign_rhs2 (def_stmt),
1882 gimple_assign_rhs3 (def_stmt) };
1883
1884 for (unsigned int i = 0; i < 2; i++)
03c4a945 1885 if (!get_range_strlen (ops[i], visited, rkind, pdata, eltsize))
c8602fe6 1886 {
84de9426 1887 if (rkind != SRK_LENRANGE)
c8602fe6 1888 return false;
80c2bad6
MS
1889 /* Set the upper bound to the maximum to prevent
1890 it from being adjusted in the next iteration but
1891 leave MINLEN and the more conservative MAXBOUND
1892 determined so far alone (or leave them null if
1893 they haven't been set yet). That the MINLEN is
1894 in fact zero can be determined from MAXLEN being
1895 unbounded but the discovered minimum is used for
1896 diagnostics. */
730832cd 1897 pdata->maxlen = build_all_ones_cst (size_type_node);
c8602fe6
JJ
1898 }
1899 return true;
cc8bea0a 1900 }
fef5a0d9
RB
1901 return false;
1902
1903 case GIMPLE_PHI:
598f7235
MS
1904 /* Unless RKIND == SRK_LENRANGE, all arguments of the PHI node
1905 must have a constant length. */
c8602fe6 1906 for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
fef5a0d9
RB
1907 {
1908 tree arg = gimple_phi_arg (def_stmt, i)->def;
1909
1910 /* If this PHI has itself as an argument, we cannot
1911 determine the string length of this argument. However,
1912 if we can find a constant string length for the other
1913 PHI args then we can still be sure that this is a
1914 constant string length. So be optimistic and just
1915 continue with the next argument. */
1916 if (arg == gimple_phi_result (def_stmt))
1917 continue;
1918
03c4a945 1919 if (!get_range_strlen (arg, visited, rkind, pdata, eltsize))
88d0c3f0 1920 {
84de9426 1921 if (rkind != SRK_LENRANGE)
88d0c3f0 1922 return false;
80c2bad6
MS
1923 /* Set the upper bound to the maximum to prevent
1924 it from being adjusted in the next iteration but
1925 leave MINLEN and the more conservative MAXBOUND
1926 determined so far alone (or leave them null if
1927 they haven't been set yet). That the MINLEN is
1928 in fact zero can be determined from MAXLEN being
1929 unbounded but the discovered minimum is used for
1930 diagnostics. */
730832cd 1931 pdata->maxlen = build_all_ones_cst (size_type_node);
88d0c3f0 1932 }
fef5a0d9 1933 }
fef5a0d9
RB
1934 return true;
1935
1936 default:
1937 return false;
1938 }
1939}
5d6655eb 1940
97623b52
MS
1941/* Try to obtain the range of the lengths of the string(s) referenced
1942 by ARG, or the size of the largest array ARG refers to if the range
a7160771
MS
1943 of lengths cannot be determined, and store all in *PDATA which must
1944 be zero-initialized on input except PDATA->MAXBOUND may be set to
1945 a non-null tree node other than INTEGER_CST to request to have it
1946 set to the length of the longest string in a PHI. ELTSIZE is
1947 the expected size of the string element in bytes: 1 for char and
97623b52
MS
1948 some power of 2 for wide characters.
1949 Return true if the range [PDATA->MINLEN, PDATA->MAXLEN] is suitable
1950 for optimization. Returning false means that a nonzero PDATA->MINLEN
1951 doesn't reflect the true lower bound of the range when PDATA->MAXLEN
1952 is -1 (in that case, the actual range is indeterminate, i.e.,
1953 [0, PTRDIFF_MAX - 2]. */
88d0c3f0 1954
3f343040 1955bool
84de9426 1956get_range_strlen (tree arg, c_strlen_data *pdata, unsigned eltsize)
88d0c3f0
MS
1957{
1958 bitmap visited = NULL;
a7160771 1959 tree maxbound = pdata->maxbound;
88d0c3f0 1960
84de9426 1961 if (!get_range_strlen (arg, &visited, SRK_LENRANGE, pdata, eltsize))
730832cd 1962 {
5d6655eb
MS
1963 /* On failure extend the length range to an impossible maximum
1964 (a valid MAXLEN must be less than PTRDIFF_MAX - 1). Other
1965 members can stay unchanged regardless. */
1966 pdata->minlen = ssize_int (0);
1967 pdata->maxlen = build_all_ones_cst (size_type_node);
730832cd 1968 }
5d6655eb
MS
1969 else if (!pdata->minlen)
1970 pdata->minlen = ssize_int (0);
1971
a7160771
MS
1972 /* If it's unchanged from it initial non-null value, set the conservative
1973 MAXBOUND to SIZE_MAX. Otherwise leave it null (if it is null). */
1974 if (maxbound && pdata->maxbound == maxbound)
1975 pdata->maxbound = build_all_ones_cst (size_type_node);
88d0c3f0
MS
1976
1977 if (visited)
1978 BITMAP_FREE (visited);
3f343040 1979
03c4a945 1980 return !integer_all_onesp (pdata->maxlen);
88d0c3f0
MS
1981}
1982
5d6655eb
MS
1983/* Return the maximum value for ARG given RKIND (see strlen_range_kind).
1984 For ARG of pointer types, NONSTR indicates if the caller is prepared
1985 to handle unterminated strings. For integer ARG and when RKIND ==
1986 SRK_INT_VALUE, NONSTR must be null.
e08341bb 1987
5d6655eb
MS
1988 If an unterminated array is discovered and our caller handles
1989 unterminated arrays, then bubble up the offending DECL and
e08341bb
MS
1990 return the maximum size. Otherwise return NULL. */
1991
598f7235
MS
1992static tree
1993get_maxval_strlen (tree arg, strlen_range_kind rkind, tree *nonstr = NULL)
dcb7fae2 1994{
598f7235
MS
1995 /* A non-null NONSTR is meaningless when determining the maximum
1996 value of an integer ARG. */
1997 gcc_assert (rkind != SRK_INT_VALUE || nonstr == NULL);
1998 /* ARG must have an integral type when RKIND says so. */
1999 gcc_assert (rkind != SRK_INT_VALUE || INTEGRAL_TYPE_P (TREE_TYPE (arg)));
2000
dcb7fae2 2001 bitmap visited = NULL;
3f343040 2002
5d6655eb
MS
2003 /* Reset DATA.MAXLEN if the call fails or when DATA.MAXLEN
2004 is unbounded. */
730832cd 2005 c_strlen_data lendata = { };
03c4a945 2006 if (!get_range_strlen (arg, &visited, rkind, &lendata, /* eltsize = */1))
730832cd 2007 lendata.maxlen = NULL_TREE;
5d6655eb
MS
2008 else if (lendata.maxlen && integer_all_onesp (lendata.maxlen))
2009 lendata.maxlen = NULL_TREE;
2010
dcb7fae2
RB
2011 if (visited)
2012 BITMAP_FREE (visited);
2013
e08341bb
MS
2014 if (nonstr)
2015 {
2016 /* For callers prepared to handle unterminated arrays set
2017 *NONSTR to point to the declaration of the array and return
2018 the maximum length/size. */
730832cd
MS
2019 *nonstr = lendata.decl;
2020 return lendata.maxlen;
e08341bb
MS
2021 }
2022
2023 /* Fail if the constant array isn't nul-terminated. */
730832cd 2024 return lendata.decl ? NULL_TREE : lendata.maxlen;
dcb7fae2
RB
2025}
2026
fef5a0d9
RB
2027
2028/* Fold function call to builtin strcpy with arguments DEST and SRC.
2029 If LEN is not NULL, it represents the length of the string to be
2030 copied. Return NULL_TREE if no simplification can be made. */
2031
2032static bool
2033gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
dcb7fae2 2034 tree dest, tree src)
fef5a0d9 2035{
cc8bea0a
MS
2036 gimple *stmt = gsi_stmt (*gsi);
2037 location_t loc = gimple_location (stmt);
fef5a0d9
RB
2038 tree fn;
2039
2040 /* If SRC and DEST are the same (and not volatile), return DEST. */
2041 if (operand_equal_p (src, dest, 0))
2042 {
8cd95cec
MS
2043 /* Issue -Wrestrict unless the pointers are null (those do
2044 not point to objects and so do not indicate an overlap;
2045 such calls could be the result of sanitization and jump
2046 threading). */
2047 if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
e9b9fa4c
MS
2048 {
2049 tree func = gimple_call_fndecl (stmt);
cc8bea0a 2050
e9b9fa4c
MS
2051 warning_at (loc, OPT_Wrestrict,
2052 "%qD source argument is the same as destination",
2053 func);
2054 }
cc8bea0a 2055
fef5a0d9
RB
2056 replace_call_with_value (gsi, dest);
2057 return true;
2058 }
2059
2060 if (optimize_function_for_size_p (cfun))
2061 return false;
2062
2063 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2064 if (!fn)
2065 return false;
2066
e08341bb
MS
2067 /* Set to non-null if ARG refers to an unterminated array. */
2068 tree nonstr = NULL;
598f7235 2069 tree len = get_maxval_strlen (src, SRK_STRLEN, &nonstr);
e08341bb
MS
2070
2071 if (nonstr)
2072 {
2073 /* Avoid folding calls with unterminated arrays. */
2074 if (!gimple_no_warning_p (stmt))
d14c547a 2075 warn_string_no_nul (loc, NULL_TREE, "strcpy", src, nonstr);
e08341bb
MS
2076 gimple_set_no_warning (stmt, true);
2077 return false;
2078 }
2079
fef5a0d9 2080 if (!len)
dcb7fae2 2081 return false;
fef5a0d9
RB
2082
2083 len = fold_convert_loc (loc, size_type_node, len);
2084 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
2085 len = force_gimple_operand_gsi (gsi, len, true,
2086 NULL_TREE, true, GSI_SAME_STMT);
355fe088 2087 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
fef5a0d9
RB
2088 replace_call_with_call_and_fold (gsi, repl);
2089 return true;
2090}
2091
2092/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
2093 If SLEN is not NULL, it represents the length of the source string.
2094 Return NULL_TREE if no simplification can be made. */
2095
2096static bool
dcb7fae2
RB
2097gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
2098 tree dest, tree src, tree len)
fef5a0d9 2099{
025d57f0
MS
2100 gimple *stmt = gsi_stmt (*gsi);
2101 location_t loc = gimple_location (stmt);
6a33d0ff 2102 bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;
fef5a0d9
RB
2103
2104 /* If the LEN parameter is zero, return DEST. */
2105 if (integer_zerop (len))
2106 {
53b28abf 2107 /* Avoid warning if the destination refers to an array/pointer
6a33d0ff
MS
2108 decorate with attribute nonstring. */
2109 if (!nonstring)
2110 {
2111 tree fndecl = gimple_call_fndecl (stmt);
6a33d0ff
MS
2112
2113 /* Warn about the lack of nul termination: the result is not
2114 a (nul-terminated) string. */
598f7235 2115 tree slen = get_maxval_strlen (src, SRK_STRLEN);
6a33d0ff
MS
2116 if (slen && !integer_zerop (slen))
2117 warning_at (loc, OPT_Wstringop_truncation,
2118 "%G%qD destination unchanged after copying no bytes "
2119 "from a string of length %E",
8a45b051 2120 stmt, fndecl, slen);
6a33d0ff
MS
2121 else
2122 warning_at (loc, OPT_Wstringop_truncation,
2123 "%G%qD destination unchanged after copying no bytes",
8a45b051 2124 stmt, fndecl);
6a33d0ff 2125 }
025d57f0 2126
fef5a0d9
RB
2127 replace_call_with_value (gsi, dest);
2128 return true;
2129 }
2130
2131 /* We can't compare slen with len as constants below if len is not a
2132 constant. */
dcb7fae2 2133 if (TREE_CODE (len) != INTEGER_CST)
fef5a0d9
RB
2134 return false;
2135
fef5a0d9 2136 /* Now, we must be passed a constant src ptr parameter. */
598f7235 2137 tree slen = get_maxval_strlen (src, SRK_STRLEN);
dcb7fae2 2138 if (!slen || TREE_CODE (slen) != INTEGER_CST)
fef5a0d9
RB
2139 return false;
2140
025d57f0
MS
2141 /* The size of the source string including the terminating nul. */
2142 tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
fef5a0d9
RB
2143
2144 /* We do not support simplification of this case, though we do
2145 support it when expanding trees into RTL. */
2146 /* FIXME: generate a call to __builtin_memset. */
025d57f0 2147 if (tree_int_cst_lt (ssize, len))
fef5a0d9
RB
2148 return false;
2149
5d0d5d68
MS
2150 /* Diagnose truncation that leaves the copy unterminated. */
2151 maybe_diag_stxncpy_trunc (*gsi, src, len);
025d57f0 2152
fef5a0d9 2153 /* OK transform into builtin memcpy. */
025d57f0 2154 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
fef5a0d9
RB
2155 if (!fn)
2156 return false;
2157
2158 len = fold_convert_loc (loc, size_type_node, len);
2159 len = force_gimple_operand_gsi (gsi, len, true,
2160 NULL_TREE, true, GSI_SAME_STMT);
355fe088 2161 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
fef5a0d9 2162 replace_call_with_call_and_fold (gsi, repl);
025d57f0 2163
fef5a0d9
RB
2164 return true;
2165}
2166
71dea1dd
WD
2167/* Fold function call to builtin strchr or strrchr.
2168 If both arguments are constant, evaluate and fold the result,
2169 otherwise simplify str(r)chr (str, 0) into str + strlen (str).
912d9ec3
WD
2170 In general strlen is significantly faster than strchr
2171 due to being a simpler operation. */
2172static bool
71dea1dd 2173gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
912d9ec3
WD
2174{
2175 gimple *stmt = gsi_stmt (*gsi);
2176 tree str = gimple_call_arg (stmt, 0);
2177 tree c = gimple_call_arg (stmt, 1);
2178 location_t loc = gimple_location (stmt);
71dea1dd
WD
2179 const char *p;
2180 char ch;
912d9ec3 2181
71dea1dd 2182 if (!gimple_call_lhs (stmt))
912d9ec3
WD
2183 return false;
2184
b5338fb3
MS
2185 /* Avoid folding if the first argument is not a nul-terminated array.
2186 Defer warning until later. */
2187 if (!check_nul_terminated_array (NULL_TREE, str))
2188 return false;
2189
71dea1dd
WD
2190 if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
2191 {
2192 const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);
2193
2194 if (p1 == NULL)
2195 {
2196 replace_call_with_value (gsi, integer_zero_node);
2197 return true;
2198 }
2199
2200 tree len = build_int_cst (size_type_node, p1 - p);
2201 gimple_seq stmts = NULL;
2202 gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2203 POINTER_PLUS_EXPR, str, len);
2204 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2205 gsi_replace_with_seq_vops (gsi, stmts);
2206 return true;
2207 }
2208
2209 if (!integer_zerop (c))
912d9ec3
WD
2210 return false;
2211
71dea1dd 2212 /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size. */
c8952930 2213 if (is_strrchr && optimize_function_for_size_p (cfun))
71dea1dd
WD
2214 {
2215 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
2216
c8952930 2217 if (strchr_fn)
71dea1dd
WD
2218 {
2219 gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
2220 replace_call_with_call_and_fold (gsi, repl);
2221 return true;
2222 }
2223
2224 return false;
2225 }
2226
912d9ec3
WD
2227 tree len;
2228 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
2229
2230 if (!strlen_fn)
2231 return false;
2232
2233 /* Create newstr = strlen (str). */
2234 gimple_seq stmts = NULL;
2235 gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
2236 gimple_set_location (new_stmt, loc);
a15ebbcd 2237 len = create_tmp_reg_or_ssa_name (size_type_node);
912d9ec3
WD
2238 gimple_call_set_lhs (new_stmt, len);
2239 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2240
2241 /* Create (str p+ strlen (str)). */
2242 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2243 POINTER_PLUS_EXPR, str, len);
2244 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2245 gsi_replace_with_seq_vops (gsi, stmts);
2246 /* gsi now points at the assignment to the lhs, get a
2247 stmt iterator to the strlen.
2248 ??? We can't use gsi_for_stmt as that doesn't work when the
2249 CFG isn't built yet. */
2250 gimple_stmt_iterator gsi2 = *gsi;
2251 gsi_prev (&gsi2);
2252 fold_stmt (&gsi2);
2253 return true;
2254}
2255
c8952930
JJ
2256/* Fold function call to builtin strstr.
2257 If both arguments are constant, evaluate and fold the result,
2258 additionally fold strstr (x, "") into x and strstr (x, "c")
2259 into strchr (x, 'c'). */
2260static bool
2261gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
2262{
2263 gimple *stmt = gsi_stmt (*gsi);
b5338fb3
MS
2264 if (!gimple_call_lhs (stmt))
2265 return false;
2266
c8952930
JJ
2267 tree haystack = gimple_call_arg (stmt, 0);
2268 tree needle = gimple_call_arg (stmt, 1);
c8952930 2269
b5338fb3
MS
2270 /* Avoid folding if either argument is not a nul-terminated array.
2271 Defer warning until later. */
2272 if (!check_nul_terminated_array (NULL_TREE, haystack)
2273 || !check_nul_terminated_array (NULL_TREE, needle))
c8952930
JJ
2274 return false;
2275
b5338fb3 2276 const char *q = c_getstr (needle);
c8952930
JJ
2277 if (q == NULL)
2278 return false;
2279
b5338fb3 2280 if (const char *p = c_getstr (haystack))
c8952930
JJ
2281 {
2282 const char *r = strstr (p, q);
2283
2284 if (r == NULL)
2285 {
2286 replace_call_with_value (gsi, integer_zero_node);
2287 return true;
2288 }
2289
2290 tree len = build_int_cst (size_type_node, r - p);
2291 gimple_seq stmts = NULL;
2292 gimple *new_stmt
2293 = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
2294 haystack, len);
2295 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2296 gsi_replace_with_seq_vops (gsi, stmts);
2297 return true;
2298 }
2299
2300 /* For strstr (x, "") return x. */
2301 if (q[0] == '\0')
2302 {
2303 replace_call_with_value (gsi, haystack);
2304 return true;
2305 }
2306
2307 /* Transform strstr (x, "c") into strchr (x, 'c'). */
2308 if (q[1] == '\0')
2309 {
2310 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
2311 if (strchr_fn)
2312 {
2313 tree c = build_int_cst (integer_type_node, q[0]);
2314 gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
2315 replace_call_with_call_and_fold (gsi, repl);
2316 return true;
2317 }
2318 }
2319
2320 return false;
2321}
2322
fef5a0d9
RB
2323/* Simplify a call to the strcat builtin. DST and SRC are the arguments
2324 to the call.
2325
2326 Return NULL_TREE if no simplification was possible, otherwise return the
2327 simplified form of the call as a tree.
2328
2329 The simplified form may be a constant or other expression which
2330 computes the same value, but in a more efficient manner (including
2331 calls to other builtin functions).
2332
2333 The call may contain arguments which need to be evaluated, but
2334 which are not useful to determine the result of the call. In
2335 this case we return a chain of COMPOUND_EXPRs. The LHS of each
2336 COMPOUND_EXPR will be an argument which must be evaluated.
2337 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
2338 COMPOUND_EXPR in the chain will contain the tree for the simplified
2339 form of the builtin function call. */
2340
2341static bool
dcb7fae2 2342gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
fef5a0d9 2343{
355fe088 2344 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2 2345 location_t loc = gimple_location (stmt);
fef5a0d9
RB
2346
2347 const char *p = c_getstr (src);
2348
2349 /* If the string length is zero, return the dst parameter. */
2350 if (p && *p == '\0')
2351 {
2352 replace_call_with_value (gsi, dst);
2353 return true;
2354 }
2355
2356 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
2357 return false;
2358
2359 /* See if we can store by pieces into (dst + strlen(dst)). */
2360 tree newdst;
2361 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
2362 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2363
2364 if (!strlen_fn || !memcpy_fn)
2365 return false;
2366
2367 /* If the length of the source string isn't computable don't
2368 split strcat into strlen and memcpy. */
598f7235 2369 tree len = get_maxval_strlen (src, SRK_STRLEN);
fef5a0d9 2370 if (! len)
fef5a0d9
RB
2371 return false;
2372
2373 /* Create strlen (dst). */
2374 gimple_seq stmts = NULL, stmts2;
355fe088 2375 gimple *repl = gimple_build_call (strlen_fn, 1, dst);
fef5a0d9 2376 gimple_set_location (repl, loc);
a15ebbcd 2377 newdst = create_tmp_reg_or_ssa_name (size_type_node);
fef5a0d9
RB
2378 gimple_call_set_lhs (repl, newdst);
2379 gimple_seq_add_stmt_without_update (&stmts, repl);
2380
2381 /* Create (dst p+ strlen (dst)). */
2382 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
2383 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
2384 gimple_seq_add_seq_without_update (&stmts, stmts2);
2385
2386 len = fold_convert_loc (loc, size_type_node, len);
2387 len = size_binop_loc (loc, PLUS_EXPR, len,
2388 build_int_cst (size_type_node, 1));
2389 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
2390 gimple_seq_add_seq_without_update (&stmts, stmts2);
2391
2392 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
2393 gimple_seq_add_stmt_without_update (&stmts, repl);
2394 if (gimple_call_lhs (stmt))
2395 {
2396 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
2397 gimple_seq_add_stmt_without_update (&stmts, repl);
2398 gsi_replace_with_seq_vops (gsi, stmts);
2399 /* gsi now points at the assignment to the lhs, get a
2400 stmt iterator to the memcpy call.
2401 ??? We can't use gsi_for_stmt as that doesn't work when the
2402 CFG isn't built yet. */
2403 gimple_stmt_iterator gsi2 = *gsi;
2404 gsi_prev (&gsi2);
2405 fold_stmt (&gsi2);
2406 }
2407 else
2408 {
2409 gsi_replace_with_seq_vops (gsi, stmts);
2410 fold_stmt (gsi);
2411 }
2412 return true;
2413}
2414
07f1cf56
RB
2415/* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
2416 are the arguments to the call. */
2417
2418static bool
2419gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
2420{
355fe088 2421 gimple *stmt = gsi_stmt (*gsi);
07f1cf56
RB
2422 tree dest = gimple_call_arg (stmt, 0);
2423 tree src = gimple_call_arg (stmt, 1);
2424 tree size = gimple_call_arg (stmt, 2);
2425 tree fn;
2426 const char *p;
2427
2428
2429 p = c_getstr (src);
2430 /* If the SRC parameter is "", return DEST. */
2431 if (p && *p == '\0')
2432 {
2433 replace_call_with_value (gsi, dest);
2434 return true;
2435 }
2436
2437 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
2438 return false;
2439
2440 /* If __builtin_strcat_chk is used, assume strcat is available. */
2441 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
2442 if (!fn)
2443 return false;
2444
355fe088 2445 gimple *repl = gimple_build_call (fn, 2, dest, src);
07f1cf56
RB
2446 replace_call_with_call_and_fold (gsi, repl);
2447 return true;
2448}
2449
ad03a744
RB
2450/* Simplify a call to the strncat builtin. */
2451
2452static bool
2453gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
2454{
8a45b051 2455 gimple *stmt = gsi_stmt (*gsi);
ad03a744
RB
2456 tree dst = gimple_call_arg (stmt, 0);
2457 tree src = gimple_call_arg (stmt, 1);
2458 tree len = gimple_call_arg (stmt, 2);
2459
2460 const char *p = c_getstr (src);
2461
2462 /* If the requested length is zero, or the src parameter string
2463 length is zero, return the dst parameter. */
2464 if (integer_zerop (len) || (p && *p == '\0'))
2465 {
2466 replace_call_with_value (gsi, dst);
2467 return true;
2468 }
2469
025d57f0
MS
2470 if (TREE_CODE (len) != INTEGER_CST || !p)
2471 return false;
2472
2473 unsigned srclen = strlen (p);
2474
2475 int cmpsrc = compare_tree_int (len, srclen);
2476
2477 /* Return early if the requested len is less than the string length.
2478 Warnings will be issued elsewhere later. */
2479 if (cmpsrc < 0)
2480 return false;
2481
2482 unsigned HOST_WIDE_INT dstsize;
2483
2484 bool nowarn = gimple_no_warning_p (stmt);
2485
2486 if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
ad03a744 2487 {
025d57f0 2488 int cmpdst = compare_tree_int (len, dstsize);
ad03a744 2489
025d57f0
MS
2490 if (cmpdst >= 0)
2491 {
2492 tree fndecl = gimple_call_fndecl (stmt);
2493
2494 /* Strncat copies (at most) LEN bytes and always appends
2495 the terminating NUL so the specified bound should never
2496 be equal to (or greater than) the size of the destination.
2497 If it is, the copy could overflow. */
2498 location_t loc = gimple_location (stmt);
2499 nowarn = warning_at (loc, OPT_Wstringop_overflow_,
2500 cmpdst == 0
2501 ? G_("%G%qD specified bound %E equals "
2502 "destination size")
2503 : G_("%G%qD specified bound %E exceeds "
2504 "destination size %wu"),
2505 stmt, fndecl, len, dstsize);
2506 if (nowarn)
2507 gimple_set_no_warning (stmt, true);
2508 }
2509 }
ad03a744 2510
025d57f0
MS
2511 if (!nowarn && cmpsrc == 0)
2512 {
2513 tree fndecl = gimple_call_fndecl (stmt);
025d57f0 2514 location_t loc = gimple_location (stmt);
eec5f615
MS
2515
2516 /* To avoid possible overflow the specified bound should also
2517 not be equal to the length of the source, even when the size
2518 of the destination is unknown (it's not an uncommon mistake
2519 to specify as the bound to strncpy the length of the source). */
025d57f0
MS
2520 if (warning_at (loc, OPT_Wstringop_overflow_,
2521 "%G%qD specified bound %E equals source length",
2522 stmt, fndecl, len))
2523 gimple_set_no_warning (stmt, true);
ad03a744
RB
2524 }
2525
025d57f0
MS
2526 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
2527
2528 /* If the replacement _DECL isn't initialized, don't do the
2529 transformation. */
2530 if (!fn)
2531 return false;
2532
2533 /* Otherwise, emit a call to strcat. */
2534 gcall *repl = gimple_build_call (fn, 2, dst, src);
2535 replace_call_with_call_and_fold (gsi, repl);
2536 return true;
ad03a744
RB
2537}
2538
745583f9
RB
2539/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2540 LEN, and SIZE. */
2541
2542static bool
2543gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
2544{
355fe088 2545 gimple *stmt = gsi_stmt (*gsi);
745583f9
RB
2546 tree dest = gimple_call_arg (stmt, 0);
2547 tree src = gimple_call_arg (stmt, 1);
2548 tree len = gimple_call_arg (stmt, 2);
2549 tree size = gimple_call_arg (stmt, 3);
2550 tree fn;
2551 const char *p;
2552
2553 p = c_getstr (src);
2554 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2555 if ((p && *p == '\0')
2556 || integer_zerop (len))
2557 {
2558 replace_call_with_value (gsi, dest);
2559 return true;
2560 }
2561
2562 if (! tree_fits_uhwi_p (size))
2563 return false;
2564
2565 if (! integer_all_onesp (size))
2566 {
2567 tree src_len = c_strlen (src, 1);
2568 if (src_len
2569 && tree_fits_uhwi_p (src_len)
2570 && tree_fits_uhwi_p (len)
2571 && ! tree_int_cst_lt (len, src_len))
2572 {
2573 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2574 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
2575 if (!fn)
2576 return false;
2577
355fe088 2578 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
745583f9
RB
2579 replace_call_with_call_and_fold (gsi, repl);
2580 return true;
2581 }
2582 return false;
2583 }
2584
2585 /* If __builtin_strncat_chk is used, assume strncat is available. */
2586 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2587 if (!fn)
2588 return false;
2589
355fe088 2590 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
745583f9
RB
2591 replace_call_with_call_and_fold (gsi, repl);
2592 return true;
2593}
2594
a918bfbf
ML
2595/* Build and append gimple statements to STMTS that would load a first
2596 character of a memory location identified by STR. LOC is location
2597 of the statement. */
2598
2599static tree
2600gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2601{
2602 tree var;
2603
2604 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2605 tree cst_uchar_ptr_node
2606 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2607 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2608
2609 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2610 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2611 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2612
2613 gimple_assign_set_lhs (stmt, var);
2614 gimple_seq_add_stmt_without_update (stmts, stmt);
2615
2616 return var;
2617}
2618
d2f8402a 2619/* Fold a call to the str{n}{case}cmp builtin pointed by GSI iterator. */
a918bfbf
ML
2620
2621static bool
2622gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
2623{
2624 gimple *stmt = gsi_stmt (*gsi);
2625 tree callee = gimple_call_fndecl (stmt);
2626 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2627
2628 tree type = integer_type_node;
2629 tree str1 = gimple_call_arg (stmt, 0);
2630 tree str2 = gimple_call_arg (stmt, 1);
2631 tree lhs = gimple_call_lhs (stmt);
d86d8b35
MS
2632
2633 tree bound_node = NULL_TREE;
d2f8402a 2634 unsigned HOST_WIDE_INT bound = HOST_WIDE_INT_M1U;
a918bfbf
ML
2635
2636 /* Handle strncmp and strncasecmp functions. */
2637 if (gimple_call_num_args (stmt) == 3)
2638 {
d86d8b35
MS
2639 bound_node = gimple_call_arg (stmt, 2);
2640 if (tree_fits_uhwi_p (bound_node))
2641 bound = tree_to_uhwi (bound_node);
a918bfbf
ML
2642 }
2643
d86d8b35 2644 /* If the BOUND parameter is zero, return zero. */
d2f8402a 2645 if (bound == 0)
a918bfbf
ML
2646 {
2647 replace_call_with_value (gsi, integer_zero_node);
2648 return true;
2649 }
2650
2651 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
2652 if (operand_equal_p (str1, str2, 0))
2653 {
2654 replace_call_with_value (gsi, integer_zero_node);
2655 return true;
2656 }
2657
d2f8402a
MS
2658 /* Initially set to the number of characters, including the terminating
2659 nul if each array has one. LENx == strnlen (Sx, LENx) implies that
2660 the array Sx is not terminated by a nul.
2661 For nul-terminated strings then adjusted to their length so that
2662 LENx == NULPOSx holds. */
2663 unsigned HOST_WIDE_INT len1 = HOST_WIDE_INT_MAX, len2 = len1;
866626ef
MS
2664 const char *p1 = getbyterep (str1, &len1);
2665 const char *p2 = getbyterep (str2, &len2);
d2f8402a
MS
2666
2667 /* The position of the terminating nul character if one exists, otherwise
2668 a value greater than LENx. */
2669 unsigned HOST_WIDE_INT nulpos1 = HOST_WIDE_INT_MAX, nulpos2 = nulpos1;
2670
2671 if (p1)
2672 {
2673 size_t n = strnlen (p1, len1);
2674 if (n < len1)
2675 len1 = nulpos1 = n;
2676 }
2677
2678 if (p2)
2679 {
2680 size_t n = strnlen (p2, len2);
2681 if (n < len2)
2682 len2 = nulpos2 = n;
2683 }
a918bfbf
ML
2684
2685 /* For known strings, return an immediate value. */
2686 if (p1 && p2)
2687 {
2688 int r = 0;
2689 bool known_result = false;
2690
2691 switch (fcode)
2692 {
2693 case BUILT_IN_STRCMP:
8b0b334a 2694 case BUILT_IN_STRCMP_EQ:
d2f8402a 2695 if (len1 != nulpos1 || len2 != nulpos2)
a918bfbf 2696 break;
d2f8402a
MS
2697
2698 r = strcmp (p1, p2);
2699 known_result = true;
2700 break;
2701
a918bfbf 2702 case BUILT_IN_STRNCMP:
8b0b334a 2703 case BUILT_IN_STRNCMP_EQ:
a918bfbf 2704 {
d86d8b35
MS
2705 if (bound == HOST_WIDE_INT_M1U)
2706 break;
2707
d2f8402a
MS
2708 /* Reduce the bound to be no more than the length
2709 of the shorter of the two strings, or the sizes
2710 of the unterminated arrays. */
2711 unsigned HOST_WIDE_INT n = bound;
2712
2713 if (len1 == nulpos1 && len1 < n)
2714 n = len1 + 1;
2715 if (len2 == nulpos2 && len2 < n)
2716 n = len2 + 1;
2717
2718 if (MIN (nulpos1, nulpos2) + 1 < n)
a918bfbf 2719 break;
d2f8402a
MS
2720
2721 r = strncmp (p1, p2, n);
a918bfbf
ML
2722 known_result = true;
2723 break;
2724 }
2725 /* Only handleable situation is where the string are equal (result 0),
2726 which is already handled by operand_equal_p case. */
2727 case BUILT_IN_STRCASECMP:
2728 break;
2729 case BUILT_IN_STRNCASECMP:
2730 {
d2f8402a 2731 if (bound == HOST_WIDE_INT_M1U)
a918bfbf 2732 break;
d2f8402a 2733 r = strncmp (p1, p2, bound);
a918bfbf
ML
2734 if (r == 0)
2735 known_result = true;
5de73c05 2736 break;
a918bfbf
ML
2737 }
2738 default:
2739 gcc_unreachable ();
2740 }
2741
2742 if (known_result)
2743 {
2744 replace_call_with_value (gsi, build_cmp_result (type, r));
2745 return true;
2746 }
2747 }
2748
d2f8402a 2749 bool nonzero_bound = (bound >= 1 && bound < HOST_WIDE_INT_M1U)
a918bfbf 2750 || fcode == BUILT_IN_STRCMP
8b0b334a 2751 || fcode == BUILT_IN_STRCMP_EQ
a918bfbf
ML
2752 || fcode == BUILT_IN_STRCASECMP;
2753
2754 location_t loc = gimple_location (stmt);
2755
2756 /* If the second arg is "", return *(const unsigned char*)arg1. */
d2f8402a 2757 if (p2 && *p2 == '\0' && nonzero_bound)
a918bfbf
ML
2758 {
2759 gimple_seq stmts = NULL;
2760 tree var = gimple_load_first_char (loc, str1, &stmts);
2761 if (lhs)
2762 {
2763 stmt = gimple_build_assign (lhs, NOP_EXPR, var);
2764 gimple_seq_add_stmt_without_update (&stmts, stmt);
2765 }
2766
2767 gsi_replace_with_seq_vops (gsi, stmts);
2768 return true;
2769 }
2770
2771 /* If the first arg is "", return -*(const unsigned char*)arg2. */
d2f8402a 2772 if (p1 && *p1 == '\0' && nonzero_bound)
a918bfbf
ML
2773 {
2774 gimple_seq stmts = NULL;
2775 tree var = gimple_load_first_char (loc, str2, &stmts);
2776
2777 if (lhs)
2778 {
2779 tree c = create_tmp_reg_or_ssa_name (integer_type_node);
2780 stmt = gimple_build_assign (c, NOP_EXPR, var);
2781 gimple_seq_add_stmt_without_update (&stmts, stmt);
2782
2783 stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
2784 gimple_seq_add_stmt_without_update (&stmts, stmt);
2785 }
2786
2787 gsi_replace_with_seq_vops (gsi, stmts);
2788 return true;
2789 }
2790
d2f8402a 2791 /* If BOUND is one, return an expression corresponding to
a918bfbf 2792 (*(const unsigned char*)arg2 - *(const unsigned char*)arg1). */
d2f8402a 2793 if (fcode == BUILT_IN_STRNCMP && bound == 1)
a918bfbf
ML
2794 {
2795 gimple_seq stmts = NULL;
2796 tree temp1 = gimple_load_first_char (loc, str1, &stmts);
2797 tree temp2 = gimple_load_first_char (loc, str2, &stmts);
2798
2799 if (lhs)
2800 {
2801 tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
2802 gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
2803 gimple_seq_add_stmt_without_update (&stmts, convert1);
2804
2805 tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
2806 gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
2807 gimple_seq_add_stmt_without_update (&stmts, convert2);
2808
2809 stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
2810 gimple_seq_add_stmt_without_update (&stmts, stmt);
2811 }
2812
2813 gsi_replace_with_seq_vops (gsi, stmts);
2814 return true;
2815 }
2816
d2f8402a
MS
2817 /* If BOUND is greater than the length of one constant string,
2818 and the other argument is also a nul-terminated string, replace
2819 strncmp with strcmp. */
2820 if (fcode == BUILT_IN_STRNCMP
2821 && bound > 0 && bound < HOST_WIDE_INT_M1U
2822 && ((p2 && len2 < bound && len2 == nulpos2)
2823 || (p1 && len1 < bound && len1 == nulpos1)))
caed5c92
QZ
2824 {
2825 tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
2826 if (!fn)
2827 return false;
2828 gimple *repl = gimple_build_call (fn, 2, str1, str2);
2829 replace_call_with_call_and_fold (gsi, repl);
2830 return true;
2831 }
2832
a918bfbf
ML
2833 return false;
2834}
2835
/* Fold a call to the memchr builtin pointed to by GSI iterator.
   Replaces the call with a constant null pointer when the search is
   provably unsuccessful, or with DEST + OFFSET arithmetic when the
   searched byte's position is known at compile time.
   Return true if the call was folded, false otherwise.  */

static bool
gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree arg1 = gimple_call_arg (stmt, 0);	/* Haystack pointer.  */
  tree arg2 = gimple_call_arg (stmt, 1);	/* Byte searched for.  */
  tree len = gimple_call_arg (stmt, 2);		/* Search length.  */

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
      return true;
    }

  /* Punt unless both the searched-for byte and the length are compile
     time constants representable on the target.  */
  char c;
  if (TREE_CODE (arg2) != INTEGER_CST
      || !tree_fits_uhwi_p (len)
      || !target_char_cst_p (arg2, &c))
    return false;

  unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
  unsigned HOST_WIDE_INT string_length;
  /* P1 is the constant byte representation of the haystack, if known.  */
  const char *p1 = getbyterep (arg1, &string_length);

  if (p1)
    {
      /* Do the search at compile time, bounded by both the requested
	 length and the known representation size.  */
      const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
      if (r == NULL)
	{
	  tree mem_size, offset_node;
	  byte_representation (arg1, &offset_node, &mem_size, NULL);
	  unsigned HOST_WIDE_INT offset = (offset_node == NULL_TREE)
					  ? 0 : tree_to_uhwi (offset_node);
	  /* MEM_SIZE is the size of the array the string literal
	     is stored in.  */
	  unsigned HOST_WIDE_INT string_size = tree_to_uhwi (mem_size) - offset;
	  gcc_checking_assert (string_length <= string_size);
	  /* Only fold to null when the whole searched range is covered
	     by the known bytes; otherwise the tail is unknown.  */
	  if (length <= string_size)
	    {
	      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
	      return true;
	    }
	}
      else
	{
	  /* Found: result is ARG1 + constant OFFSET.  */
	  unsigned HOST_WIDE_INT offset = r - p1;
	  gimple_seq stmts = NULL;
	  if (lhs != NULL_TREE)
	    {
	      tree offset_cst = build_int_cst (sizetype, offset);
	      gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
						   arg1, offset_cst);
	      gimple_seq_add_stmt_without_update (&stmts, stmt);
	    }
	  else
	    /* Result unused: the call can simply disappear.  */
	    gimple_seq_add_stmt_without_update (&stmts,
						gimple_build_nop ());

	  gsi_replace_with_seq_vops (gsi, stmts);
	  return true;
	}
    }

  return false;
}
a918bfbf 2905
/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the string and
   stream arguments to the call.  UNLOCKED is true if this is actually
   a call to fputs_unlocked.  Depending on the known length of the
   string, fold to fputc (length 1) or fwrite (length > 1), or delete
   the call entirely (length 0).  Return false if no simplification
   was possible.  */

static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
			   tree arg0, tree arg1,
			   bool unlocked)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
			 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
			 : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
			  ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
			  : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation
     (the replacements do not produce the same return value).  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, SRK_STRLEN);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely .  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
	const char *p = c_getstr (arg0);
	if (p != NULL)
	  {
	    if (!fn_fputc)
	      return false;

	    gimple *repl = gimple_build_call (fn_fputc, 2,
					      build_int_cst
					      (integer_type_node, p[0]), arg1);
	    replace_call_with_call_and_fold (gsi, repl);
	    return true;
	  }
      }
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
	/* If optimizing for size keep fputs.  */
	if (optimize_function_for_size_p (cfun))
	  return false;
	/* New argument list transforming fputs(string, stream) to
	   fwrite(string, 1, len, stream).  */
	if (!fn_fwrite)
	  return false;

	gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
					  size_one_node, len, arg1);
	replace_call_with_call_and_fold (gsi, repl);
	return true;
      }
    default:
      gcc_unreachable ();
    }
  return false;
}
2982
/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call.  FCODE is
   the BUILT_IN_* code of the builtin.  When the copy is provably
   within bounds (LEN, or its maximum value, fits in SIZE), replace
   the checking builtin with the plain mem* equivalent.  Return true
   if the call was folded.  */

static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
				tree dest, tree src, tree len, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  /* True if the call's return value is unused.  */
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	{
	  replace_call_with_value (gsi, dest);
	  return true;
	}
      else
	{
	  gimple_seq stmts = NULL;
	  len = gimple_convert_to_ptrofftype (&stmts, loc, len);
	  tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
				    TREE_TYPE (dest), dest, len);
	  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	  replace_call_with_value (gsi, temp);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  /* MAXLEN is the maximum value LEN can have, if known.  */
  tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
  /* SIZE of all ones means "unknown object size": no check needed.  */
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	      return false;
	    }
	}
      else
	maxlen = len;

      /* Punt if the copy might exceed the destination size.  */
      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
3083
/* Fold a call to the __st[rp]cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call.  FCODE is the
   BUILT_IN_* code of the builtin.  Replace the checking builtin with
   the plain st{r,p}cpy when the source length is provably smaller
   than SIZE, or degrade it to a cheaper checking variant otherwise.
   Return true if the call was folded.  */

static bool
gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
				tree dest,
				tree src, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  /* True if the call's return value is unused.  */
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree len, fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    {
      /* Issue -Wrestrict unless the pointers are null (those do
	 not point to objects and so do not indicate an overlap;
	 such calls could be the result of sanitization and jump
	 threading).  */
      if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
	{
	  tree func = gimple_call_fndecl (stmt);

	  warning_at (loc, OPT_Wrestrict,
		      "%qD source argument is the same as destination",
		      func);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  /* MAXLEN is the upper bound on the length of SRC, if known.  */
  tree maxlen = get_maxval_strlen (src, SRK_STRLENMAX);
  /* SIZE of all ones means "unknown object size": no check needed.  */
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return false;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 3, dest, src, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return false;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return false;

	      gimple_seq stmts = NULL;
	      len = force_gimple_operand (len, &stmts, true, NULL_TREE);
	      len = gimple_convert (&stmts, loc, size_type_node, len);
	      /* Copy LEN + 1 bytes to include the terminating nul.  */
	      len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
				  build_int_cst (size_type_node, 1));
	      gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	      gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	maxlen = len;

      /* Punt unless the copy (including the nul) provably fits.  */
      if (! tree_int_cst_lt (maxlen, size))
	return false;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
3188
/* Fold a call to the __st{r,p}ncpy_chk builtin.  DEST, SRC, LEN, and
   SIZE are the arguments to the call.  FCODE is the BUILT_IN_* code of
   the builtin.  Replace the checking builtin with the plain
   st{r,p}ncpy when LEN (or its maximum value) provably fits in SIZE.
   Return true if the call was folded.  */

static bool
gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
				 tree dest, tree src,
				 tree len, tree size,
				 enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  /* True if the call's return value is unused.  */
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
    {
      /* If return value of __stpncpy_chk is ignored,
	 optimize into __strncpy_chk.  */
      fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
      if (fn)
	{
	  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  /* MAXLEN is the maximum value LEN can have, if known.  */
  tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
  /* SIZE of all ones means "unknown object size": no check needed.  */
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      /* Punt if the copy might exceed the destination size.  */
      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
			      ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
3248
/* Fold function call to builtin stpcpy with arguments DEST and SRC.
   If the result is unused, degrade the call to strcpy; if the source
   length is a known constant, expand to memcpy plus DEST + LEN pointer
   arithmetic for the result.  Return false if no simplification can
   be made.  */

static bool
gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  location_t loc = gimple_location (stmt);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree fn, lenp1;

  /* If the result is unused, replace stpcpy with strcpy.  */
  if (gimple_call_lhs (stmt) == NULL_TREE)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;
      gimple_call_set_fndecl (stmt, fn);
      fold_stmt (gsi);
      return true;
    }

  /* Set to non-null if ARG refers to an unterminated array.  */
  c_strlen_data data = { };
  /* The size of the unterminated array if SRC referes to one.  */
  tree size;
  /* True if the size is exact/constant, false if it's the lower bound
     of a range.  */
  bool exact;
  tree len = c_strlen (src, 1, &data, 1);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    {
      data.decl = unterminated_array (src, &size, &exact);
      if (!data.decl)
	return false;
    }

  if (data.decl)
    {
      /* Avoid folding calls with unterminated arrays.  */
      if (!gimple_no_warning_p (stmt))
	warn_string_no_nul (loc, NULL_TREE, "stpcpy", src, data.decl, size,
			    exact);
      /* Suppress repeat diagnostics on later folding attempts.  */
      gimple_set_no_warning (stmt, true);
      return false;
    }

  if (optimize_function_for_size_p (cfun)
      /* If length is zero it's small enough.  */
      && !integer_zerop (len))
    return false;

  /* If the source has a known length replace stpcpy with memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  /* Build the memcpy of LEN + 1 bytes (including the nul).  */
  gimple_seq stmts = NULL;
  tree tem = gimple_convert (&stmts, loc, size_type_node, len);
  lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
			tem, build_int_cst (size_type_node, 1));
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
  /* The memcpy inherits the virtual operands of the stpcpy call.  */
  gimple_move_vops (repl, stmt);
  gsi_insert_before (gsi, repl, GSI_SAME_STMT);
  /* Replace the result with dest + len.  */
  stmts = NULL;
  tem = gimple_convert (&stmts, loc, sizetype, len);
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
				      POINTER_PLUS_EXPR, dest, tem);
  gsi_replace (gsi, ret, false);
  /* Finally fold the memcpy call.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
3329
/* Fold a call to {,v}snprintf_chk at the statement pointed to by GSI.
   Return false if a normal call should be emitted rather than folding
   the function.  FCODE is either BUILT_IN_SNPRINTF_CHK or
   BUILT_IN_VSNPRINTF_CHK.  When the bound is provably within the
   destination size, the _chk call is rewritten in place into the
   corresponding unchecked {,v}snprintf call.  */

static bool
gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
				  enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (gimple_call_num_args (stmt) < 5)
    return false;

  dest = gimple_call_arg (stmt, 0);
  len = gimple_call_arg (stmt, 1);
  flag = gimple_call_arg (stmt, 2);
  size = gimple_call_arg (stmt, 3);
  fmt = gimple_call_arg (stmt, 4);

  if (! tree_fits_uhwi_p (size))
    return false;

  /* SIZE of all ones means "unknown object size": no check needed.  */
  if (! integer_all_onesp (size))
    {
      /* MAXLEN is the maximum value LEN can have, if known.  */
      tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  if (!init_target_chars ())
    return false;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
			      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 5 argument by 3 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, len);
  gimple_call_set_arg (stmt, 2, fmt);
  for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
cbdd87d4 3410
/* Fold a call to __{,v}sprintf_chk at the statement pointed to by GSI.
   Return false if a normal call should be emitted rather than folding
   the function.  FCODE is either BUILT_IN_SPRINTF_CHK or
   BUILT_IN_VSPRINTF_CHK.  When the formatted output's length is known
   to fit the destination size, the _chk call is rewritten in place
   into the corresponding unchecked {,v}sprintf call.  */

static bool
gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
				 enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  unsigned nargs = gimple_call_num_args (stmt);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return false;
  dest = gimple_call_arg (stmt, 0);
  flag = gimple_call_arg (stmt, 1);
  size = gimple_call_arg (stmt, 2);
  fmt = gimple_call_arg (stmt, 3);

  if (! tree_fits_uhwi_p (size))
    return false;

  len = NULL_TREE;

  if (!init_target_chars ())
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = gimple_call_arg (stmt, 4);
	      if (POINTER_TYPE_P (TREE_TYPE (arg)))
		{
		  len = c_strlen (arg, 1);
		  if (! len || ! tree_fits_uhwi_p (len))
		    len = NULL_TREE;
		}
	    }
	}
    }

  /* SIZE of all ones means "unknown object size": no check needed.  */
  if (! integer_all_onesp (size))
    {
      /* LEN < SIZE leaves room for the terminating nul.  */
      if (! len || ! tree_int_cst_lt (len, size))
	return false;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
			      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 4 argument by 2 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, fmt);
  for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
3506
/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
   ORIG may be null if this is a 2-argument call.  We don't attempt to
   simplify calls with more than 3 arguments.

   Two patterns are handled: a format with no '%' at all becomes a
   strcpy of the format itself, and a "%s" format becomes a strcpy of
   the third argument.  If the call's result is used, its value (the
   number of characters written) must be computable at compile time.

   Return true if simplification was possible, otherwise false.  */

bool
gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree fmt = gimple_call_arg (stmt, 1);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (gimple_call_num_args (stmt) > 3)
    return false;

  if (gimple_call_num_args (stmt) == 3)
    orig = gimple_call_arg (stmt, 2);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return false;

      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
	return false;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
	 'format' is known to contain no % formats.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);

      /* Propagate the NO_WARNING bit to avoid issuing the same
	 warning more than once.  */
      if (gimple_no_warning_p (stmt))
	gimple_set_no_warning (repl, true);

      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  /* sprintf returns the number of characters written, which
	     here is simply the format's length.  */
	  repl = gimple_build_assign (lhs, build_int_cst (TREE_TYPE (lhs),
							  strlen (fmt_str)));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ??? We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn;
      fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return false;

      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
	return false;

      /* If the result is used its value (ORIG's length) must be
	 known at compile time.  */
      tree orig_len = NULL_TREE;
      if (gimple_call_lhs (stmt))
	{
	  orig_len = get_maxval_strlen (orig, SRK_STRLEN);
	  if (!orig_len)
	    return false;
	}

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);

      /* Propagate the NO_WARNING bit to avoid issuing the same
	 warning more than once.  */
      if (gimple_no_warning_p (stmt))
	gimple_set_no_warning (repl, true);

      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (TREE_TYPE (lhs),
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
	  repl = gimple_build_assign (lhs, orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ??? We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
3640
/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
   FMT, and ORIG.  ORIG may be null if this is a 3-argument call.  We don't
   attempt to simplify calls with more than 4 arguments.

   Return true if simplification was possible, otherwise false.  */

bool
gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest = gimple_call_arg (stmt, 0);
  tree destsize = gimple_call_arg (stmt, 1);
  tree fmt = gimple_call_arg (stmt, 2);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  /* Punt on calls with more arguments than snprintf (dest, size, "%s", arg)
     would have; those can't be handled by the transforms below.  */
  if (gimple_call_num_args (stmt) > 4)
    return false;

  if (gimple_call_num_args (stmt) == 4)
    orig = gimple_call_arg (stmt, 3);

  /* The destination size must be a known constant so the copy can be
     proved to fit.  */
  if (!tree_fits_uhwi_p (destsize))
    return false;
  unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't optimize snprintf (buf, 4, "abc", ptr++).  */
      if (orig)
	return false;

      /* We could expand this as
	 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
	 or to
	 memcpy (str, fmt_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      size_t len = strlen (fmt_str);
      if (len >= destlen)
	return false;

      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  /* snprintf returns the number of characters that would have
	     been written, i.e. strlen (fmt) here, which is a constant.  */
	  repl = gimple_build_assign (lhs,
				      build_int_cst (TREE_TYPE (lhs), len));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't crash on snprintf (str1, cst, "%s").  */
      if (!orig)
	return false;

      /* The length of the source string must be a known constant to
	 prove that the copy fits in the destination.  */
      tree orig_len = get_maxval_strlen (orig, SRK_STRLEN);
      if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
	return false;

      /* We could expand this as
	 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
	 or to
	 memcpy (str1, str2_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      if (compare_tree_int (orig_len, destlen) >= 0)
	return false;

      /* Convert snprintf (str1, cst, "%s", str2) into
	 strcpy (str1, str2) if strlen (str2) < cst.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  /* The result of the call is strlen (str2), already computed
	     above as ORIG_LEN; convert it to the lhs type if needed.  */
	  if (!useless_type_conversion_p (TREE_TYPE (lhs),
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
	  repl = gimple_build_assign (lhs, orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
35770bb2 3777
/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
   FP, FMT, and ARG are the arguments to the call.  We don't fold calls with
   more than 3 arguments, and ARG may be null in the 2-argument case.

   Return true if simplification was possible, otherwise false.  FCODE is
   the BUILT_IN_* code of the function to be simplified.  */

static bool
gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
			     tree fp, tree fmt, tree arg,
			     enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_fputc, fn_fputs;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
      fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
    }
  else
    {
      fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
      fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
    }

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use fputs.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      /* A trailing ARG is only valid for the va_list variants, where it
	 is the va_list itself.  */
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
	  && arg)
	return false;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* When "string" doesn't contain %, replace all cases of
	 fprintf (fp, string) with fputs (string, fp).  The fputs
	 builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return false;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg
	  || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
	return false;
      if (fn_fputc)
	{
	  gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
3878
/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
   FMT and ARG are the arguments to the call; we don't fold cases with
   more than 2 arguments, and ARG may be null if this is a 1-argument case.

   Return true if simplification was possible, otherwise false.  FCODE is
   the BUILT_IN_* code of the function to be simplified.  */

static bool
gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
			    tree arg, enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_putchar, fn_puts, newarg;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
      fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
    }
  else
    {
      fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
      fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
    }

  if (!init_target_chars ())
    return false;

  /* Handle both a plain string format and printf ("%s", arg) with a
     constant ARG string; in both cases STR below is the text printed.  */
  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
	{
	  if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
	    return false;

	  if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	    return false;

	  str = c_getstr (arg);
	  if (str == NULL)
	    return false;
	}
      else
	{
	  /* The format specifier doesn't contain any '%' characters.  */
	  if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
	      && arg)
	    return false;
	  str = fmt_str;
	}

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
	{
	  /* Given printf("c"), (where c is any one character,)
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
	  newarg = build_int_cst (integer_type_node, str[0]);
	  if (fn_putchar)
	    {
	      gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	{
	  /* If the string was "string\n", call puts("string").  */
	  size_t len = strlen (str);
	  if ((unsigned char)str[len - 1] == target_newline
	      && (size_t) (int) len == len
	      && (int) len > 0)
	    {
	      char *newstr;

	      /* Create a NUL-terminated string that's one char shorter
		 than the original, stripping off the trailing '\n'.  */
	      newstr = xstrdup (str);
	      newstr[len - 1] = '\0';
	      newarg = build_string_literal (len, newstr);
	      free (newstr);
	      if (fn_puts)
		{
		  gcall *repl = gimple_build_call (fn_puts, 1, newarg);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	    }
	  else
	    /* We'd like to arrange to call fputs(string,stdout) here,
	       but we need stdout and don't have a way to get it yet.  */
	    return false;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return false;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_puts)
	{
	  gcall *repl = gimple_build_call (fn_puts, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || ! useless_type_conversion_p (integer_type_node,
					       TREE_TYPE (arg)))
	return false;
      if (fn_putchar)
	{
	  gcall *repl = gimple_build_call (fn_putchar, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
4030
edd7ae68 4031
/* Fold a call to __builtin_strlen whose argument has a length that can
   be determined or bounded at compile time.  If the length is a known
   constant, replace the call with it; otherwise record the computed
   range [0, MAXLEN] on the call's lhs.

   Return true if the call was replaced, otherwise false.  */

static bool
gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree arg = gimple_call_arg (stmt, 0);

  wide_int minlen;
  wide_int maxlen;

  c_strlen_data lendata = { };
  if (get_range_strlen (arg, &lendata, /* eltsize = */ 1)
      && !lendata.decl
      && lendata.minlen && TREE_CODE (lendata.minlen) == INTEGER_CST
      && lendata.maxlen && TREE_CODE (lendata.maxlen) == INTEGER_CST)
    {
      /* The range of lengths refers to either a single constant
	 string or to the longest and shortest constant string
	 referenced by the argument of the strlen() call, or to
	 the strings that can possibly be stored in the arrays
	 the argument refers to.  */
      minlen = wi::to_wide (lendata.minlen);
      maxlen = wi::to_wide (lendata.maxlen);
    }
  else
    {
      /* No usable range; fall back to the widest possible one,
	 [0, max_object_size () - 2].  */
      unsigned prec = TYPE_PRECISION (sizetype);

      minlen = wi::shwi (0, prec);
      maxlen = wi::to_wide (max_object_size (), prec) - 2;
    }

  if (minlen == maxlen)
    {
      /* Fold the strlen call to a constant.  */
      tree type = TREE_TYPE (lendata.minlen);
      tree len = force_gimple_operand_gsi (gsi,
					   wide_int_to_tree (type, minlen),
					   true, NULL, true, GSI_SAME_STMT);
      replace_call_with_value (gsi, len);
      return true;
    }

  /* Set the strlen() range to [0, MAXLEN].  */
  if (tree lhs = gimple_call_lhs (stmt))
    set_strlen_range (lhs, minlen, maxlen);

  return false;
}
4083
/* Fold a call to __builtin_acc_on_device with argument ARG0 into the
   boolean expression (arg0 == val_host) | (arg0 == val_dev), where the
   two values depend on whether this is the host or the accelerator
   compiler.

   Return true if the call was replaced, otherwise false.  */

static bool
gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
{
  /* Defer folding until we know which compiler we're in.  */
  if (symtab->state != EXPANSION)
    return false;

  unsigned val_host = GOMP_DEVICE_HOST;
  unsigned val_dev = GOMP_DEVICE_NONE;

#ifdef ACCEL_COMPILER
  /* In the accelerator compiler the device matches the compiled-for
     device, not the host.  */
  val_host = GOMP_DEVICE_NOT_HOST;
  val_dev = ACCEL_COMPILER_acc_device;
#endif

  location_t loc = gimple_location (gsi_stmt (*gsi));

  /* host_eq = (arg0 == val_host).  */
  tree host_eq = make_ssa_name (boolean_type_node);
  gimple *host_ass = gimple_build_assign
    (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
  gimple_set_location (host_ass, loc);
  gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);

  /* dev_eq = (arg0 == val_dev).  */
  tree dev_eq = make_ssa_name (boolean_type_node);
  gimple *dev_ass = gimple_build_assign
    (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
  gimple_set_location (dev_ass, loc);
  gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);

  /* result = host_eq | dev_eq.  */
  tree result = make_ssa_name (boolean_type_node);
  gimple *result_ass = gimple_build_assign
    (result, BIT_IOR_EXPR, host_eq, dev_eq);
  gimple_set_location (result_ass, loc);
  gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);

  replace_call_with_value (gsi, result);

  return true;
}
cbdd87d4 4125
fe75f732
PK
4126/* Fold realloc (0, n) -> malloc (n). */
4127
4128static bool
4129gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
4130{
4131 gimple *stmt = gsi_stmt (*gsi);
4132 tree arg = gimple_call_arg (stmt, 0);
4133 tree size = gimple_call_arg (stmt, 1);
4134
4135 if (operand_equal_p (arg, null_pointer_node, 0))
4136 {
4137 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
4138 if (fn_malloc)
4139 {
4140 gcall *repl = gimple_build_call (fn_malloc, 1, size);
4141 replace_call_with_call_and_fold (gsi, repl);
4142 return true;
4143 }
4144 }
4145 return false;
4146}
4147
/* Number of bytes into which any type but aggregate or vector types
   should fit.  */
static constexpr size_t clear_padding_unit
  = MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT;
/* Buffer size on which __builtin_clear_padding folding code works.  */
static const size_t clear_padding_buf_size = 32 * clear_padding_unit;

/* Data passed through __builtin_clear_padding folding.  */
struct clear_padding_struct {
  location_t loc;
  /* 0 during __builtin_clear_padding folding, nonzero during
     clear_type_padding_in_mask.  In that case, instead of clearing the
     non-padding bits in union_ptr array clear the padding bits in there.  */
  bool clear_in_mask;
  /* Base address of the object being processed; used as the MEM_REF
     base of the stores emitted by clear_padding_flush.  */
  tree base;
  /* Pointer type used for the offset operand of the emitted MEM_REFs
     (carries the alias set of the accesses).  */
  tree alias_type;
  /* Iterator before which the padding-clearing statements are
     inserted.  */
  gimple_stmt_iterator *gsi;
  /* Alignment of buf->base + 0.  */
  unsigned align;
  /* Offset from buf->base.  Should be always a multiple of UNITS_PER_WORD.  */
  HOST_WIDE_INT off;
  /* Number of padding bytes before buf->off that don't have padding clear
     code emitted yet.  */
  HOST_WIDE_INT padding_bytes;
  /* The size of the whole object.  Never emit code to touch
     buf->base + buf->sz or following bytes.  */
  HOST_WIDE_INT sz;
  /* Number of bytes recorded in buf->buf.  */
  size_t size;
  /* When inside union, instead of emitting code we and bits inside of
     the union_ptr array.  */
  unsigned char *union_ptr;
  /* Set bits mean padding bits that need to be cleared by the builtin.  */
  unsigned char buf[clear_padding_buf_size + clear_padding_unit];
};
4183
/* Emit code to clear padding requested in BUF->buf - set bits
   in there stand for padding that should be cleared.  FULL is true
   if everything from the buffer should be flushed, otherwise
   it can leave up to 2 * clear_padding_unit bytes for further
   processing.  */

static void
clear_padding_flush (clear_padding_struct *buf, bool full)
{
  gcc_assert ((clear_padding_unit % UNITS_PER_WORD) == 0);
  if (!full && buf->size < 2 * clear_padding_unit)
    return;
  gcc_assert ((buf->off % UNITS_PER_WORD) == 0);
  size_t end = buf->size;
  if (!full)
    /* Keep the last partial clear_padding_unit chunk (and one whole unit
       before it) in the buffer for later merging; flush only whole
       units before that.  */
    end = ((end - clear_padding_unit - 1) / clear_padding_unit
	   * clear_padding_unit);
  size_t padding_bytes = buf->padding_bytes;
  if (buf->union_ptr)
    {
      if (buf->clear_in_mask)
	{
	  /* During clear_type_padding_in_mask, clear the padding
	     bits set in buf->buf in the buf->union_ptr mask.  */
	  for (size_t i = 0; i < end; i++)
	    {
	      if (buf->buf[i] == (unsigned char) ~0)
		padding_bytes++;
	      else
		{
		  /* Flush the run of all-padding bytes accumulated so
		     far, then clear this byte's padding bits in the
		     mask.  */
		  memset (&buf->union_ptr[buf->off + i - padding_bytes],
			  0, padding_bytes);
		  padding_bytes = 0;
		  buf->union_ptr[buf->off + i] &= ~buf->buf[i];
		}
	    }
	  if (full)
	    {
	      memset (&buf->union_ptr[buf->off + end - padding_bytes],
		      0, padding_bytes);
	      buf->off = 0;
	      buf->size = 0;
	      buf->padding_bytes = 0;
	    }
	  else
	    {
	      memmove (buf->buf, buf->buf + end, buf->size - end);
	      buf->off += end;
	      buf->size -= end;
	      buf->padding_bytes = padding_bytes;
	    }
	  return;
	}
      /* Inside of a union, instead of emitting any code, instead
	 clear all bits in the union_ptr buffer that are clear
	 in buf.  Whole padding bytes don't clear anything.  */
      for (size_t i = 0; i < end; i++)
	{
	  if (buf->buf[i] == (unsigned char) ~0)
	    padding_bytes++;
	  else
	    {
	      padding_bytes = 0;
	      buf->union_ptr[buf->off + i] &= buf->buf[i];
	    }
	}
      if (full)
	{
	  buf->off = 0;
	  buf->size = 0;
	  buf->padding_bytes = 0;
	}
      else
	{
	  memmove (buf->buf, buf->buf + end, buf->size - end);
	  buf->off += end;
	  buf->size -= end;
	  buf->padding_bytes = padding_bytes;
	}
      return;
    }
  /* Process the buffer a word at a time, shrinking the word size near
     the end of the object so no store touches bytes past buf->sz.  */
  size_t wordsize = UNITS_PER_WORD;
  for (size_t i = 0; i < end; i += wordsize)
    {
      size_t nonzero_first = wordsize;
      size_t nonzero_last = 0;
      size_t zero_first = wordsize;
      size_t zero_last = 0;
      bool all_ones = true, bytes_only = true;
      if ((unsigned HOST_WIDE_INT) (buf->off + i + wordsize)
	  > (unsigned HOST_WIDE_INT) buf->sz)
	{
	  /* This word would extend past the object; retry with half
	     the word size.  */
	  gcc_assert (wordsize > 1);
	  wordsize /= 2;
	  i -= wordsize;
	  continue;
	}
      /* Scan the word: find first/last nonzero and zero bytes, whether
	 all nonzero bytes are all-ones (whole padding bytes) and whether
	 any byte is partially set (i.e. bit-field padding).  */
      for (size_t j = i; j < i + wordsize && j < end; j++)
	{
	  if (buf->buf[j])
	    {
	      if (nonzero_first == wordsize)
		{
		  nonzero_first = j - i;
		  nonzero_last = j - i;
		}
	      if (nonzero_last != j - i)
		all_ones = false;
	      nonzero_last = j + 1 - i;
	    }
	  else
	    {
	      if (zero_first == wordsize)
		zero_first = j - i;
	      zero_last = j + 1 - i;
	    }
	  if (buf->buf[j] != 0 && buf->buf[j] != (unsigned char) ~0)
	    {
	      all_ones = false;
	      bytes_only = false;
	    }
	}
      size_t padding_end = i;
      if (padding_bytes)
	{
	  if (nonzero_first == 0
	      && nonzero_last == wordsize
	      && all_ones)
	    {
	      /* All bits are padding and we had some padding
		 before too.  Just extend it.  */
	      padding_bytes += wordsize;
	      continue;
	    }
	  if (all_ones && nonzero_first == 0)
	    {
	      /* The leading all-ones bytes continue the pending padding
	         run; absorb them into it.  */
	      padding_bytes += nonzero_last;
	      padding_end += nonzero_last;
	      nonzero_first = wordsize;
	      nonzero_last = 0;
	    }
	  else if (bytes_only && nonzero_first == 0)
	    {
	      gcc_assert (zero_first && zero_first != wordsize);
	      padding_bytes += zero_first;
	      padding_end += zero_first;
	    }
	  /* Emit a single store of zeros covering the pending padding
	     run that just ended.  */
	  tree atype, src;
	  if (padding_bytes == 1)
	    {
	      atype = char_type_node;
	      src = build_zero_cst (char_type_node);
	    }
	  else
	    {
	      atype = build_array_type_nelts (char_type_node, padding_bytes);
	      src = build_constructor (atype, NULL);
	    }
	  tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
				 build_int_cst (buf->alias_type,
						buf->off + padding_end
						- padding_bytes));
	  gimple *g = gimple_build_assign (dst, src);
	  gimple_set_location (g, buf->loc);
	  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	  padding_bytes = 0;
	  buf->padding_bytes = 0;
	}
      if (nonzero_first == wordsize)
	/* All bits in a word are 0, there are no padding bits.  */
	continue;
      if (all_ones && nonzero_last == wordsize)
	{
	  /* All bits between nonzero_first and end of word are padding
	     bits, start counting padding_bytes.  */
	  padding_bytes = nonzero_last - nonzero_first;
	  continue;
	}
      if (bytes_only)
	{
	  /* If bitfields aren't involved in this word, prefer storing
	     individual bytes or groups of them over performing a RMW
	     operation on the whole word.  */
	  gcc_assert (i + zero_last <= end);
	  for (size_t j = padding_end; j < i + zero_last; j++)
	    {
	      if (buf->buf[j])
		{
		  /* Find the extent of this run of padding bytes and
		     emit one store of zeros for it.  */
		  size_t k;
		  for (k = j; k < i + zero_last; k++)
		    if (buf->buf[k] == 0)
		      break;
		  HOST_WIDE_INT off = buf->off + j;
		  tree atype, src;
		  if (k - j == 1)
		    {
		      atype = char_type_node;
		      src = build_zero_cst (char_type_node);
		    }
		  else
		    {
		      atype = build_array_type_nelts (char_type_node, k - j);
		      src = build_constructor (atype, NULL);
		    }
		  tree dst = build2_loc (buf->loc, MEM_REF, atype,
					 buf->base,
					 build_int_cst (buf->alias_type, off));
		  gimple *g = gimple_build_assign (dst, src);
		  gimple_set_location (g, buf->loc);
		  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
		  j = k;
		}
	    }
	  if (nonzero_last == wordsize)
	    /* Padding continues to the end of the word; carry it over
	       to the next iteration.  */
	    padding_bytes = nonzero_last - zero_last;
	  continue;
	}
      /* Bit-field padding is involved: find the smallest power-of-two
	 sized, naturally aligned element covering all padding bits in
	 this word and emit a read-modify-write (or plain store when the
	 whole element is padding).  */
      for (size_t eltsz = 1; eltsz <= wordsize; eltsz <<= 1)
	{
	  if (nonzero_last - nonzero_first <= eltsz
	      && ((nonzero_first & ~(eltsz - 1))
		  == ((nonzero_last - 1) & ~(eltsz - 1))))
	    {
	      tree type;
	      if (eltsz == 1)
		type = char_type_node;
	      else
		type = lang_hooks.types.type_for_size (eltsz * BITS_PER_UNIT,
						       0);
	      size_t start = nonzero_first & ~(eltsz - 1);
	      HOST_WIDE_INT off = buf->off + i + start;
	      tree atype = type;
	      if (eltsz > 1 && buf->align < TYPE_ALIGN (type))
		atype = build_aligned_type (type, buf->align);
	      tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
				     build_int_cst (buf->alias_type, off));
	      tree src;
	      gimple *g;
	      if (all_ones
		  && nonzero_first == start
		  && nonzero_last == start + eltsz)
		/* The whole element is padding; just store zero.  */
		src = build_zero_cst (type);
	      else
		{
		  /* Load the element, mask off the padding bits and
		     store it back.  */
		  src = make_ssa_name (type);
		  g = gimple_build_assign (src, unshare_expr (dst));
		  gimple_set_location (g, buf->loc);
		  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
		  tree mask = native_interpret_expr (type,
						     buf->buf + i + start,
						     eltsz);
		  gcc_assert (mask && TREE_CODE (mask) == INTEGER_CST);
		  mask = fold_build1 (BIT_NOT_EXPR, type, mask);
		  tree src_masked = make_ssa_name (type);
		  g = gimple_build_assign (src_masked, BIT_AND_EXPR,
					   src, mask);
		  gimple_set_location (g, buf->loc);
		  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
		  src = src_masked;
		}
	      g = gimple_build_assign (dst, src);
	      gimple_set_location (g, buf->loc);
	      gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	      break;
	    }
	}
    }
  if (full)
    {
      /* Flush any padding run still pending at the end of the buffer.  */
      if (padding_bytes)
	{
	  tree atype, src;
	  if (padding_bytes == 1)
	    {
	      atype = char_type_node;
	      src = build_zero_cst (char_type_node);
	    }
	  else
	    {
	      atype = build_array_type_nelts (char_type_node, padding_bytes);
	      src = build_constructor (atype, NULL);
	    }
	  tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
				 build_int_cst (buf->alias_type,
						buf->off + end
						- padding_bytes));
	  gimple *g = gimple_build_assign (dst, src);
	  gimple_set_location (g, buf->loc);
	  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	}
      /* Keep buf->off a multiple of UNITS_PER_WORD by leaving the
	 trailing sub-word remainder in the buffer, zeroed.  */
      size_t end_rem = end % UNITS_PER_WORD;
      buf->off += end - end_rem;
      buf->size = end_rem;
      memset (buf->buf, 0, buf->size);
      buf->padding_bytes = 0;
    }
  else
    {
      memmove (buf->buf, buf->buf + end, buf->size - end);
      buf->off += end;
      buf->size -= end;
      buf->padding_bytes = padding_bytes;
    }
}
4488
/* Append PADDING_BYTES padding bytes (all-ones bytes) to BUF->buf,
   flushing the buffer as needed when the count exceeds the buffer
   capacity.  */

static void
clear_padding_add_padding (clear_padding_struct *buf,
			   HOST_WIDE_INT padding_bytes)
{
  if (padding_bytes == 0)
    return;
  /* Try to make room with a partial flush first.  */
  if ((unsigned HOST_WIDE_INT) padding_bytes + buf->size
      > (unsigned HOST_WIDE_INT) clear_padding_buf_size)
    clear_padding_flush (buf, false);
  if ((unsigned HOST_WIDE_INT) padding_bytes + buf->size
      > (unsigned HOST_WIDE_INT) clear_padding_buf_size)
    {
      /* Still too large: fill the buffer with ones, flush, and account
	 for the rest by bumping buf->off/buf->padding_bytes directly,
	 keeping only the sub-word remainder in the buffer.  */
      memset (buf->buf + buf->size, ~0, clear_padding_buf_size - buf->size);
      padding_bytes -= clear_padding_buf_size - buf->size;
      buf->size = clear_padding_buf_size;
      clear_padding_flush (buf, false);
      gcc_assert (buf->padding_bytes);
      /* At this point buf->buf[0] through buf->buf[buf->size - 1]
	 is guaranteed to be all ones.  */
      padding_bytes += buf->size;
      buf->size = padding_bytes % UNITS_PER_WORD;
      memset (buf->buf, ~0, buf->size);
      buf->off += padding_bytes - buf->size;
      buf->padding_bytes += padding_bytes - buf->size;
    }
  else
    {
      memset (buf->buf + buf->size, ~0, padding_bytes);
      buf->size += padding_bytes;
    }
}
4522
4523static void clear_padding_type (clear_padding_struct *, tree, HOST_WIDE_INT);
4524
/* Clear padding bits of union type TYPE of size SZ bytes.  Processes each
   union member into a shared all-ones mask, so only bits that are padding
   in every member remain set; the mask is then merged back into BUF.  */

static void
clear_padding_union (clear_padding_struct *buf, tree type, HOST_WIDE_INT sz)
{
  clear_padding_struct *union_buf;
  HOST_WIDE_INT start_off = 0, next_off = 0;
  size_t start_size = 0;
  if (buf->union_ptr)
    {
      /* Already inside another union: reuse BUF itself, after flushing
	 what has been recorded so far into the outer mask.  */
      start_off = buf->off + buf->size;
      next_off = start_off + sz;
      start_size = start_off % UNITS_PER_WORD;
      start_off -= start_size;
      clear_padding_flush (buf, true);
      union_buf = buf;
    }
  else
    {
      /* Outermost union: set up a temporary buffer whose union_ptr mask
	 lives either directly in BUF->buf (when it fits) or in freshly
	 allocated memory.  */
      if (sz + buf->size > clear_padding_buf_size)
	clear_padding_flush (buf, false);
      union_buf = XALLOCA (clear_padding_struct);
      union_buf->loc = buf->loc;
      union_buf->clear_in_mask = buf->clear_in_mask;
      union_buf->base = NULL_TREE;
      union_buf->alias_type = NULL_TREE;
      union_buf->gsi = NULL;
      union_buf->align = 0;
      union_buf->off = 0;
      union_buf->padding_bytes = 0;
      union_buf->sz = sz;
      union_buf->size = 0;
      if (sz + buf->size <= clear_padding_buf_size)
	union_buf->union_ptr = buf->buf + buf->size;
      else
	union_buf->union_ptr = XNEWVEC (unsigned char, sz);
      /* Start with all bits marked as padding; each member will clear
	 the bits it actually occupies.  */
      memset (union_buf->union_ptr, ~0, sz);
    }

  for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL && !DECL_PADDING_P (field))
      {
	if (DECL_SIZE_UNIT (field) == NULL_TREE)
	  {
	    /* A member without a known size is either erroneous or a
	       flexible array member; the latter has no well defined
	       padding.  */
	    if (TREE_TYPE (field) == error_mark_node)
	      continue;
	    gcc_assert (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
			&& !COMPLETE_TYPE_P (TREE_TYPE (field)));
	    if (!buf->clear_in_mask)
	      error_at (buf->loc, "flexible array member %qD does not have "
				  "well defined padding bits for %qs",
			field, "__builtin_clear_padding");
	    continue;
	  }
	HOST_WIDE_INT fldsz = tree_to_shwi (DECL_SIZE_UNIT (field));
	gcc_assert (union_buf->size == 0);
	/* Each member starts at the same offset; bytes after the member
	   up to SZ are padding for this member.  */
	union_buf->off = start_off;
	union_buf->size = start_size;
	memset (union_buf->buf, ~0, start_size);
	clear_padding_type (union_buf, TREE_TYPE (field), fldsz);
	clear_padding_add_padding (union_buf, sz - fldsz);
	clear_padding_flush (union_buf, true);
      }

  if (buf == union_buf)
    {
      /* Nested case: restore BUF bookkeeping past the union, keeping
	 buf->off word aligned.  */
      buf->off = next_off;
      buf->size = next_off % UNITS_PER_WORD;
      buf->off -= buf->size;
      memset (buf->buf, ~0, buf->size);
    }
  else if (sz + buf->size <= clear_padding_buf_size)
    /* The mask was built in place inside BUF->buf; just account for it.  */
    buf->size += sz;
  else
    {
      /* Copy the separately allocated mask back into BUF in chunks,
	 flushing as needed, then release it.  */
      unsigned char *union_ptr = union_buf->union_ptr;
      while (sz)
	{
	  clear_padding_flush (buf, false);
	  HOST_WIDE_INT this_sz
	    = MIN ((unsigned HOST_WIDE_INT) sz,
		   clear_padding_buf_size - buf->size);
	  memcpy (buf->buf + buf->size, union_ptr, this_sz);
	  buf->size += this_sz;
	  union_ptr += this_sz;
	  sz -= this_sz;
	}
      XDELETE (union_buf->union_ptr);
    }
}
4615
4616/* The only known floating point formats with padding bits are the
4617 IEEE extended ones. */
4618
4619static bool
4620clear_padding_real_needs_padding_p (tree type)
4621{
4622 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
4623 return (fmt->b == 2
4624 && fmt->signbit_ro == fmt->signbit_rw
4625 && (fmt->signbit_ro == 79 || fmt->signbit_ro == 95));
4626}
4627
4628/* Return true if TYPE might contain any padding bits. */
4629
4630static bool
4631clear_padding_type_may_have_padding_p (tree type)
4632{
4633 switch (TREE_CODE (type))
4634 {
4635 case RECORD_TYPE:
4636 case UNION_TYPE:
4637 return true;
4638 case ARRAY_TYPE:
4639 case COMPLEX_TYPE:
4640 case VECTOR_TYPE:
4641 return clear_padding_type_may_have_padding_p (TREE_TYPE (type));
4642 case REAL_TYPE:
4643 return clear_padding_real_needs_padding_p (type);
4644 default:
4645 return false;
4646 }
4647}
4648
/* Emit a runtime loop:
   for (; buf.base != end; buf.base += sz)
     __builtin_clear_padding (buf.base);

   The loop is built from three labels: L1 is the loop body, L2 the
   condition check (entered first via a goto) and L3 the exit.  */

static void
clear_padding_emit_loop (clear_padding_struct *buf, tree type, tree end)
{
  tree l1 = create_artificial_label (buf->loc);
  tree l2 = create_artificial_label (buf->loc);
  tree l3 = create_artificial_label (buf->loc);
  /* goto L2 -- test the condition before the first iteration.  */
  gimple *g = gimple_build_goto (l2);
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
  /* L1: clear one element's padding, then advance buf.base.  */
  g = gimple_build_label (l1);
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
  clear_padding_type (buf, type, buf->sz);
  clear_padding_flush (buf, true);
  g = gimple_build_assign (buf->base, POINTER_PLUS_EXPR, buf->base,
			   size_int (buf->sz));
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
  /* L2: if (buf.base != end) goto L1; else goto L3.  */
  g = gimple_build_label (l2);
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
  g = gimple_build_cond (NE_EXPR, buf->base, end, l1, l3);
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
  /* L3: loop exit.  */
  g = gimple_build_label (l3);
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
}
4681
4682/* Clear padding bits for TYPE. Called recursively from
4683 gimple_fold_builtin_clear_padding. */
4684
static void
clear_padding_type (clear_padding_struct *buf, tree type, HOST_WIDE_INT sz)
{
  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      /* Walk the fields in layout order, tracking CUR_POS, the byte
	 offset within the record up to which BUF already covers.  */
      HOST_WIDE_INT cur_pos;
      cur_pos = 0;
      for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL && !DECL_PADDING_P (field))
	  {
	    tree ftype = TREE_TYPE (field);
	    if (DECL_BIT_FIELD (field))
	      {
		/* Bit-fields: first mark the covered bytes as padding,
		   then clear the bits actually occupied by the field
		   directly in the buffer.  */
		HOST_WIDE_INT fldsz = TYPE_PRECISION (ftype);
		if (fldsz == 0)
		  continue;
		HOST_WIDE_INT pos = int_byte_position (field);
		HOST_WIDE_INT bpos
		  = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
		bpos %= BITS_PER_UNIT;
		HOST_WIDE_INT end
		  = ROUND_UP (bpos + fldsz, BITS_PER_UNIT) / BITS_PER_UNIT;
		if (pos + end > cur_pos)
		  {
		    clear_padding_add_padding (buf, pos + end - cur_pos);
		    cur_pos = pos + end;
		  }
		gcc_assert (cur_pos > pos
			    && ((unsigned HOST_WIDE_INT) buf->size
				>= (unsigned HOST_WIDE_INT) cur_pos - pos));
		/* P points at the first buffer byte the bit-field
		   occupies; un-mark its value bits as padding below.  */
		unsigned char *p = buf->buf + buf->size - (cur_pos - pos);
		if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
		  sorry_at (buf->loc, "PDP11 bit-field handling unsupported"
			    " in %qs", "__builtin_clear_padding");
		else if (BYTES_BIG_ENDIAN)
		  {
		    /* Big endian.  */
		    if (bpos + fldsz <= BITS_PER_UNIT)
		      *p &= ~(((1 << fldsz) - 1)
			      << (BITS_PER_UNIT - bpos - fldsz));
		    else
		      {
			if (bpos)
			  {
			    *p &= ~(((1U << BITS_PER_UNIT) - 1) >> bpos);
			    p++;
			    fldsz -= BITS_PER_UNIT - bpos;
			  }
			memset (p, 0, fldsz / BITS_PER_UNIT);
			p += fldsz / BITS_PER_UNIT;
			fldsz %= BITS_PER_UNIT;
			if (fldsz)
			  *p &= ((1U << BITS_PER_UNIT) - 1) >> fldsz;
		      }
		  }
		else
		  {
		    /* Little endian.  */
		    if (bpos + fldsz <= BITS_PER_UNIT)
		      *p &= ~(((1 << fldsz) - 1) << bpos);
		    else
		      {
			if (bpos)
			  {
			    *p &= ~(((1 << BITS_PER_UNIT) - 1) << bpos);
			    p++;
			    fldsz -= BITS_PER_UNIT - bpos;
			  }
			memset (p, 0, fldsz / BITS_PER_UNIT);
			p += fldsz / BITS_PER_UNIT;
			fldsz %= BITS_PER_UNIT;
			if (fldsz)
			  *p &= ~((1 << fldsz) - 1);
		      }
		  }
	      }
	    else if (DECL_SIZE_UNIT (field) == NULL_TREE)
	      {
		/* A field with no size must be a flexible array member
		   (or an erroneous field); its padding is not well
		   defined, so diagnose unless we are only computing a
		   mask.  */
		if (ftype == error_mark_node)
		  continue;
		gcc_assert (TREE_CODE (ftype) == ARRAY_TYPE
			    && !COMPLETE_TYPE_P (ftype));
		if (!buf->clear_in_mask)
		  error_at (buf->loc, "flexible array member %qD does not "
			    "have well defined padding bits for %qs",
			    field, "__builtin_clear_padding");
	      }
	    else if (is_empty_type (TREE_TYPE (field)))
	      continue;
	    else
	      {
		/* Ordinary field: anything between CUR_POS and the
		   field start is padding, then recurse into the field's
		   type.  */
		HOST_WIDE_INT pos = int_byte_position (field);
		HOST_WIDE_INT fldsz = tree_to_shwi (DECL_SIZE_UNIT (field));
		gcc_assert (pos >= 0 && fldsz >= 0 && pos >= cur_pos);
		clear_padding_add_padding (buf, pos - cur_pos);
		cur_pos = pos;
		clear_padding_type (buf, TREE_TYPE (field), fldsz);
		cur_pos += fldsz;
	      }
	  }
      /* Trailing bytes after the last field are padding too.  */
      gcc_assert (sz >= cur_pos);
      clear_padding_add_padding (buf, sz - cur_pos);
      break;
    case ARRAY_TYPE:
      HOST_WIDE_INT nelts, fldsz;
      fldsz = int_size_in_bytes (TREE_TYPE (type));
      if (fldsz == 0)
	break;
      nelts = sz / fldsz;
      if (nelts > 1
	  && sz > 8 * UNITS_PER_WORD
	  && buf->union_ptr == NULL
	  && clear_padding_type_may_have_padding_p (TREE_TYPE (type)))
	{
	  /* For sufficiently large array of more than one elements,
	     emit a runtime loop to keep code size manageable.  */
	  tree base = buf->base;
	  unsigned int prev_align = buf->align;
	  HOST_WIDE_INT off = buf->off + buf->size;
	  HOST_WIDE_INT prev_sz = buf->sz;
	  clear_padding_flush (buf, true);
	  tree elttype = TREE_TYPE (type);
	  buf->base = create_tmp_var (build_pointer_type (elttype));
	  tree end = make_ssa_name (TREE_TYPE (buf->base));
	  gimple *g = gimple_build_assign (buf->base, POINTER_PLUS_EXPR,
					   base, size_int (off));
	  gimple_set_location (g, buf->loc);
	  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	  g = gimple_build_assign (end, POINTER_PLUS_EXPR, buf->base,
				   size_int (sz));
	  gimple_set_location (g, buf->loc);
	  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	  /* Temporarily repoint BUF at a single element for the loop
	     body, then restore the previous state afterwards.  */
	  buf->sz = fldsz;
	  buf->align = TYPE_ALIGN (elttype);
	  buf->off = 0;
	  buf->size = 0;
	  clear_padding_emit_loop (buf, elttype, end);
	  buf->base = base;
	  buf->sz = prev_sz;
	  buf->align = prev_align;
	  buf->size = off % UNITS_PER_WORD;
	  buf->off = off - buf->size;
	  memset (buf->buf, 0, buf->size);
	  break;
	}
      for (HOST_WIDE_INT i = 0; i < nelts; i++)
	clear_padding_type (buf, TREE_TYPE (type), fldsz);
      break;
    case UNION_TYPE:
      clear_padding_union (buf, type, sz);
      break;
    case REAL_TYPE:
      gcc_assert ((size_t) sz <= clear_padding_unit);
      if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
	clear_padding_flush (buf, false);
      if (clear_padding_real_needs_padding_p (type))
	{
	  /* Use native_interpret_expr + native_encode_expr to figure out
	     which bits are padding.  */
	  memset (buf->buf + buf->size, ~0, sz);
	  tree cst = native_interpret_expr (type, buf->buf + buf->size, sz);
	  gcc_assert (cst && TREE_CODE (cst) == REAL_CST);
	  int len = native_encode_expr (cst, buf->buf + buf->size, sz);
	  gcc_assert (len > 0 && (size_t) len == (size_t) sz);
	  /* Bytes the round-trip did not reproduce are padding.  */
	  for (size_t i = 0; i < (size_t) sz; i++)
	    buf->buf[buf->size + i] ^= ~0;
	}
      else
	memset (buf->buf + buf->size, 0, sz);
      buf->size += sz;
      break;
    case COMPLEX_TYPE:
      /* A complex is two consecutive components of its element type.  */
      fldsz = int_size_in_bytes (TREE_TYPE (type));
      clear_padding_type (buf, TREE_TYPE (type), fldsz);
      clear_padding_type (buf, TREE_TYPE (type), fldsz);
      break;
    case VECTOR_TYPE:
      nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
      fldsz = int_size_in_bytes (TREE_TYPE (type));
      for (HOST_WIDE_INT i = 0; i < nelts; i++)
	clear_padding_type (buf, TREE_TYPE (type), fldsz);
      break;
    case NULLPTR_TYPE:
      /* nullptr_t has no value bits at all; treat every byte as
	 padding (~0 in the buffer marks padding bytes).  */
      gcc_assert ((size_t) sz <= clear_padding_unit);
      if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
	clear_padding_flush (buf, false);
      memset (buf->buf + buf->size, ~0, sz);
      buf->size += sz;
      break;
    default:
      /* Scalars have no padding; record their bytes as non-padding.  */
      gcc_assert ((size_t) sz <= clear_padding_unit);
      if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
	clear_padding_flush (buf, false);
      memset (buf->buf + buf->size, 0, sz);
      buf->size += sz;
      break;
    }
}
4884
896048cf
JJ
4885/* Clear padding bits of TYPE in MASK. */
4886
4887void
4888clear_type_padding_in_mask (tree type, unsigned char *mask)
4889{
4890 clear_padding_struct buf;
4891 buf.loc = UNKNOWN_LOCATION;
4892 buf.clear_in_mask = true;
4893 buf.base = NULL_TREE;
4894 buf.alias_type = NULL_TREE;
4895 buf.gsi = NULL;
4896 buf.align = 0;
4897 buf.off = 0;
4898 buf.padding_bytes = 0;
4899 buf.sz = int_size_in_bytes (type);
4900 buf.size = 0;
4901 buf.union_ptr = mask;
4902 clear_padding_type (&buf, type, buf.sz);
4903 clear_padding_flush (&buf, true);
4904}
4905
1bea0d0a
JJ
4906/* Fold __builtin_clear_padding builtin. */
4907
static bool
gimple_fold_builtin_clear_padding (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  gcc_assert (gimple_call_num_args (stmt) == 2);
  tree ptr = gimple_call_arg (stmt, 0);
  /* The second argument carries the type to clear, encoded as a
     pointer-to-that-type constant.  */
  tree typearg = gimple_call_arg (stmt, 1);
  tree type = TREE_TYPE (TREE_TYPE (typearg));
  location_t loc = gimple_location (stmt);
  clear_padding_struct buf;
  /* Remember the statement before the call so that afterwards we can
     tell whether anything was inserted.  */
  gimple_stmt_iterator gsiprev = *gsi;
  /* This should be folded during the lower pass.  */
  gcc_assert (!gimple_in_ssa_p (cfun) && cfun->cfg == NULL);
  gcc_assert (COMPLETE_TYPE_P (type));
  gsi_prev (&gsiprev);

  buf.loc = loc;
  buf.clear_in_mask = false;
  buf.base = ptr;
  buf.alias_type = NULL_TREE;
  buf.gsi = gsi;
  buf.align = get_pointer_alignment (ptr);
  unsigned int talign = min_align_of_type (type) * BITS_PER_UNIT;
  buf.align = MAX (buf.align, talign);
  buf.off = 0;
  buf.padding_bytes = 0;
  buf.size = 0;
  buf.sz = int_size_in_bytes (type);
  buf.union_ptr = NULL;
  if (buf.sz < 0 && int_size_in_bytes (strip_array_types (type)) < 0)
    sorry_at (loc, "%s not supported for variable length aggregates",
	      "__builtin_clear_padding");
  /* The implementation currently assumes 8-bit host and target
     chars which is the case for all currently supported targets
     and hosts and is required e.g. for native_{encode,interpret}* APIs.  */
  else if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
    sorry_at (loc, "%s not supported on this target",
	      "__builtin_clear_padding");
  else if (!clear_padding_type_may_have_padding_p (type))
    /* Nothing to do for types without padding.  */
    ;
  else if (TREE_CODE (type) == ARRAY_TYPE && buf.sz < 0)
    {
      tree sz = TYPE_SIZE_UNIT (type);
      tree elttype = type;
      /* Only supports C/C++ VLAs and flattens all the VLA levels.  */
      while (TREE_CODE (elttype) == ARRAY_TYPE
	     && int_size_in_bytes (elttype) < 0)
	elttype = TREE_TYPE (elttype);
      HOST_WIDE_INT eltsz = int_size_in_bytes (elttype);
      gcc_assert (eltsz >= 0);
      if (eltsz)
	{
	  /* Emit a runtime loop over the (runtime-sized) elements.  */
	  buf.base = create_tmp_var (build_pointer_type (elttype));
	  tree end = make_ssa_name (TREE_TYPE (buf.base));
	  gimple *g = gimple_build_assign (buf.base, ptr);
	  gimple_set_location (g, loc);
	  gsi_insert_before (gsi, g, GSI_SAME_STMT);
	  g = gimple_build_assign (end, POINTER_PLUS_EXPR, buf.base, sz);
	  gimple_set_location (g, loc);
	  gsi_insert_before (gsi, g, GSI_SAME_STMT);
	  buf.sz = eltsz;
	  buf.align = TYPE_ALIGN (elttype);
	  buf.alias_type = build_pointer_type (elttype);
	  clear_padding_emit_loop (&buf, elttype, end);
	}
    }
  else
    {
      /* Fixed-size type: compute the padding at compile time and emit
	 straight-line stores.  */
      if (!is_gimple_mem_ref_addr (buf.base))
	{
	  buf.base = make_ssa_name (TREE_TYPE (ptr));
	  gimple *g = gimple_build_assign (buf.base, ptr);
	  gimple_set_location (g, loc);
	  gsi_insert_before (gsi, g, GSI_SAME_STMT);
	}
      buf.alias_type = build_pointer_type (type);
      clear_padding_type (&buf, type, buf.sz);
      clear_padding_flush (&buf, true);
    }

  /* Remove the call.  If nothing was inserted before it, replace it
     with a nop so the iterator stays valid; otherwise step back to
     the last inserted statement.  */
  gimple_stmt_iterator gsiprev2 = *gsi;
  gsi_prev (&gsiprev2);
  if (gsi_stmt (gsiprev) == gsi_stmt (gsiprev2))
    gsi_replace (gsi, gimple_build_nop (), true);
  else
    {
      gsi_remove (gsi, true);
      *gsi = gsiprev2;
    }
  return true;
}
4999
dcb7fae2
RB
5000/* Fold the non-target builtin at *GSI and return whether any simplification
5001 was made. */
cbdd87d4 5002
fef5a0d9 5003static bool
dcb7fae2 5004gimple_fold_builtin (gimple_stmt_iterator *gsi)
cbdd87d4 5005{
538dd0b7 5006 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
fef5a0d9 5007 tree callee = gimple_call_fndecl (stmt);
cbdd87d4 5008
dcb7fae2
RB
5009 /* Give up for always_inline inline builtins until they are
5010 inlined. */
5011 if (avoid_folding_inline_builtin (callee))
5012 return false;
cbdd87d4 5013
edd7ae68
RB
5014 unsigned n = gimple_call_num_args (stmt);
5015 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
5016 switch (fcode)
cbdd87d4 5017 {
b3d8d88e
MS
5018 case BUILT_IN_BCMP:
5019 return gimple_fold_builtin_bcmp (gsi);
5020 case BUILT_IN_BCOPY:
5021 return gimple_fold_builtin_bcopy (gsi);
dcb7fae2 5022 case BUILT_IN_BZERO:
b3d8d88e
MS
5023 return gimple_fold_builtin_bzero (gsi);
5024
dcb7fae2
RB
5025 case BUILT_IN_MEMSET:
5026 return gimple_fold_builtin_memset (gsi,
5027 gimple_call_arg (stmt, 1),
5028 gimple_call_arg (stmt, 2));
dcb7fae2 5029 case BUILT_IN_MEMCPY:
dcb7fae2 5030 case BUILT_IN_MEMPCPY:
dcb7fae2
RB
5031 case BUILT_IN_MEMMOVE:
5032 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
0d67a510 5033 gimple_call_arg (stmt, 1), fcode);
dcb7fae2
RB
5034 case BUILT_IN_SPRINTF_CHK:
5035 case BUILT_IN_VSPRINTF_CHK:
edd7ae68 5036 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
dcb7fae2
RB
5037 case BUILT_IN_STRCAT_CHK:
5038 return gimple_fold_builtin_strcat_chk (gsi);
745583f9
RB
5039 case BUILT_IN_STRNCAT_CHK:
5040 return gimple_fold_builtin_strncat_chk (gsi);
cbdd87d4 5041 case BUILT_IN_STRLEN:
dcb7fae2 5042 return gimple_fold_builtin_strlen (gsi);
cbdd87d4 5043 case BUILT_IN_STRCPY:
dcb7fae2 5044 return gimple_fold_builtin_strcpy (gsi,
fef5a0d9 5045 gimple_call_arg (stmt, 0),
dcb7fae2 5046 gimple_call_arg (stmt, 1));
cbdd87d4 5047 case BUILT_IN_STRNCPY:
dcb7fae2 5048 return gimple_fold_builtin_strncpy (gsi,
fef5a0d9
RB
5049 gimple_call_arg (stmt, 0),
5050 gimple_call_arg (stmt, 1),
dcb7fae2 5051 gimple_call_arg (stmt, 2));
9a7eefec 5052 case BUILT_IN_STRCAT:
dcb7fae2
RB
5053 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
5054 gimple_call_arg (stmt, 1));
ad03a744
RB
5055 case BUILT_IN_STRNCAT:
5056 return gimple_fold_builtin_strncat (gsi);
71dea1dd 5057 case BUILT_IN_INDEX:
912d9ec3 5058 case BUILT_IN_STRCHR:
71dea1dd
WD
5059 return gimple_fold_builtin_strchr (gsi, false);
5060 case BUILT_IN_RINDEX:
5061 case BUILT_IN_STRRCHR:
5062 return gimple_fold_builtin_strchr (gsi, true);
c8952930
JJ
5063 case BUILT_IN_STRSTR:
5064 return gimple_fold_builtin_strstr (gsi);
a918bfbf 5065 case BUILT_IN_STRCMP:
8b0b334a 5066 case BUILT_IN_STRCMP_EQ:
a918bfbf
ML
5067 case BUILT_IN_STRCASECMP:
5068 case BUILT_IN_STRNCMP:
8b0b334a 5069 case BUILT_IN_STRNCMP_EQ:
a918bfbf
ML
5070 case BUILT_IN_STRNCASECMP:
5071 return gimple_fold_builtin_string_compare (gsi);
488c6247
ML
5072 case BUILT_IN_MEMCHR:
5073 return gimple_fold_builtin_memchr (gsi);
cbdd87d4 5074 case BUILT_IN_FPUTS:
dcb7fae2
RB
5075 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
5076 gimple_call_arg (stmt, 1), false);
cbdd87d4 5077 case BUILT_IN_FPUTS_UNLOCKED:
dcb7fae2
RB
5078 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
5079 gimple_call_arg (stmt, 1), true);
cbdd87d4
RG
5080 case BUILT_IN_MEMCPY_CHK:
5081 case BUILT_IN_MEMPCPY_CHK:
5082 case BUILT_IN_MEMMOVE_CHK:
5083 case BUILT_IN_MEMSET_CHK:
dcb7fae2 5084 return gimple_fold_builtin_memory_chk (gsi,
fef5a0d9
RB
5085 gimple_call_arg (stmt, 0),
5086 gimple_call_arg (stmt, 1),
5087 gimple_call_arg (stmt, 2),
5088 gimple_call_arg (stmt, 3),
edd7ae68 5089 fcode);
2625bb5d
RB
5090 case BUILT_IN_STPCPY:
5091 return gimple_fold_builtin_stpcpy (gsi);
cbdd87d4
RG
5092 case BUILT_IN_STRCPY_CHK:
5093 case BUILT_IN_STPCPY_CHK:
dcb7fae2 5094 return gimple_fold_builtin_stxcpy_chk (gsi,
fef5a0d9
RB
5095 gimple_call_arg (stmt, 0),
5096 gimple_call_arg (stmt, 1),
5097 gimple_call_arg (stmt, 2),
edd7ae68 5098 fcode);
cbdd87d4 5099 case BUILT_IN_STRNCPY_CHK:
f3fc9b80 5100 case BUILT_IN_STPNCPY_CHK:
fef5a0d9
RB
5101 return gimple_fold_builtin_stxncpy_chk (gsi,
5102 gimple_call_arg (stmt, 0),
5103 gimple_call_arg (stmt, 1),
5104 gimple_call_arg (stmt, 2),
5105 gimple_call_arg (stmt, 3),
edd7ae68 5106 fcode);
cbdd87d4
RG
5107 case BUILT_IN_SNPRINTF_CHK:
5108 case BUILT_IN_VSNPRINTF_CHK:
edd7ae68 5109 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
a104bd88 5110
edd7ae68
RB
5111 case BUILT_IN_FPRINTF:
5112 case BUILT_IN_FPRINTF_UNLOCKED:
5113 case BUILT_IN_VFPRINTF:
5114 if (n == 2 || n == 3)
5115 return gimple_fold_builtin_fprintf (gsi,
5116 gimple_call_arg (stmt, 0),
5117 gimple_call_arg (stmt, 1),
5118 n == 3
5119 ? gimple_call_arg (stmt, 2)
5120 : NULL_TREE,
5121 fcode);
5122 break;
5123 case BUILT_IN_FPRINTF_CHK:
5124 case BUILT_IN_VFPRINTF_CHK:
5125 if (n == 3 || n == 4)
5126 return gimple_fold_builtin_fprintf (gsi,
5127 gimple_call_arg (stmt, 0),
5128 gimple_call_arg (stmt, 2),
5129 n == 4
5130 ? gimple_call_arg (stmt, 3)
5131 : NULL_TREE,
5132 fcode);
5133 break;
ad03a744
RB
5134 case BUILT_IN_PRINTF:
5135 case BUILT_IN_PRINTF_UNLOCKED:
5136 case BUILT_IN_VPRINTF:
5137 if (n == 1 || n == 2)
5138 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
5139 n == 2
5140 ? gimple_call_arg (stmt, 1)
5141 : NULL_TREE, fcode);
5142 break;
5143 case BUILT_IN_PRINTF_CHK:
5144 case BUILT_IN_VPRINTF_CHK:
5145 if (n == 2 || n == 3)
5146 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
5147 n == 3
5148 ? gimple_call_arg (stmt, 2)
5149 : NULL_TREE, fcode);
242a37f1 5150 break;
48126138
NS
5151 case BUILT_IN_ACC_ON_DEVICE:
5152 return gimple_fold_builtin_acc_on_device (gsi,
5153 gimple_call_arg (stmt, 0));
fe75f732
PK
5154 case BUILT_IN_REALLOC:
5155 return gimple_fold_builtin_realloc (gsi);
5156
1bea0d0a
JJ
5157 case BUILT_IN_CLEAR_PADDING:
5158 return gimple_fold_builtin_clear_padding (gsi);
5159
fef5a0d9
RB
5160 default:;
5161 }
5162
5163 /* Try the generic builtin folder. */
5164 bool ignore = (gimple_call_lhs (stmt) == NULL);
5165 tree result = fold_call_stmt (stmt, ignore);
5166 if (result)
5167 {
5168 if (ignore)
5169 STRIP_NOPS (result);
5170 else
5171 result = fold_convert (gimple_call_return_type (stmt), result);
52a5515e 5172 gimplify_and_update_call_from_tree (gsi, result);
fef5a0d9
RB
5173 return true;
5174 }
5175
5176 return false;
5177}
5178
451e8dae
NS
5179/* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
5180 function calls to constants, where possible. */
5181
5182static tree
5183fold_internal_goacc_dim (const gimple *call)
5184{
629b3d75
MJ
5185 int axis = oacc_get_ifn_dim_arg (call);
5186 int size = oacc_get_fn_dim_size (current_function_decl, axis);
451e8dae 5187 tree result = NULL_TREE;
67d2229e 5188 tree type = TREE_TYPE (gimple_call_lhs (call));
451e8dae 5189
67d2229e 5190 switch (gimple_call_internal_fn (call))
451e8dae 5191 {
67d2229e
TV
5192 case IFN_GOACC_DIM_POS:
5193 /* If the size is 1, we know the answer. */
5194 if (size == 1)
5195 result = build_int_cst (type, 0);
5196 break;
5197 case IFN_GOACC_DIM_SIZE:
5198 /* If the size is not dynamic, we know the answer. */
5199 if (size)
5200 result = build_int_cst (type, size);
5201 break;
5202 default:
5203 break;
451e8dae
NS
5204 }
5205
5206 return result;
5207}
5208
849a76a5
JJ
5209/* Return true if stmt is __atomic_compare_exchange_N call which is suitable
5210 for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
5211 &var where var is only addressable because of such calls. */
5212
bool
optimize_atomic_compare_exchange_p (gimple *stmt)
{
  /* Cheap structural checks first; gimple_call_builtin_p must pass
     before DECL_FUNCTION_CODE is consulted below.  */
  if (gimple_call_num_args (stmt) != 6
      || !flag_inline_atomics
      || !optimize
      || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
      || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
      || !gimple_vdef (stmt)
      || !gimple_vuse (stmt))
    return false;

  tree fndecl = gimple_call_fndecl (stmt);
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
      break;
    default:
      return false;
    }

  /* The "expected" argument must be the address of an automatic
     variable of gimple register type.  */
  tree expected = gimple_call_arg (stmt, 1);
  if (TREE_CODE (expected) != ADDR_EXPR
      || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
    return false;

  tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
  if (!is_gimple_reg_type (etype)
      || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
      || TREE_THIS_VOLATILE (etype)
      || VECTOR_TYPE_P (etype)
      || TREE_CODE (etype) == COMPLEX_TYPE
      /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
	 might not preserve all the bits.  See PR71716.  */
      || SCALAR_FLOAT_TYPE_P (etype)
      || maybe_ne (TYPE_PRECISION (etype),
		   GET_MODE_BITSIZE (TYPE_MODE (etype))))
    return false;

  /* The "weak" argument must be a literal 0 or 1.  */
  tree weak = gimple_call_arg (stmt, 3);
  if (!integer_zerop (weak) && !integer_onep (weak))
    return false;

  /* ITYPE is the uintN_t type from the builtin's prototype (third
     parameter); the target must support a CAS in its mode.  */
  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  machine_mode mode = TYPE_MODE (itype);

  if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
      == CODE_FOR_nothing
      && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
    return false;

  if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
    return false;

  return true;
}
5274
5275/* Fold
5276 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
5277 into
5278 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
5279 i = IMAGPART_EXPR <t>;
5280 r = (_Bool) i;
5281 e = REALPART_EXPR <t>; */
5282
void
fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree fndecl = gimple_call_fndecl (stmt);
  /* ITYPE is the uintN_t type from the builtin's prototype; the
     internal function returns a _Complex of it (value + success).  */
  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  tree ctype = build_complex_type (itype);
  tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
  bool throws = false;
  edge e = NULL;
  /* Load the current value of the "expected" variable ...  */
  gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
				   expected);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);
  gimple_stmt_iterator gsiret = gsi_for_stmt (g);
  /* ... and view-convert it to ITYPE if the types differ.  */
  if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
    {
      g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, itype,
				       gimple_assign_lhs (g)));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }
  /* Encode weakness (bit 8) and the access size in one flag operand.  */
  int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
	     + int_size_in_bytes (itype);
  g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
				  gimple_call_arg (stmt, 0),
				  gimple_assign_lhs (g),
				  gimple_call_arg (stmt, 2),
				  build_int_cst (integer_type_node, flag),
				  gimple_call_arg (stmt, 4),
				  gimple_call_arg (stmt, 5));
  tree lhs = make_ssa_name (ctype);
  gimple_call_set_lhs (g, lhs);
  gimple_move_vops (g, stmt);
  tree oldlhs = gimple_call_lhs (stmt);
  /* If the original call could throw internally, follow-up statements
     must go on the fallthru edge, not after the (throwing) call.  */
  if (stmt_can_throw_internal (cfun, stmt))
    {
      throws = true;
      e = find_fallthru_edge (gsi_bb (*gsi)->succs);
    }
  gimple_call_set_nothrow (as_a <gcall *> (g),
			   gimple_call_nothrow_p (as_a <gcall *> (stmt)));
  gimple_call_set_lhs (stmt, NULL_TREE);
  gsi_replace (gsi, g, true);
  if (oldlhs)
    {
      /* r = (_Bool) IMAGPART_EXPR <t>;  */
      g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
			       build1 (IMAGPART_EXPR, itype, lhs));
      if (throws)
	{
	  gsi_insert_on_edge_immediate (e, g);
	  *gsi = gsi_for_stmt (g);
	}
      else
	gsi_insert_after (gsi, g, GSI_NEW_STMT);
      g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  /* e = REALPART_EXPR <t>; stored back into "expected" below.  */
  g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
			   build1 (REALPART_EXPR, itype, lhs));
  if (throws && oldlhs == NULL_TREE)
    {
      gsi_insert_on_edge_immediate (e, g);
      *gsi = gsi_for_stmt (g);
    }
  else
    gsi_insert_after (gsi, g, GSI_NEW_STMT);
  if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
    {
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
			       VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
				       gimple_assign_lhs (g)));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
  gsi_insert_after (gsi, g, GSI_NEW_STMT);
  /* Leave the iterator at the initial load of "expected".  */
  *gsi = gsiret;
}
5362
1304953e
JJ
5363/* Return true if ARG0 CODE ARG1 in infinite signed precision operation
5364 doesn't fit into TYPE. The test for overflow should be regardless of
5365 -fwrapv, and even for unsigned types. */
5366
5367bool
5368arith_overflowed_p (enum tree_code code, const_tree type,
5369 const_tree arg0, const_tree arg1)
5370{
1304953e
JJ
5371 widest2_int warg0 = widest2_int_cst (arg0);
5372 widest2_int warg1 = widest2_int_cst (arg1);
5373 widest2_int wres;
5374 switch (code)
5375 {
5376 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
5377 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
5378 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
5379 default: gcc_unreachable ();
5380 }
5381 signop sign = TYPE_SIGN (type);
5382 if (sign == UNSIGNED && wi::neg_p (wres))
5383 return true;
5384 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
5385}
5386
868363d4
RS
5387/* If IFN_MASK_LOAD/STORE call CALL is unconditional, return a MEM_REF
5388 for the memory it references, otherwise return null. VECTYPE is the
5389 type of the memory vector. */
5390
5391static tree
5392gimple_fold_mask_load_store_mem_ref (gcall *call, tree vectype)
5393{
5394 tree ptr = gimple_call_arg (call, 0);
5395 tree alias_align = gimple_call_arg (call, 1);
5396 tree mask = gimple_call_arg (call, 2);
5397 if (!tree_fits_uhwi_p (alias_align) || !integer_all_onesp (mask))
5398 return NULL_TREE;
5399
aa204d51 5400 unsigned HOST_WIDE_INT align = tree_to_uhwi (alias_align);
868363d4
RS
5401 if (TYPE_ALIGN (vectype) != align)
5402 vectype = build_aligned_type (vectype, align);
5403 tree offset = build_zero_cst (TREE_TYPE (alias_align));
5404 return fold_build2 (MEM_REF, vectype, ptr, offset);
5405}
5406
5407/* Try to fold IFN_MASK_LOAD call CALL. Return true on success. */
5408
5409static bool
5410gimple_fold_mask_load (gimple_stmt_iterator *gsi, gcall *call)
5411{
5412 tree lhs = gimple_call_lhs (call);
5413 if (!lhs)
5414 return false;
5415
5416 if (tree rhs = gimple_fold_mask_load_store_mem_ref (call, TREE_TYPE (lhs)))
5417 {
5418 gassign *new_stmt = gimple_build_assign (lhs, rhs);
5419 gimple_set_location (new_stmt, gimple_location (call));
5420 gimple_move_vops (new_stmt, call);
5421 gsi_replace (gsi, new_stmt, false);
5422 return true;
5423 }
5424 return false;
5425}
5426
5427/* Try to fold IFN_MASK_STORE call CALL. Return true on success. */
5428
5429static bool
5430gimple_fold_mask_store (gimple_stmt_iterator *gsi, gcall *call)
5431{
5432 tree rhs = gimple_call_arg (call, 3);
5433 if (tree lhs = gimple_fold_mask_load_store_mem_ref (call, TREE_TYPE (rhs)))
5434 {
5435 gassign *new_stmt = gimple_build_assign (lhs, rhs);
5436 gimple_set_location (new_stmt, gimple_location (call));
5437 gimple_move_vops (new_stmt, call);
5438 gsi_replace (gsi, new_stmt, false);
5439 return true;
5440 }
5441 return false;
5442}
5443
cbdd87d4
RG
5444/* Attempt to fold a call statement referenced by the statement iterator GSI.
5445 The statement may be replaced by another statement, e.g., if the call
5446 simplifies to a constant value. Return true if any changes were made.
5447 It is assumed that the operands have been previously folded. */
5448
e021c122 5449static bool
ceeffab0 5450gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
cbdd87d4 5451{
538dd0b7 5452 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3b45a007 5453 tree callee;
e021c122 5454 bool changed = false;
3b45a007
RG
5455
5456 /* Check for virtual calls that became direct calls. */
5457 callee = gimple_call_fn (stmt);
25583c4f 5458 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
3b45a007 5459 {
49c471e3
MJ
5460 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
5461 {
450ad0cd
JH
5462 if (dump_file && virtual_method_call_p (callee)
5463 && !possible_polymorphic_call_target_p
6f8091fc
JH
5464 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
5465 (OBJ_TYPE_REF_EXPR (callee)))))
450ad0cd
JH
5466 {
5467 fprintf (dump_file,
a70e9985 5468 "Type inheritance inconsistent devirtualization of ");
450ad0cd
JH
5469 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5470 fprintf (dump_file, " to ");
5471 print_generic_expr (dump_file, callee, TDF_SLIM);
5472 fprintf (dump_file, "\n");
5473 }
5474
49c471e3 5475 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
e021c122
RG
5476 changed = true;
5477 }
a70e9985 5478 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
e021c122 5479 {
61dd6a2e
JH
5480 bool final;
5481 vec <cgraph_node *>targets
058d0a90 5482 = possible_polymorphic_call_targets (callee, stmt, &final);
2b5f0895 5483 if (final && targets.length () <= 1 && dbg_cnt (devirt))
e021c122 5484 {
a70e9985 5485 tree lhs = gimple_call_lhs (stmt);
2b5f0895
XDL
5486 if (dump_enabled_p ())
5487 {
4f5b9c80 5488 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
2b5f0895
XDL
5489 "folding virtual function call to %s\n",
5490 targets.length () == 1
5491 ? targets[0]->name ()
5492 : "__builtin_unreachable");
5493 }
61dd6a2e 5494 if (targets.length () == 1)
cf3e5a89 5495 {
18954840
JJ
5496 tree fndecl = targets[0]->decl;
5497 gimple_call_set_fndecl (stmt, fndecl);
cf3e5a89 5498 changed = true;
18954840
JJ
5499 /* If changing the call to __cxa_pure_virtual
5500 or similar noreturn function, adjust gimple_call_fntype
5501 too. */
865f7046 5502 if (gimple_call_noreturn_p (stmt)
18954840
JJ
5503 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
5504 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
5505 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
5506 == void_type_node))
5507 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
a70e9985 5508 /* If the call becomes noreturn, remove the lhs. */
3cee7e4e
MP
5509 if (lhs
5510 && gimple_call_noreturn_p (stmt)
18954840 5511 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
abd3a68c 5512 || should_remove_lhs_p (lhs)))
a70e9985
JJ
5513 {
5514 if (TREE_CODE (lhs) == SSA_NAME)
5515 {
b731b390 5516 tree var = create_tmp_var (TREE_TYPE (lhs));
a70e9985 5517 tree def = get_or_create_ssa_default_def (cfun, var);
355fe088 5518 gimple *new_stmt = gimple_build_assign (lhs, def);
a70e9985
JJ
5519 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
5520 }
5521 gimple_call_set_lhs (stmt, NULL_TREE);
5522 }
0b986c6a 5523 maybe_remove_unused_call_args (cfun, stmt);
cf3e5a89 5524 }
a70e9985 5525 else
cf3e5a89
JJ
5526 {
5527 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
355fe088 5528 gimple *new_stmt = gimple_build_call (fndecl, 0);
cf3e5a89 5529 gimple_set_location (new_stmt, gimple_location (stmt));
2da6996c
RB
5530 /* If the call had a SSA name as lhs morph that into
5531 an uninitialized value. */
a70e9985
JJ
5532 if (lhs && TREE_CODE (lhs) == SSA_NAME)
5533 {
b731b390 5534 tree var = create_tmp_var (TREE_TYPE (lhs));
2da6996c
RB
5535 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
5536 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
5537 set_ssa_default_def (cfun, var, lhs);
42e52a51 5538 }
779724a5 5539 gimple_move_vops (new_stmt, stmt);
2da6996c 5540 gsi_replace (gsi, new_stmt, false);
cf3e5a89
JJ
5541 return true;
5542 }
e021c122 5543 }
49c471e3 5544 }
e021c122 5545 }
49c471e3 5546
f2d3d07e
RH
5547 /* Check for indirect calls that became direct calls, and then
5548 no longer require a static chain. */
5549 if (gimple_call_chain (stmt))
5550 {
5551 tree fn = gimple_call_fndecl (stmt);
5552 if (fn && !DECL_STATIC_CHAIN (fn))
5553 {
5554 gimple_call_set_chain (stmt, NULL);
5555 changed = true;
5556 }
f2d3d07e
RH
5557 }
5558
e021c122
RG
5559 if (inplace)
5560 return changed;
5561
5562 /* Check for builtins that CCP can handle using information not
5563 available in the generic fold routines. */
fef5a0d9
RB
5564 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
5565 {
5566 if (gimple_fold_builtin (gsi))
5567 changed = true;
5568 }
5569 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
e021c122 5570 {
ea679d55 5571 changed |= targetm.gimple_fold_builtin (gsi);
3b45a007 5572 }
368b454d 5573 else if (gimple_call_internal_p (stmt))
ed9c79e1 5574 {
368b454d
JJ
5575 enum tree_code subcode = ERROR_MARK;
5576 tree result = NULL_TREE;
1304953e
JJ
5577 bool cplx_result = false;
5578 tree overflow = NULL_TREE;
368b454d
JJ
5579 switch (gimple_call_internal_fn (stmt))
5580 {
5581 case IFN_BUILTIN_EXPECT:
5582 result = fold_builtin_expect (gimple_location (stmt),
5583 gimple_call_arg (stmt, 0),
5584 gimple_call_arg (stmt, 1),
1e9168b2
ML
5585 gimple_call_arg (stmt, 2),
5586 NULL_TREE);
368b454d 5587 break;
0e82f089 5588 case IFN_UBSAN_OBJECT_SIZE:
ca1150f0
JJ
5589 {
5590 tree offset = gimple_call_arg (stmt, 1);
5591 tree objsize = gimple_call_arg (stmt, 2);
5592 if (integer_all_onesp (objsize)
5593 || (TREE_CODE (offset) == INTEGER_CST
5594 && TREE_CODE (objsize) == INTEGER_CST
5595 && tree_int_cst_le (offset, objsize)))
5596 {
5597 replace_call_with_value (gsi, NULL_TREE);
5598 return true;
5599 }
5600 }
5601 break;
5602 case IFN_UBSAN_PTR:
5603 if (integer_zerop (gimple_call_arg (stmt, 1)))
0e82f089 5604 {
ca1150f0 5605 replace_call_with_value (gsi, NULL_TREE);
0e82f089
MP
5606 return true;
5607 }
5608 break;
ca1150f0
JJ
5609 case IFN_UBSAN_BOUNDS:
5610 {
5611 tree index = gimple_call_arg (stmt, 1);
5612 tree bound = gimple_call_arg (stmt, 2);
5613 if (TREE_CODE (index) == INTEGER_CST
5614 && TREE_CODE (bound) == INTEGER_CST)
5615 {
5616 index = fold_convert (TREE_TYPE (bound), index);
5617 if (TREE_CODE (index) == INTEGER_CST
5618 && tree_int_cst_le (index, bound))
5619 {
5620 replace_call_with_value (gsi, NULL_TREE);
5621 return true;
5622 }
5623 }
5624 }
5625 break;
451e8dae
NS
5626 case IFN_GOACC_DIM_SIZE:
5627 case IFN_GOACC_DIM_POS:
5628 result = fold_internal_goacc_dim (stmt);
5629 break;
368b454d
JJ
5630 case IFN_UBSAN_CHECK_ADD:
5631 subcode = PLUS_EXPR;
5632 break;
5633 case IFN_UBSAN_CHECK_SUB:
5634 subcode = MINUS_EXPR;
5635 break;
5636 case IFN_UBSAN_CHECK_MUL:
5637 subcode = MULT_EXPR;
5638 break;
1304953e
JJ
5639 case IFN_ADD_OVERFLOW:
5640 subcode = PLUS_EXPR;
5641 cplx_result = true;
5642 break;
5643 case IFN_SUB_OVERFLOW:
5644 subcode = MINUS_EXPR;
5645 cplx_result = true;
5646 break;
5647 case IFN_MUL_OVERFLOW:
5648 subcode = MULT_EXPR;
5649 cplx_result = true;
5650 break;
868363d4
RS
5651 case IFN_MASK_LOAD:
5652 changed |= gimple_fold_mask_load (gsi, stmt);
5653 break;
5654 case IFN_MASK_STORE:
5655 changed |= gimple_fold_mask_store (gsi, stmt);
5656 break;
368b454d
JJ
5657 default:
5658 break;
5659 }
5660 if (subcode != ERROR_MARK)
5661 {
5662 tree arg0 = gimple_call_arg (stmt, 0);
5663 tree arg1 = gimple_call_arg (stmt, 1);
1304953e
JJ
5664 tree type = TREE_TYPE (arg0);
5665 if (cplx_result)
5666 {
5667 tree lhs = gimple_call_lhs (stmt);
5668 if (lhs == NULL_TREE)
5669 type = NULL_TREE;
5670 else
5671 type = TREE_TYPE (TREE_TYPE (lhs));
5672 }
5673 if (type == NULL_TREE)
5674 ;
368b454d 5675 /* x = y + 0; x = y - 0; x = y * 0; */
1304953e
JJ
5676 else if (integer_zerop (arg1))
5677 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
368b454d
JJ
5678 /* x = 0 + y; x = 0 * y; */
5679 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
1304953e 5680 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
368b454d
JJ
5681 /* x = y - y; */
5682 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
1304953e 5683 result = integer_zero_node;
368b454d 5684 /* x = y * 1; x = 1 * y; */
1304953e
JJ
5685 else if (subcode == MULT_EXPR && integer_onep (arg1))
5686 result = arg0;
5687 else if (subcode == MULT_EXPR && integer_onep (arg0))
5688 result = arg1;
5689 else if (TREE_CODE (arg0) == INTEGER_CST
5690 && TREE_CODE (arg1) == INTEGER_CST)
368b454d 5691 {
1304953e
JJ
5692 if (cplx_result)
5693 result = int_const_binop (subcode, fold_convert (type, arg0),
5694 fold_convert (type, arg1));
5695 else
5696 result = int_const_binop (subcode, arg0, arg1);
5697 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
5698 {
5699 if (cplx_result)
5700 overflow = build_one_cst (type);
5701 else
5702 result = NULL_TREE;
5703 }
5704 }
5705 if (result)
5706 {
5707 if (result == integer_zero_node)
5708 result = build_zero_cst (type);
5709 else if (cplx_result && TREE_TYPE (result) != type)
5710 {
5711 if (TREE_CODE (result) == INTEGER_CST)
5712 {
5713 if (arith_overflowed_p (PLUS_EXPR, type, result,
5714 integer_zero_node))
5715 overflow = build_one_cst (type);
5716 }
5717 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
5718 && TYPE_UNSIGNED (type))
5719 || (TYPE_PRECISION (type)
5720 < (TYPE_PRECISION (TREE_TYPE (result))
5721 + (TYPE_UNSIGNED (TREE_TYPE (result))
5722 && !TYPE_UNSIGNED (type)))))
5723 result = NULL_TREE;
5724 if (result)
5725 result = fold_convert (type, result);
5726 }
368b454d
JJ
5727 }
5728 }
1304953e 5729
ed9c79e1
JJ
5730 if (result)
5731 {
1304953e
JJ
5732 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
5733 result = drop_tree_overflow (result);
5734 if (cplx_result)
5735 {
5736 if (overflow == NULL_TREE)
5737 overflow = build_zero_cst (TREE_TYPE (result));
5738 tree ctype = build_complex_type (TREE_TYPE (result));
5739 if (TREE_CODE (result) == INTEGER_CST
5740 && TREE_CODE (overflow) == INTEGER_CST)
5741 result = build_complex (ctype, result, overflow);
5742 else
5743 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
5744 ctype, result, overflow);
5745 }
52a5515e 5746 gimplify_and_update_call_from_tree (gsi, result);
ed9c79e1
JJ
5747 changed = true;
5748 }
5749 }
3b45a007 5750
e021c122 5751 return changed;
cbdd87d4
RG
5752}
5753
e0ee10ed 5754
89a79e96
RB
5755/* Return true whether NAME has a use on STMT. */
5756
5757static bool
355fe088 5758has_use_on_stmt (tree name, gimple *stmt)
89a79e96
RB
5759{
5760 imm_use_iterator iter;
5761 use_operand_p use_p;
5762 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
5763 if (USE_STMT (use_p) == stmt)
5764 return true;
5765 return false;
5766}
5767
e0ee10ed
RB
5768/* Worker for fold_stmt_1 dispatch to pattern based folding with
5769 gimple_simplify.
5770
5771 Replaces *GSI with the simplification result in RCODE and OPS
5772 and the associated statements in *SEQ. Does the replacement
5773 according to INPLACE and returns true if the operation succeeded. */
5774
5775static bool
5776replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
5d75ad95 5777 gimple_match_op *res_op,
e0ee10ed
RB
5778 gimple_seq *seq, bool inplace)
5779{
355fe088 5780 gimple *stmt = gsi_stmt (*gsi);
5d75ad95
RS
5781 tree *ops = res_op->ops;
5782 unsigned int num_ops = res_op->num_ops;
e0ee10ed
RB
5783
5784 /* Play safe and do not allow abnormals to be mentioned in
89a79e96
RB
5785 newly created statements. See also maybe_push_res_to_seq.
5786 As an exception allow such uses if there was a use of the
5787 same SSA name on the old stmt. */
5d75ad95
RS
5788 for (unsigned int i = 0; i < num_ops; ++i)
5789 if (TREE_CODE (ops[i]) == SSA_NAME
5790 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])
5791 && !has_use_on_stmt (ops[i], stmt))
5792 return false;
5793
5794 if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
5795 for (unsigned int i = 0; i < 2; ++i)
5796 if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
5797 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))
5798 && !has_use_on_stmt (TREE_OPERAND (ops[0], i), stmt))
5799 return false;
e0ee10ed 5800
fec40d06
RS
5801 /* Don't insert new statements when INPLACE is true, even if we could
5802 reuse STMT for the final statement. */
5803 if (inplace && !gimple_seq_empty_p (*seq))
5804 return false;
5805
538dd0b7 5806 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
e0ee10ed 5807 {
5d75ad95
RS
5808 gcc_assert (res_op->code.is_tree_code ());
5809 if (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison
e0ee10ed
RB
5810 /* GIMPLE_CONDs condition may not throw. */
5811 && (!flag_exceptions
5812 || !cfun->can_throw_non_call_exceptions
5d75ad95 5813 || !operation_could_trap_p (res_op->code,
e0ee10ed
RB
5814 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
5815 false, NULL_TREE)))
5d75ad95
RS
5816 gimple_cond_set_condition (cond_stmt, res_op->code, ops[0], ops[1]);
5817 else if (res_op->code == SSA_NAME)
538dd0b7 5818 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
e0ee10ed 5819 build_zero_cst (TREE_TYPE (ops[0])));
5d75ad95 5820 else if (res_op->code == INTEGER_CST)
e0ee10ed
RB
5821 {
5822 if (integer_zerop (ops[0]))
538dd0b7 5823 gimple_cond_make_false (cond_stmt);
e0ee10ed 5824 else
538dd0b7 5825 gimple_cond_make_true (cond_stmt);
e0ee10ed
RB
5826 }
5827 else if (!inplace)
5828 {
5d75ad95 5829 tree res = maybe_push_res_to_seq (res_op, seq);
e0ee10ed
RB
5830 if (!res)
5831 return false;
538dd0b7 5832 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
e0ee10ed
RB
5833 build_zero_cst (TREE_TYPE (res)));
5834 }
5835 else
5836 return false;
5837 if (dump_file && (dump_flags & TDF_DETAILS))
5838 {
5839 fprintf (dump_file, "gimple_simplified to ");
5840 if (!gimple_seq_empty_p (*seq))
5841 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
5842 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
5843 0, TDF_SLIM);
5844 }
5845 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
5846 return true;
5847 }
5848 else if (is_gimple_assign (stmt)
5d75ad95 5849 && res_op->code.is_tree_code ())
e0ee10ed
RB
5850 {
5851 if (!inplace
5d75ad95 5852 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (res_op->code))
e0ee10ed 5853 {
5d75ad95
RS
5854 maybe_build_generic_op (res_op);
5855 gimple_assign_set_rhs_with_ops (gsi, res_op->code,
5856 res_op->op_or_null (0),
5857 res_op->op_or_null (1),
5858 res_op->op_or_null (2));
e0ee10ed
RB
5859 if (dump_file && (dump_flags & TDF_DETAILS))
5860 {
5861 fprintf (dump_file, "gimple_simplified to ");
5862 if (!gimple_seq_empty_p (*seq))
5863 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
5864 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
5865 0, TDF_SLIM);
5866 }
5867 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
5868 return true;
5869 }
5870 }
5d75ad95
RS
5871 else if (res_op->code.is_fn_code ()
5872 && gimple_call_combined_fn (stmt) == res_op->code)
37d486ab 5873 {
5d75ad95
RS
5874 gcc_assert (num_ops == gimple_call_num_args (stmt));
5875 for (unsigned int i = 0; i < num_ops; ++i)
5876 gimple_call_set_arg (stmt, i, ops[i]);
fec40d06
RS
5877 if (dump_file && (dump_flags & TDF_DETAILS))
5878 {
5879 fprintf (dump_file, "gimple_simplified to ");
5880 if (!gimple_seq_empty_p (*seq))
5881 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
5882 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
5883 }
5884 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
37d486ab
RB
5885 return true;
5886 }
e0ee10ed
RB
5887 else if (!inplace)
5888 {
5889 if (gimple_has_lhs (stmt))
5890 {
5891 tree lhs = gimple_get_lhs (stmt);
5d75ad95 5892 if (!maybe_push_res_to_seq (res_op, seq, lhs))
de665bbd 5893 return false;
e0ee10ed
RB
5894 if (dump_file && (dump_flags & TDF_DETAILS))
5895 {
5896 fprintf (dump_file, "gimple_simplified to ");
5897 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
5898 }
5899 gsi_replace_with_seq_vops (gsi, *seq);
5900 return true;
5901 }
5902 else
5903 gcc_unreachable ();
5904 }
5905
5906 return false;
5907}
5908
040292e7
RB
5909/* Canonicalize MEM_REFs invariant address operand after propagation. */
5910
5911static bool
fabe0ede 5912maybe_canonicalize_mem_ref_addr (tree *t, bool is_debug = false)
040292e7
RB
5913{
5914 bool res = false;
fe8c8f1e 5915 tree *orig_t = t;
040292e7
RB
5916
5917 if (TREE_CODE (*t) == ADDR_EXPR)
5918 t = &TREE_OPERAND (*t, 0);
5919
f17a223d
RB
5920 /* The C and C++ frontends use an ARRAY_REF for indexing with their
5921 generic vector extension. The actual vector referenced is
5922 view-converted to an array type for this purpose. If the index
5923 is constant the canonical representation in the middle-end is a
5924 BIT_FIELD_REF so re-write the former to the latter here. */
5925 if (TREE_CODE (*t) == ARRAY_REF
5926 && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
5927 && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
5928 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
5929 {
5930 tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
5931 if (VECTOR_TYPE_P (vtype))
5932 {
5933 tree low = array_ref_low_bound (*t);
5934 if (TREE_CODE (low) == INTEGER_CST)
5935 {
5936 if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
5937 {
5938 widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
5939 wi::to_widest (low));
5940 idx = wi::mul (idx, wi::to_widest
5941 (TYPE_SIZE (TREE_TYPE (*t))));
5942 widest_int ext
5943 = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
5944 if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
5945 {
5946 *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
5947 TREE_TYPE (*t),
5948 TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
5949 TYPE_SIZE (TREE_TYPE (*t)),
92e29a5e 5950 wide_int_to_tree (bitsizetype, idx));
f17a223d
RB
5951 res = true;
5952 }
5953 }
5954 }
5955 }
5956 }
5957
040292e7
RB
5958 while (handled_component_p (*t))
5959 t = &TREE_OPERAND (*t, 0);
5960
5961 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
5962 of invariant addresses into a SSA name MEM_REF address. */
5963 if (TREE_CODE (*t) == MEM_REF
5964 || TREE_CODE (*t) == TARGET_MEM_REF)
5965 {
5966 tree addr = TREE_OPERAND (*t, 0);
5967 if (TREE_CODE (addr) == ADDR_EXPR
5968 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
5969 || handled_component_p (TREE_OPERAND (addr, 0))))
5970 {
5971 tree base;
a90c8804 5972 poly_int64 coffset;
040292e7
RB
5973 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
5974 &coffset);
5975 if (!base)
fabe0ede
JJ
5976 {
5977 if (is_debug)
5978 return false;
5979 gcc_unreachable ();
5980 }
040292e7
RB
5981
5982 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
5983 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
5984 TREE_OPERAND (*t, 1),
5985 size_int (coffset));
5986 res = true;
5987 }
5988 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
5989 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
5990 }
5991
5992 /* Canonicalize back MEM_REFs to plain reference trees if the object
5993 accessed is a decl that has the same access semantics as the MEM_REF. */
5994 if (TREE_CODE (*t) == MEM_REF
5995 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
f3dccf50
RB
5996 && integer_zerop (TREE_OPERAND (*t, 1))
5997 && MR_DEPENDENCE_CLIQUE (*t) == 0)
040292e7
RB
5998 {
5999 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
6000 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
6001 if (/* Same volatile qualification. */
6002 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
6003 /* Same TBAA behavior with -fstrict-aliasing. */
6004 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
6005 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
6006 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
6007 /* Same alignment. */
6008 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
6009 /* We have to look out here to not drop a required conversion
6010 from the rhs to the lhs if *t appears on the lhs or vice-versa
6011 if it appears on the rhs. Thus require strict type
6012 compatibility. */
6013 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
6014 {
6015 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
6016 res = true;
6017 }
6018 }
6019
fe8c8f1e
RB
6020 else if (TREE_CODE (*orig_t) == ADDR_EXPR
6021 && TREE_CODE (*t) == MEM_REF
6022 && TREE_CODE (TREE_OPERAND (*t, 0)) == INTEGER_CST)
6023 {
6024 tree base;
6025 poly_int64 coffset;
6026 base = get_addr_base_and_unit_offset (TREE_OPERAND (*orig_t, 0),
6027 &coffset);
6028 if (base)
6029 {
6030 gcc_assert (TREE_CODE (base) == MEM_REF);
6031 poly_int64 moffset;
6032 if (mem_ref_offset (base).to_shwi (&moffset))
6033 {
6034 coffset += moffset;
6035 if (wi::to_poly_wide (TREE_OPERAND (base, 0)).to_shwi (&moffset))
6036 {
6037 coffset += moffset;
6038 *orig_t = build_int_cst (TREE_TYPE (*orig_t), coffset);
6039 return true;
6040 }
6041 }
6042 }
6043 }
6044
040292e7
RB
6045 /* Canonicalize TARGET_MEM_REF in particular with respect to
6046 the indexes becoming constant. */
6047 else if (TREE_CODE (*t) == TARGET_MEM_REF)
6048 {
6049 tree tem = maybe_fold_tmr (*t);
6050 if (tem)
6051 {
6052 *t = tem;
c7789683
RS
6053 if (TREE_CODE (*orig_t) == ADDR_EXPR)
6054 recompute_tree_invariant_for_addr_expr (*orig_t);
040292e7
RB
6055 res = true;
6056 }
6057 }
6058
6059 return res;
6060}
6061
cbdd87d4
RG
6062/* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
6063 distinguishes both cases. */
6064
6065static bool
e0ee10ed 6066fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
cbdd87d4
RG
6067{
6068 bool changed = false;
355fe088 6069 gimple *stmt = gsi_stmt (*gsi);
a8b85ce9 6070 bool nowarning = gimple_no_warning_p (stmt);
cbdd87d4 6071 unsigned i;
a8b85ce9 6072 fold_defer_overflow_warnings ();
cbdd87d4 6073
040292e7
RB
6074 /* First do required canonicalization of [TARGET_]MEM_REF addresses
6075 after propagation.
6076 ??? This shouldn't be done in generic folding but in the
6077 propagation helpers which also know whether an address was
89a79e96
RB
6078 propagated.
6079 Also canonicalize operand order. */
040292e7
RB
6080 switch (gimple_code (stmt))
6081 {
6082 case GIMPLE_ASSIGN:
6083 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
6084 {
6085 tree *rhs = gimple_assign_rhs1_ptr (stmt);
6086 if ((REFERENCE_CLASS_P (*rhs)
6087 || TREE_CODE (*rhs) == ADDR_EXPR)
6088 && maybe_canonicalize_mem_ref_addr (rhs))
6089 changed = true;
6090 tree *lhs = gimple_assign_lhs_ptr (stmt);
6091 if (REFERENCE_CLASS_P (*lhs)
6092 && maybe_canonicalize_mem_ref_addr (lhs))
6093 changed = true;
6094 }
89a79e96
RB
6095 else
6096 {
6097 /* Canonicalize operand order. */
6098 enum tree_code code = gimple_assign_rhs_code (stmt);
6099 if (TREE_CODE_CLASS (code) == tcc_comparison
6100 || commutative_tree_code (code)
6101 || commutative_ternary_tree_code (code))
6102 {
6103 tree rhs1 = gimple_assign_rhs1 (stmt);
6104 tree rhs2 = gimple_assign_rhs2 (stmt);
14e72812 6105 if (tree_swap_operands_p (rhs1, rhs2))
89a79e96
RB
6106 {
6107 gimple_assign_set_rhs1 (stmt, rhs2);
6108 gimple_assign_set_rhs2 (stmt, rhs1);
6109 if (TREE_CODE_CLASS (code) == tcc_comparison)
6110 gimple_assign_set_rhs_code (stmt,
6111 swap_tree_comparison (code));
6112 changed = true;
6113 }
6114 }
6115 }
040292e7
RB
6116 break;
6117 case GIMPLE_CALL:
6118 {
6119 for (i = 0; i < gimple_call_num_args (stmt); ++i)
6120 {
6121 tree *arg = gimple_call_arg_ptr (stmt, i);
6122 if (REFERENCE_CLASS_P (*arg)
6123 && maybe_canonicalize_mem_ref_addr (arg))
6124 changed = true;
6125 }
6126 tree *lhs = gimple_call_lhs_ptr (stmt);
6127 if (*lhs
6128 && REFERENCE_CLASS_P (*lhs)
6129 && maybe_canonicalize_mem_ref_addr (lhs))
6130 changed = true;
6131 break;
6132 }
6133 case GIMPLE_ASM:
6134 {
538dd0b7
DM
6135 gasm *asm_stmt = as_a <gasm *> (stmt);
6136 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
040292e7 6137 {
538dd0b7 6138 tree link = gimple_asm_output_op (asm_stmt, i);
040292e7
RB
6139 tree op = TREE_VALUE (link);
6140 if (REFERENCE_CLASS_P (op)
6141 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
6142 changed = true;
6143 }
538dd0b7 6144 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
040292e7 6145 {
538dd0b7 6146 tree link = gimple_asm_input_op (asm_stmt, i);
040292e7
RB
6147 tree op = TREE_VALUE (link);
6148 if ((REFERENCE_CLASS_P (op)
6149 || TREE_CODE (op) == ADDR_EXPR)
6150 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
6151 changed = true;
6152 }
6153 }
6154 break;
6155 case GIMPLE_DEBUG:
6156 if (gimple_debug_bind_p (stmt))
6157 {
6158 tree *val = gimple_debug_bind_get_value_ptr (stmt);
6159 if (*val
6160 && (REFERENCE_CLASS_P (*val)
6161 || TREE_CODE (*val) == ADDR_EXPR)
fabe0ede 6162 && maybe_canonicalize_mem_ref_addr (val, true))
040292e7
RB
6163 changed = true;
6164 }
6165 break;
89a79e96
RB
6166 case GIMPLE_COND:
6167 {
6168 /* Canonicalize operand order. */
6169 tree lhs = gimple_cond_lhs (stmt);
6170 tree rhs = gimple_cond_rhs (stmt);
14e72812 6171 if (tree_swap_operands_p (lhs, rhs))
89a79e96
RB
6172 {
6173 gcond *gc = as_a <gcond *> (stmt);
6174 gimple_cond_set_lhs (gc, rhs);
6175 gimple_cond_set_rhs (gc, lhs);
6176 gimple_cond_set_code (gc,
6177 swap_tree_comparison (gimple_cond_code (gc)));
6178 changed = true;
6179 }
6180 }
040292e7
RB
6181 default:;
6182 }
6183
e0ee10ed
RB
6184 /* Dispatch to pattern-based folding. */
6185 if (!inplace
6186 || is_gimple_assign (stmt)
6187 || gimple_code (stmt) == GIMPLE_COND)
6188 {
6189 gimple_seq seq = NULL;
5d75ad95
RS
6190 gimple_match_op res_op;
6191 if (gimple_simplify (stmt, &res_op, inplace ? NULL : &seq,
0ff093d8 6192 valueize, valueize))
e0ee10ed 6193 {
5d75ad95 6194 if (replace_stmt_with_simplification (gsi, &res_op, &seq, inplace))
e0ee10ed
RB
6195 changed = true;
6196 else
6197 gimple_seq_discard (seq);
6198 }
6199 }
6200
6201 stmt = gsi_stmt (*gsi);
6202
cbdd87d4
RG
6203 /* Fold the main computation performed by the statement. */
6204 switch (gimple_code (stmt))
6205 {
6206 case GIMPLE_ASSIGN:
6207 {
819ec64c
RB
6208 /* Try to canonicalize for boolean-typed X the comparisons
6209 X == 0, X == 1, X != 0, and X != 1. */
6210 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
6211 || gimple_assign_rhs_code (stmt) == NE_EXPR)
5fbcc0ed 6212 {
819ec64c
RB
6213 tree lhs = gimple_assign_lhs (stmt);
6214 tree op1 = gimple_assign_rhs1 (stmt);
6215 tree op2 = gimple_assign_rhs2 (stmt);
6216 tree type = TREE_TYPE (op1);
6217
6218 /* Check whether the comparison operands are of the same boolean
6219 type as the result type is.
6220 Check that second operand is an integer-constant with value
6221 one or zero. */
6222 if (TREE_CODE (op2) == INTEGER_CST
6223 && (integer_zerop (op2) || integer_onep (op2))
6224 && useless_type_conversion_p (TREE_TYPE (lhs), type))
6225 {
6226 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
6227 bool is_logical_not = false;
6228
6229 /* X == 0 and X != 1 is a logical-not.of X
6230 X == 1 and X != 0 is X */
6231 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
6232 || (cmp_code == NE_EXPR && integer_onep (op2)))
6233 is_logical_not = true;
6234
6235 if (is_logical_not == false)
6236 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
6237 /* Only for one-bit precision typed X the transformation
6238 !X -> ~X is valied. */
6239 else if (TYPE_PRECISION (type) == 1)
6240 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
6241 /* Otherwise we use !X -> X ^ 1. */
6242 else
6243 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
6244 build_int_cst (type, 1));
6245 changed = true;
6246 break;
6247 }
5fbcc0ed 6248 }
819ec64c
RB
6249
6250 unsigned old_num_ops = gimple_num_ops (stmt);
6251 tree lhs = gimple_assign_lhs (stmt);
6252 tree new_rhs = fold_gimple_assign (gsi);
cbdd87d4
RG
6253 if (new_rhs
6254 && !useless_type_conversion_p (TREE_TYPE (lhs),
6255 TREE_TYPE (new_rhs)))
6256 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
6257 if (new_rhs
6258 && (!inplace
6259 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
6260 {
6261 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
6262 changed = true;
6263 }
6264 break;
6265 }
6266
cbdd87d4 6267 case GIMPLE_CALL:
ceeffab0 6268 changed |= gimple_fold_call (gsi, inplace);
cbdd87d4
RG
6269 break;
6270
bd422c4a
RG
6271 case GIMPLE_DEBUG:
6272 if (gimple_debug_bind_p (stmt))
6273 {
6274 tree val = gimple_debug_bind_get_value (stmt);
6275 if (val
6276 && REFERENCE_CLASS_P (val))
6277 {
0bf8cd9d 6278 tree tem = maybe_fold_reference (val);
bd422c4a
RG
6279 if (tem)
6280 {
6281 gimple_debug_bind_set_value (stmt, tem);
6282 changed = true;
6283 }
6284 }
3e888a5e
RG
6285 else if (val
6286 && TREE_CODE (val) == ADDR_EXPR)
6287 {
6288 tree ref = TREE_OPERAND (val, 0);
0bf8cd9d 6289 tree tem = maybe_fold_reference (ref);
3e888a5e
RG
6290 if (tem)
6291 {
6292 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
6293 gimple_debug_bind_set_value (stmt, tem);
6294 changed = true;
6295 }
6296 }
bd422c4a
RG
6297 }
6298 break;
6299
cfe3d653
PK
6300 case GIMPLE_RETURN:
6301 {
6302 greturn *ret_stmt = as_a<greturn *> (stmt);
6303 tree ret = gimple_return_retval(ret_stmt);
6304
6305 if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
6306 {
6307 tree val = valueize (ret);
1af928db
RB
6308 if (val && val != ret
6309 && may_propagate_copy (ret, val))
cfe3d653
PK
6310 {
6311 gimple_return_set_retval (ret_stmt, val);
6312 changed = true;
6313 }
6314 }
6315 }
6316 break;
6317
cbdd87d4
RG
6318 default:;
6319 }
6320
6321 stmt = gsi_stmt (*gsi);
6322
a8b85ce9 6323 fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
cbdd87d4
RG
6324 return changed;
6325}
6326
e0ee10ed
RB
6327/* Valueziation callback that ends up not following SSA edges. */
6328
6329tree
6330no_follow_ssa_edges (tree)
6331{
6332 return NULL_TREE;
6333}
6334
45cc9f96
RB
6335/* Valueization callback that ends up following single-use SSA edges only. */
6336
6337tree
6338follow_single_use_edges (tree val)
6339{
6340 if (TREE_CODE (val) == SSA_NAME
6341 && !has_single_use (val))
6342 return NULL_TREE;
6343 return val;
6344}
6345
c566cc9f
RS
6346/* Valueization callback that follows all SSA edges. */
6347
6348tree
6349follow_all_ssa_edges (tree val)
6350{
6351 return val;
6352}
6353
cbdd87d4
RG
6354/* Fold the statement pointed to by GSI. In some cases, this function may
6355 replace the whole statement with a new one. Returns true iff folding
6356 makes any changes.
6357 The statement pointed to by GSI should be in valid gimple form but may
6358 be in unfolded state as resulting from for example constant propagation
6359 which can produce *&x = 0. */
6360
6361bool
6362fold_stmt (gimple_stmt_iterator *gsi)
6363{
e0ee10ed
RB
6364 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
6365}
6366
6367bool
6368fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
6369{
6370 return fold_stmt_1 (gsi, false, valueize);
cbdd87d4
RG
6371}
6372
59401b92 6373/* Perform the minimal folding on statement *GSI. Only operations like
cbdd87d4
RG
6374 *&x created by constant propagation are handled. The statement cannot
6375 be replaced with a new one. Return true if the statement was
6376 changed, false otherwise.
59401b92 6377 The statement *GSI should be in valid gimple form but may
cbdd87d4
RG
6378 be in unfolded state as resulting from for example constant propagation
6379 which can produce *&x = 0. */
6380
6381bool
59401b92 6382fold_stmt_inplace (gimple_stmt_iterator *gsi)
cbdd87d4 6383{
355fe088 6384 gimple *stmt = gsi_stmt (*gsi);
e0ee10ed 6385 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
59401b92 6386 gcc_assert (gsi_stmt (*gsi) == stmt);
cbdd87d4
RG
6387 return changed;
6388}
6389
e89065a1
SL
6390/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
6391 if EXPR is null or we don't know how.
6392 If non-null, the result always has boolean type. */
6393
6394static tree
6395canonicalize_bool (tree expr, bool invert)
6396{
6397 if (!expr)
6398 return NULL_TREE;
6399 else if (invert)
6400 {
6401 if (integer_nonzerop (expr))
6402 return boolean_false_node;
6403 else if (integer_zerop (expr))
6404 return boolean_true_node;
6405 else if (TREE_CODE (expr) == SSA_NAME)
6406 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
6407 build_int_cst (TREE_TYPE (expr), 0));
98209db3 6408 else if (COMPARISON_CLASS_P (expr))
e89065a1
SL
6409 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
6410 boolean_type_node,
6411 TREE_OPERAND (expr, 0),
6412 TREE_OPERAND (expr, 1));
6413 else
6414 return NULL_TREE;
6415 }
6416 else
6417 {
6418 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
6419 return expr;
6420 if (integer_nonzerop (expr))
6421 return boolean_true_node;
6422 else if (integer_zerop (expr))
6423 return boolean_false_node;
6424 else if (TREE_CODE (expr) == SSA_NAME)
6425 return fold_build2 (NE_EXPR, boolean_type_node, expr,
6426 build_int_cst (TREE_TYPE (expr), 0));
98209db3 6427 else if (COMPARISON_CLASS_P (expr))
e89065a1
SL
6428 return fold_build2 (TREE_CODE (expr),
6429 boolean_type_node,
6430 TREE_OPERAND (expr, 0),
6431 TREE_OPERAND (expr, 1));
6432 else
6433 return NULL_TREE;
6434 }
6435}
6436
6437/* Check to see if a boolean expression EXPR is logically equivalent to the
6438 comparison (OP1 CODE OP2). Check for various identities involving
6439 SSA_NAMEs. */
6440
6441static bool
6442same_bool_comparison_p (const_tree expr, enum tree_code code,
6443 const_tree op1, const_tree op2)
6444{
355fe088 6445 gimple *s;
e89065a1
SL
6446
6447 /* The obvious case. */
6448 if (TREE_CODE (expr) == code
6449 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
6450 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
6451 return true;
6452
6453 /* Check for comparing (name, name != 0) and the case where expr
6454 is an SSA_NAME with a definition matching the comparison. */
6455 if (TREE_CODE (expr) == SSA_NAME
6456 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
6457 {
6458 if (operand_equal_p (expr, op1, 0))
6459 return ((code == NE_EXPR && integer_zerop (op2))
6460 || (code == EQ_EXPR && integer_nonzerop (op2)));
6461 s = SSA_NAME_DEF_STMT (expr);
6462 if (is_gimple_assign (s)
6463 && gimple_assign_rhs_code (s) == code
6464 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
6465 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
6466 return true;
6467 }
6468
6469 /* If op1 is of the form (name != 0) or (name == 0), and the definition
6470 of name is a comparison, recurse. */
6471 if (TREE_CODE (op1) == SSA_NAME
6472 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
6473 {
6474 s = SSA_NAME_DEF_STMT (op1);
6475 if (is_gimple_assign (s)
6476 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
6477 {
6478 enum tree_code c = gimple_assign_rhs_code (s);
6479 if ((c == NE_EXPR && integer_zerop (op2))
6480 || (c == EQ_EXPR && integer_nonzerop (op2)))
6481 return same_bool_comparison_p (expr, c,
6482 gimple_assign_rhs1 (s),
6483 gimple_assign_rhs2 (s));
6484 if ((c == EQ_EXPR && integer_zerop (op2))
6485 || (c == NE_EXPR && integer_nonzerop (op2)))
6486 return same_bool_comparison_p (expr,
6487 invert_tree_comparison (c, false),
6488 gimple_assign_rhs1 (s),
6489 gimple_assign_rhs2 (s));
6490 }
6491 }
6492 return false;
6493}
6494
6495/* Check to see if two boolean expressions OP1 and OP2 are logically
6496 equivalent. */
6497
6498static bool
6499same_bool_result_p (const_tree op1, const_tree op2)
6500{
6501 /* Simple cases first. */
6502 if (operand_equal_p (op1, op2, 0))
6503 return true;
6504
6505 /* Check the cases where at least one of the operands is a comparison.
6506 These are a bit smarter than operand_equal_p in that they apply some
6507 identifies on SSA_NAMEs. */
98209db3 6508 if (COMPARISON_CLASS_P (op2)
e89065a1
SL
6509 && same_bool_comparison_p (op1, TREE_CODE (op2),
6510 TREE_OPERAND (op2, 0),
6511 TREE_OPERAND (op2, 1)))
6512 return true;
98209db3 6513 if (COMPARISON_CLASS_P (op1)
e89065a1
SL
6514 && same_bool_comparison_p (op2, TREE_CODE (op1),
6515 TREE_OPERAND (op1, 0),
6516 TREE_OPERAND (op1, 1)))
6517 return true;
6518
6519 /* Default case. */
6520 return false;
6521}
6522
6523/* Forward declarations for some mutually recursive functions. */
6524
6525static tree
5f487a34 6526and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
e89065a1
SL
6527 enum tree_code code2, tree op2a, tree op2b);
6528static tree
5f487a34 6529and_var_with_comparison (tree type, tree var, bool invert,
e89065a1
SL
6530 enum tree_code code2, tree op2a, tree op2b);
6531static tree
5f487a34 6532and_var_with_comparison_1 (tree type, gimple *stmt,
e89065a1
SL
6533 enum tree_code code2, tree op2a, tree op2b);
6534static tree
5f487a34 6535or_comparisons_1 (tree, enum tree_code code1, tree op1a, tree op1b,
e89065a1
SL
6536 enum tree_code code2, tree op2a, tree op2b);
6537static tree
5f487a34 6538or_var_with_comparison (tree, tree var, bool invert,
e89065a1
SL
6539 enum tree_code code2, tree op2a, tree op2b);
6540static tree
5f487a34 6541or_var_with_comparison_1 (tree, gimple *stmt,
e89065a1
SL
6542 enum tree_code code2, tree op2a, tree op2b);
6543
6544/* Helper function for and_comparisons_1: try to simplify the AND of the
6545 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
6546 If INVERT is true, invert the value of the VAR before doing the AND.
6547 Return NULL_EXPR if we can't simplify this to a single expression. */
6548
6549static tree
5f487a34 6550and_var_with_comparison (tree type, tree var, bool invert,
e89065a1
SL
6551 enum tree_code code2, tree op2a, tree op2b)
6552{
6553 tree t;
355fe088 6554 gimple *stmt = SSA_NAME_DEF_STMT (var);
e89065a1
SL
6555
6556 /* We can only deal with variables whose definitions are assignments. */
6557 if (!is_gimple_assign (stmt))
6558 return NULL_TREE;
6559
6560 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
6561 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
6562 Then we only have to consider the simpler non-inverted cases. */
6563 if (invert)
5f487a34 6564 t = or_var_with_comparison_1 (type, stmt,
e89065a1
SL
6565 invert_tree_comparison (code2, false),
6566 op2a, op2b);
6567 else
5f487a34 6568 t = and_var_with_comparison_1 (type, stmt, code2, op2a, op2b);
e89065a1
SL
6569 return canonicalize_bool (t, invert);
6570}
6571
6572/* Try to simplify the AND of the ssa variable defined by the assignment
6573 STMT with the comparison specified by (OP2A CODE2 OP2B).
6574 Return NULL_EXPR if we can't simplify this to a single expression. */
6575
6576static tree
5f487a34 6577and_var_with_comparison_1 (tree type, gimple *stmt,
e89065a1
SL
6578 enum tree_code code2, tree op2a, tree op2b)
6579{
6580 tree var = gimple_assign_lhs (stmt);
6581 tree true_test_var = NULL_TREE;
6582 tree false_test_var = NULL_TREE;
6583 enum tree_code innercode = gimple_assign_rhs_code (stmt);
6584
6585 /* Check for identities like (var AND (var == 0)) => false. */
6586 if (TREE_CODE (op2a) == SSA_NAME
6587 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
6588 {
6589 if ((code2 == NE_EXPR && integer_zerop (op2b))
6590 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
6591 {
6592 true_test_var = op2a;
6593 if (var == true_test_var)
6594 return var;
6595 }
6596 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
6597 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
6598 {
6599 false_test_var = op2a;
6600 if (var == false_test_var)
6601 return boolean_false_node;
6602 }
6603 }
6604
6605 /* If the definition is a comparison, recurse on it. */
6606 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
6607 {
5f487a34 6608 tree t = and_comparisons_1 (type, innercode,
e89065a1
SL
6609 gimple_assign_rhs1 (stmt),
6610 gimple_assign_rhs2 (stmt),
6611 code2,
6612 op2a,
6613 op2b);
6614 if (t)
6615 return t;
6616 }
6617
6618 /* If the definition is an AND or OR expression, we may be able to
6619 simplify by reassociating. */
eb9820c0
KT
6620 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
6621 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
e89065a1
SL
6622 {
6623 tree inner1 = gimple_assign_rhs1 (stmt);
6624 tree inner2 = gimple_assign_rhs2 (stmt);
355fe088 6625 gimple *s;
e89065a1
SL
6626 tree t;
6627 tree partial = NULL_TREE;
eb9820c0 6628 bool is_and = (innercode == BIT_AND_EXPR);
e89065a1
SL
6629
6630 /* Check for boolean identities that don't require recursive examination
6631 of inner1/inner2:
6632 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
6633 inner1 AND (inner1 OR inner2) => inner1
6634 !inner1 AND (inner1 AND inner2) => false
6635 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
6636 Likewise for similar cases involving inner2. */
6637 if (inner1 == true_test_var)
6638 return (is_and ? var : inner1);
6639 else if (inner2 == true_test_var)
6640 return (is_and ? var : inner2);
6641 else if (inner1 == false_test_var)
6642 return (is_and
6643 ? boolean_false_node
5f487a34
LJH
6644 : and_var_with_comparison (type, inner2, false, code2, op2a,
6645 op2b));
e89065a1
SL
6646 else if (inner2 == false_test_var)
6647 return (is_and
6648 ? boolean_false_node
5f487a34
LJH
6649 : and_var_with_comparison (type, inner1, false, code2, op2a,
6650 op2b));
e89065a1
SL
6651
6652 /* Next, redistribute/reassociate the AND across the inner tests.
6653 Compute the first partial result, (inner1 AND (op2a code op2b)) */
6654 if (TREE_CODE (inner1) == SSA_NAME
6655 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
6656 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5f487a34 6657 && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
e89065a1
SL
6658 gimple_assign_rhs1 (s),
6659 gimple_assign_rhs2 (s),
6660 code2, op2a, op2b)))
6661 {
6662 /* Handle the AND case, where we are reassociating:
6663 (inner1 AND inner2) AND (op2a code2 op2b)
6664 => (t AND inner2)
6665 If the partial result t is a constant, we win. Otherwise
6666 continue on to try reassociating with the other inner test. */
6667 if (is_and)
6668 {
6669 if (integer_onep (t))
6670 return inner2;
6671 else if (integer_zerop (t))
6672 return boolean_false_node;
6673 }
6674
6675 /* Handle the OR case, where we are redistributing:
6676 (inner1 OR inner2) AND (op2a code2 op2b)
6677 => (t OR (inner2 AND (op2a code2 op2b))) */
8236c8eb
JJ
6678 else if (integer_onep (t))
6679 return boolean_true_node;
6680
6681 /* Save partial result for later. */
6682 partial = t;
e89065a1
SL
6683 }
6684
6685 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
6686 if (TREE_CODE (inner2) == SSA_NAME
6687 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
6688 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5f487a34 6689 && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
e89065a1
SL
6690 gimple_assign_rhs1 (s),
6691 gimple_assign_rhs2 (s),
6692 code2, op2a, op2b)))
6693 {
6694 /* Handle the AND case, where we are reassociating:
6695 (inner1 AND inner2) AND (op2a code2 op2b)
6696 => (inner1 AND t) */
6697 if (is_and)
6698 {
6699 if (integer_onep (t))
6700 return inner1;
6701 else if (integer_zerop (t))
6702 return boolean_false_node;
8236c8eb
JJ
6703 /* If both are the same, we can apply the identity
6704 (x AND x) == x. */
6705 else if (partial && same_bool_result_p (t, partial))
6706 return t;
e89065a1
SL
6707 }
6708
6709 /* Handle the OR case. where we are redistributing:
6710 (inner1 OR inner2) AND (op2a code2 op2b)
6711 => (t OR (inner1 AND (op2a code2 op2b)))
6712 => (t OR partial) */
6713 else
6714 {
6715 if (integer_onep (t))
6716 return boolean_true_node;
6717 else if (partial)
6718 {
6719 /* We already got a simplification for the other
6720 operand to the redistributed OR expression. The
6721 interesting case is when at least one is false.
6722 Or, if both are the same, we can apply the identity
6723 (x OR x) == x. */
6724 if (integer_zerop (partial))
6725 return t;
6726 else if (integer_zerop (t))
6727 return partial;
6728 else if (same_bool_result_p (t, partial))
6729 return t;
6730 }
6731 }
6732 }
6733 }
6734 return NULL_TREE;
6735}
6736
6737/* Try to simplify the AND of two comparisons defined by
6738 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
6739 If this can be done without constructing an intermediate value,
6740 return the resulting tree; otherwise NULL_TREE is returned.
6741 This function is deliberately asymmetric as it recurses on SSA_DEFs
6742 in the first comparison but not the second. */
6743
6744static tree
5f487a34 6745and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
e89065a1
SL
6746 enum tree_code code2, tree op2a, tree op2b)
6747{
ae22ac3c 6748 tree truth_type = truth_type_for (TREE_TYPE (op1a));
31ed6226 6749
e89065a1
SL
6750 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
6751 if (operand_equal_p (op1a, op2a, 0)
6752 && operand_equal_p (op1b, op2b, 0))
6753 {
eb9820c0 6754 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
6755 tree t = combine_comparisons (UNKNOWN_LOCATION,
6756 TRUTH_ANDIF_EXPR, code1, code2,
31ed6226 6757 truth_type, op1a, op1b);
e89065a1
SL
6758 if (t)
6759 return t;
6760 }
6761
6762 /* Likewise the swapped case of the above. */
6763 if (operand_equal_p (op1a, op2b, 0)
6764 && operand_equal_p (op1b, op2a, 0))
6765 {
eb9820c0 6766 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
6767 tree t = combine_comparisons (UNKNOWN_LOCATION,
6768 TRUTH_ANDIF_EXPR, code1,
6769 swap_tree_comparison (code2),
31ed6226 6770 truth_type, op1a, op1b);
e89065a1
SL
6771 if (t)
6772 return t;
6773 }
6774
e89065a1
SL
6775 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
6776 NAME's definition is a truth value. See if there are any simplifications
6777 that can be done against the NAME's definition. */
6778 if (TREE_CODE (op1a) == SSA_NAME
6779 && (code1 == NE_EXPR || code1 == EQ_EXPR)
6780 && (integer_zerop (op1b) || integer_onep (op1b)))
6781 {
6782 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
6783 || (code1 == NE_EXPR && integer_onep (op1b)));
355fe088 6784 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
e89065a1
SL
6785 switch (gimple_code (stmt))
6786 {
6787 case GIMPLE_ASSIGN:
6788 /* Try to simplify by copy-propagating the definition. */
5f487a34
LJH
6789 return and_var_with_comparison (type, op1a, invert, code2, op2a,
6790 op2b);
e89065a1
SL
6791
6792 case GIMPLE_PHI:
6793 /* If every argument to the PHI produces the same result when
6794 ANDed with the second comparison, we win.
6795 Do not do this unless the type is bool since we need a bool
6796 result here anyway. */
6797 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
6798 {
6799 tree result = NULL_TREE;
6800 unsigned i;
6801 for (i = 0; i < gimple_phi_num_args (stmt); i++)
6802 {
6803 tree arg = gimple_phi_arg_def (stmt, i);
6804
6805 /* If this PHI has itself as an argument, ignore it.
6806 If all the other args produce the same result,
6807 we're still OK. */
6808 if (arg == gimple_phi_result (stmt))
6809 continue;
6810 else if (TREE_CODE (arg) == INTEGER_CST)
6811 {
6812 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
6813 {
6814 if (!result)
6815 result = boolean_false_node;
6816 else if (!integer_zerop (result))
6817 return NULL_TREE;
6818 }
6819 else if (!result)
6820 result = fold_build2 (code2, boolean_type_node,
6821 op2a, op2b);
6822 else if (!same_bool_comparison_p (result,
6823 code2, op2a, op2b))
6824 return NULL_TREE;
6825 }
0e8b84ec
JJ
6826 else if (TREE_CODE (arg) == SSA_NAME
6827 && !SSA_NAME_IS_DEFAULT_DEF (arg))
e89065a1 6828 {
6c66f733 6829 tree temp;
355fe088 6830 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6c66f733
JJ
6831 /* In simple cases we can look through PHI nodes,
6832 but we have to be careful with loops.
6833 See PR49073. */
6834 if (! dom_info_available_p (CDI_DOMINATORS)
6835 || gimple_bb (def_stmt) == gimple_bb (stmt)
6836 || dominated_by_p (CDI_DOMINATORS,
6837 gimple_bb (def_stmt),
6838 gimple_bb (stmt)))
6839 return NULL_TREE;
5f487a34 6840 temp = and_var_with_comparison (type, arg, invert, code2,
6c66f733 6841 op2a, op2b);
e89065a1
SL
6842 if (!temp)
6843 return NULL_TREE;
6844 else if (!result)
6845 result = temp;
6846 else if (!same_bool_result_p (result, temp))
6847 return NULL_TREE;
6848 }
6849 else
6850 return NULL_TREE;
6851 }
6852 return result;
6853 }
6854
6855 default:
6856 break;
6857 }
6858 }
6859 return NULL_TREE;
6860}
6861
5f487a34
LJH
6862/* Helper function for maybe_fold_and_comparisons and maybe_fold_or_comparisons
6863 : try to simplify the AND/OR of the ssa variable VAR with the comparison
6864 specified by (OP2A CODE2 OP2B) from match.pd. Return NULL_EXPR if we can't
6865 simplify this to a single expression. As we are going to lower the cost
6866 of building SSA names / gimple stmts significantly, we need to allocate
6867 them ont the stack. This will cause the code to be a bit ugly. */
6868
6869static tree
6870maybe_fold_comparisons_from_match_pd (tree type, enum tree_code code,
6871 enum tree_code code1,
6872 tree op1a, tree op1b,
6873 enum tree_code code2, tree op2a,
6874 tree op2b)
6875{
6876 /* Allocate gimple stmt1 on the stack. */
6877 gassign *stmt1
6878 = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
6879 gimple_init (stmt1, GIMPLE_ASSIGN, 3);
6880 gimple_assign_set_rhs_code (stmt1, code1);
6881 gimple_assign_set_rhs1 (stmt1, op1a);
6882 gimple_assign_set_rhs2 (stmt1, op1b);
6883
6884 /* Allocate gimple stmt2 on the stack. */
6885 gassign *stmt2
6886 = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
6887 gimple_init (stmt2, GIMPLE_ASSIGN, 3);
6888 gimple_assign_set_rhs_code (stmt2, code2);
6889 gimple_assign_set_rhs1 (stmt2, op2a);
6890 gimple_assign_set_rhs2 (stmt2, op2b);
6891
6892 /* Allocate SSA names(lhs1) on the stack. */
6893 tree lhs1 = (tree)XALLOCA (tree_ssa_name);
6894 memset (lhs1, 0, sizeof (tree_ssa_name));
6895 TREE_SET_CODE (lhs1, SSA_NAME);
6896 TREE_TYPE (lhs1) = type;
6897 init_ssa_name_imm_use (lhs1);
6898
6899 /* Allocate SSA names(lhs2) on the stack. */
6900 tree lhs2 = (tree)XALLOCA (tree_ssa_name);
6901 memset (lhs2, 0, sizeof (tree_ssa_name));
6902 TREE_SET_CODE (lhs2, SSA_NAME);
6903 TREE_TYPE (lhs2) = type;
6904 init_ssa_name_imm_use (lhs2);
6905
6906 gimple_assign_set_lhs (stmt1, lhs1);
6907 gimple_assign_set_lhs (stmt2, lhs2);
6908
6909 gimple_match_op op (gimple_match_cond::UNCOND, code,
6910 type, gimple_assign_lhs (stmt1),
6911 gimple_assign_lhs (stmt2));
6912 if (op.resimplify (NULL, follow_all_ssa_edges))
6913 {
6914 if (gimple_simplified_result_is_gimple_val (&op))
6915 {
6916 tree res = op.ops[0];
6917 if (res == lhs1)
6918 return build2 (code1, type, op1a, op1b);
6919 else if (res == lhs2)
6920 return build2 (code2, type, op2a, op2b);
6921 else
6922 return res;
6923 }
ae9c3507
ML
6924 else if (op.code.is_tree_code ()
6925 && TREE_CODE_CLASS ((tree_code)op.code) == tcc_comparison)
6926 {
6927 tree op0 = op.ops[0];
6928 tree op1 = op.ops[1];
6929 if (op0 == lhs1 || op0 == lhs2 || op1 == lhs1 || op1 == lhs2)
6930 return NULL_TREE; /* not simple */
6931
6932 return build2 ((enum tree_code)op.code, op.type, op0, op1);
6933 }
5f487a34
LJH
6934 }
6935
6936 return NULL_TREE;
6937}
6938
e89065a1
SL
6939/* Try to simplify the AND of two comparisons, specified by
6940 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
6941 If this can be simplified to a single expression (without requiring
6942 introducing more SSA variables to hold intermediate values),
6943 return the resulting tree. Otherwise return NULL_TREE.
6944 If the result expression is non-null, it has boolean type. */
6945
6946tree
5f487a34
LJH
6947maybe_fold_and_comparisons (tree type,
6948 enum tree_code code1, tree op1a, tree op1b,
e89065a1
SL
6949 enum tree_code code2, tree op2a, tree op2b)
6950{
5f487a34 6951 if (tree t = and_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b))
e89065a1 6952 return t;
5f487a34
LJH
6953
6954 if (tree t = and_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b))
6955 return t;
6956
6957 if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_AND_EXPR, code1,
6958 op1a, op1b, code2, op2a,
6959 op2b))
6960 return t;
6961
6962 return NULL_TREE;
e89065a1
SL
6963}
6964
6965/* Helper function for or_comparisons_1: try to simplify the OR of the
6966 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
6967 If INVERT is true, invert the value of VAR before doing the OR.
6968 Return NULL_EXPR if we can't simplify this to a single expression. */
6969
6970static tree
5f487a34 6971or_var_with_comparison (tree type, tree var, bool invert,
e89065a1
SL
6972 enum tree_code code2, tree op2a, tree op2b)
6973{
6974 tree t;
355fe088 6975 gimple *stmt = SSA_NAME_DEF_STMT (var);
e89065a1
SL
6976
6977 /* We can only deal with variables whose definitions are assignments. */
6978 if (!is_gimple_assign (stmt))
6979 return NULL_TREE;
6980
6981 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
6982 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
6983 Then we only have to consider the simpler non-inverted cases. */
6984 if (invert)
5f487a34 6985 t = and_var_with_comparison_1 (type, stmt,
e89065a1
SL
6986 invert_tree_comparison (code2, false),
6987 op2a, op2b);
6988 else
5f487a34 6989 t = or_var_with_comparison_1 (type, stmt, code2, op2a, op2b);
e89065a1
SL
6990 return canonicalize_bool (t, invert);
6991}
6992
6993/* Try to simplify the OR of the ssa variable defined by the assignment
6994 STMT with the comparison specified by (OP2A CODE2 OP2B).
6995 Return NULL_EXPR if we can't simplify this to a single expression. */
6996
6997static tree
5f487a34 6998or_var_with_comparison_1 (tree type, gimple *stmt,
e89065a1
SL
6999 enum tree_code code2, tree op2a, tree op2b)
7000{
7001 tree var = gimple_assign_lhs (stmt);
7002 tree true_test_var = NULL_TREE;
7003 tree false_test_var = NULL_TREE;
7004 enum tree_code innercode = gimple_assign_rhs_code (stmt);
7005
7006 /* Check for identities like (var OR (var != 0)) => true . */
7007 if (TREE_CODE (op2a) == SSA_NAME
7008 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
7009 {
7010 if ((code2 == NE_EXPR && integer_zerop (op2b))
7011 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
7012 {
7013 true_test_var = op2a;
7014 if (var == true_test_var)
7015 return var;
7016 }
7017 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
7018 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
7019 {
7020 false_test_var = op2a;
7021 if (var == false_test_var)
7022 return boolean_true_node;
7023 }
7024 }
7025
7026 /* If the definition is a comparison, recurse on it. */
7027 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
7028 {
5f487a34 7029 tree t = or_comparisons_1 (type, innercode,
e89065a1
SL
7030 gimple_assign_rhs1 (stmt),
7031 gimple_assign_rhs2 (stmt),
7032 code2,
7033 op2a,
7034 op2b);
7035 if (t)
7036 return t;
7037 }
7038
7039 /* If the definition is an AND or OR expression, we may be able to
7040 simplify by reassociating. */
eb9820c0
KT
7041 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
7042 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
e89065a1
SL
7043 {
7044 tree inner1 = gimple_assign_rhs1 (stmt);
7045 tree inner2 = gimple_assign_rhs2 (stmt);
355fe088 7046 gimple *s;
e89065a1
SL
7047 tree t;
7048 tree partial = NULL_TREE;
eb9820c0 7049 bool is_or = (innercode == BIT_IOR_EXPR);
e89065a1
SL
7050
7051 /* Check for boolean identities that don't require recursive examination
7052 of inner1/inner2:
7053 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
7054 inner1 OR (inner1 AND inner2) => inner1
7055 !inner1 OR (inner1 OR inner2) => true
7056 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
7057 */
7058 if (inner1 == true_test_var)
7059 return (is_or ? var : inner1);
7060 else if (inner2 == true_test_var)
7061 return (is_or ? var : inner2);
7062 else if (inner1 == false_test_var)
7063 return (is_or
7064 ? boolean_true_node
5f487a34
LJH
7065 : or_var_with_comparison (type, inner2, false, code2, op2a,
7066 op2b));
e89065a1
SL
7067 else if (inner2 == false_test_var)
7068 return (is_or
7069 ? boolean_true_node
5f487a34
LJH
7070 : or_var_with_comparison (type, inner1, false, code2, op2a,
7071 op2b));
e89065a1
SL
7072
7073 /* Next, redistribute/reassociate the OR across the inner tests.
7074 Compute the first partial result, (inner1 OR (op2a code op2b)) */
7075 if (TREE_CODE (inner1) == SSA_NAME
7076 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
7077 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5f487a34 7078 && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
e89065a1
SL
7079 gimple_assign_rhs1 (s),
7080 gimple_assign_rhs2 (s),
7081 code2, op2a, op2b)))
7082 {
7083 /* Handle the OR case, where we are reassociating:
7084 (inner1 OR inner2) OR (op2a code2 op2b)
7085 => (t OR inner2)
7086 If the partial result t is a constant, we win. Otherwise
7087 continue on to try reassociating with the other inner test. */
8236c8eb 7088 if (is_or)
e89065a1
SL
7089 {
7090 if (integer_onep (t))
7091 return boolean_true_node;
7092 else if (integer_zerop (t))
7093 return inner2;
7094 }
7095
7096 /* Handle the AND case, where we are redistributing:
7097 (inner1 AND inner2) OR (op2a code2 op2b)
7098 => (t AND (inner2 OR (op2a code op2b))) */
8236c8eb
JJ
7099 else if (integer_zerop (t))
7100 return boolean_false_node;
7101
7102 /* Save partial result for later. */
7103 partial = t;
e89065a1
SL
7104 }
7105
7106 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
7107 if (TREE_CODE (inner2) == SSA_NAME
7108 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
7109 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5f487a34 7110 && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
e89065a1
SL
7111 gimple_assign_rhs1 (s),
7112 gimple_assign_rhs2 (s),
7113 code2, op2a, op2b)))
7114 {
7115 /* Handle the OR case, where we are reassociating:
7116 (inner1 OR inner2) OR (op2a code2 op2b)
8236c8eb
JJ
7117 => (inner1 OR t)
7118 => (t OR partial) */
7119 if (is_or)
e89065a1
SL
7120 {
7121 if (integer_zerop (t))
7122 return inner1;
7123 else if (integer_onep (t))
7124 return boolean_true_node;
8236c8eb
JJ
7125 /* If both are the same, we can apply the identity
7126 (x OR x) == x. */
7127 else if (partial && same_bool_result_p (t, partial))
7128 return t;
e89065a1
SL
7129 }
7130
7131 /* Handle the AND case, where we are redistributing:
7132 (inner1 AND inner2) OR (op2a code2 op2b)
7133 => (t AND (inner1 OR (op2a code2 op2b)))
7134 => (t AND partial) */
7135 else
7136 {
7137 if (integer_zerop (t))
7138 return boolean_false_node;
7139 else if (partial)
7140 {
7141 /* We already got a simplification for the other
7142 operand to the redistributed AND expression. The
7143 interesting case is when at least one is true.
7144 Or, if both are the same, we can apply the identity
8236c8eb 7145 (x AND x) == x. */
e89065a1
SL
7146 if (integer_onep (partial))
7147 return t;
7148 else if (integer_onep (t))
7149 return partial;
7150 else if (same_bool_result_p (t, partial))
8236c8eb 7151 return t;
e89065a1
SL
7152 }
7153 }
7154 }
7155 }
7156 return NULL_TREE;
7157}
7158
7159/* Try to simplify the OR of two comparisons defined by
7160 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
7161 If this can be done without constructing an intermediate value,
7162 return the resulting tree; otherwise NULL_TREE is returned.
7163 This function is deliberately asymmetric as it recurses on SSA_DEFs
7164 in the first comparison but not the second. */
7165
7166static tree
5f487a34 7167or_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
e89065a1
SL
7168 enum tree_code code2, tree op2a, tree op2b)
7169{
ae22ac3c 7170 tree truth_type = truth_type_for (TREE_TYPE (op1a));
31ed6226 7171
e89065a1
SL
7172 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
7173 if (operand_equal_p (op1a, op2a, 0)
7174 && operand_equal_p (op1b, op2b, 0))
7175 {
eb9820c0 7176 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
7177 tree t = combine_comparisons (UNKNOWN_LOCATION,
7178 TRUTH_ORIF_EXPR, code1, code2,
31ed6226 7179 truth_type, op1a, op1b);
e89065a1
SL
7180 if (t)
7181 return t;
7182 }
7183
7184 /* Likewise the swapped case of the above. */
7185 if (operand_equal_p (op1a, op2b, 0)
7186 && operand_equal_p (op1b, op2a, 0))
7187 {
eb9820c0 7188 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
7189 tree t = combine_comparisons (UNKNOWN_LOCATION,
7190 TRUTH_ORIF_EXPR, code1,
7191 swap_tree_comparison (code2),
31ed6226 7192 truth_type, op1a, op1b);
e89065a1
SL
7193 if (t)
7194 return t;
7195 }
7196
e89065a1
SL
7197 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
7198 NAME's definition is a truth value. See if there are any simplifications
7199 that can be done against the NAME's definition. */
7200 if (TREE_CODE (op1a) == SSA_NAME
7201 && (code1 == NE_EXPR || code1 == EQ_EXPR)
7202 && (integer_zerop (op1b) || integer_onep (op1b)))
7203 {
7204 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
7205 || (code1 == NE_EXPR && integer_onep (op1b)));
355fe088 7206 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
e89065a1
SL
7207 switch (gimple_code (stmt))
7208 {
7209 case GIMPLE_ASSIGN:
7210 /* Try to simplify by copy-propagating the definition. */
5f487a34
LJH
7211 return or_var_with_comparison (type, op1a, invert, code2, op2a,
7212 op2b);
e89065a1
SL
7213
7214 case GIMPLE_PHI:
7215 /* If every argument to the PHI produces the same result when
7216 ORed with the second comparison, we win.
7217 Do not do this unless the type is bool since we need a bool
7218 result here anyway. */
7219 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
7220 {
7221 tree result = NULL_TREE;
7222 unsigned i;
7223 for (i = 0; i < gimple_phi_num_args (stmt); i++)
7224 {
7225 tree arg = gimple_phi_arg_def (stmt, i);
7226
7227 /* If this PHI has itself as an argument, ignore it.
7228 If all the other args produce the same result,
7229 we're still OK. */
7230 if (arg == gimple_phi_result (stmt))
7231 continue;
7232 else if (TREE_CODE (arg) == INTEGER_CST)
7233 {
7234 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
7235 {
7236 if (!result)
7237 result = boolean_true_node;
7238 else if (!integer_onep (result))
7239 return NULL_TREE;
7240 }
7241 else if (!result)
7242 result = fold_build2 (code2, boolean_type_node,
7243 op2a, op2b);
7244 else if (!same_bool_comparison_p (result,
7245 code2, op2a, op2b))
7246 return NULL_TREE;
7247 }
0e8b84ec
JJ
7248 else if (TREE_CODE (arg) == SSA_NAME
7249 && !SSA_NAME_IS_DEFAULT_DEF (arg))
e89065a1 7250 {
6c66f733 7251 tree temp;
355fe088 7252 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6c66f733
JJ
7253 /* In simple cases we can look through PHI nodes,
7254 but we have to be careful with loops.
7255 See PR49073. */
7256 if (! dom_info_available_p (CDI_DOMINATORS)
7257 || gimple_bb (def_stmt) == gimple_bb (stmt)
7258 || dominated_by_p (CDI_DOMINATORS,
7259 gimple_bb (def_stmt),
7260 gimple_bb (stmt)))
7261 return NULL_TREE;
5f487a34 7262 temp = or_var_with_comparison (type, arg, invert, code2,
6c66f733 7263 op2a, op2b);
e89065a1
SL
7264 if (!temp)
7265 return NULL_TREE;
7266 else if (!result)
7267 result = temp;
7268 else if (!same_bool_result_p (result, temp))
7269 return NULL_TREE;
7270 }
7271 else
7272 return NULL_TREE;
7273 }
7274 return result;
7275 }
7276
7277 default:
7278 break;
7279 }
7280 }
7281 return NULL_TREE;
7282}
7283
7284/* Try to simplify the OR of two comparisons, specified by
7285 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
7286 If this can be simplified to a single expression (without requiring
7287 introducing more SSA variables to hold intermediate values),
7288 return the resulting tree. Otherwise return NULL_TREE.
7289 If the result expression is non-null, it has boolean type. */
7290
7291tree
5f487a34
LJH
7292maybe_fold_or_comparisons (tree type,
7293 enum tree_code code1, tree op1a, tree op1b,
e89065a1
SL
7294 enum tree_code code2, tree op2a, tree op2b)
7295{
5f487a34 7296 if (tree t = or_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b))
e89065a1 7297 return t;
cfef45c8 7298
5f487a34
LJH
7299 if (tree t = or_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b))
7300 return t;
7301
7302 if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_IOR_EXPR, code1,
7303 op1a, op1b, code2, op2a,
7304 op2b))
7305 return t;
7306
7307 return NULL_TREE;
7308}
cfef45c8
RG
7309
7310/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
7311
7312 Either NULL_TREE, a simplified but non-constant or a constant
7313 is returned.
7314
7315 ??? This should go into a gimple-fold-inline.h file to be eventually
7316 privatized with the single valueize function used in the various TUs
7317 to avoid the indirect function call overhead. */
7318
7319tree
355fe088 7320gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
d2a85801 7321 tree (*gvalueize) (tree))
cfef45c8 7322{
5d75ad95 7323 gimple_match_op res_op;
45cc9f96
RB
7324 /* ??? The SSA propagators do not correctly deal with following SSA use-def
7325 edges if there are intermediate VARYING defs. For this reason
7326 do not follow SSA edges here even though SCCVN can technically
7327 just deal fine with that. */
5d75ad95 7328 if (gimple_simplify (stmt, &res_op, NULL, gvalueize, valueize))
45cc9f96 7329 {
34050b6b 7330 tree res = NULL_TREE;
5d75ad95
RS
7331 if (gimple_simplified_result_is_gimple_val (&res_op))
7332 res = res_op.ops[0];
34050b6b 7333 else if (mprts_hook)
5d75ad95 7334 res = mprts_hook (&res_op);
34050b6b 7335 if (res)
45cc9f96 7336 {
34050b6b
RB
7337 if (dump_file && dump_flags & TDF_DETAILS)
7338 {
7339 fprintf (dump_file, "Match-and-simplified ");
7340 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
7341 fprintf (dump_file, " to ");
ef6cb4c7 7342 print_generic_expr (dump_file, res);
34050b6b
RB
7343 fprintf (dump_file, "\n");
7344 }
7345 return res;
45cc9f96 7346 }
45cc9f96
RB
7347 }
7348
cfef45c8
RG
7349 location_t loc = gimple_location (stmt);
7350 switch (gimple_code (stmt))
7351 {
7352 case GIMPLE_ASSIGN:
7353 {
7354 enum tree_code subcode = gimple_assign_rhs_code (stmt);
7355
7356 switch (get_gimple_rhs_class (subcode))
7357 {
7358 case GIMPLE_SINGLE_RHS:
7359 {
7360 tree rhs = gimple_assign_rhs1 (stmt);
7361 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
7362
7363 if (TREE_CODE (rhs) == SSA_NAME)
7364 {
7365 /* If the RHS is an SSA_NAME, return its known constant value,
7366 if any. */
7367 return (*valueize) (rhs);
7368 }
7369 /* Handle propagating invariant addresses into address
7370 operations. */
7371 else if (TREE_CODE (rhs) == ADDR_EXPR
7372 && !is_gimple_min_invariant (rhs))
7373 {
a90c8804 7374 poly_int64 offset = 0;
cfef45c8
RG
7375 tree base;
7376 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
7377 &offset,
7378 valueize);
7379 if (base
7380 && (CONSTANT_CLASS_P (base)
7381 || decl_address_invariant_p (base)))
7382 return build_invariant_address (TREE_TYPE (rhs),
7383 base, offset);
7384 }
7385 else if (TREE_CODE (rhs) == CONSTRUCTOR
7386 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
928686b1
RS
7387 && known_eq (CONSTRUCTOR_NELTS (rhs),
7388 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
cfef45c8 7389 {
794e3180
RS
7390 unsigned i, nelts;
7391 tree val;
cfef45c8 7392
928686b1 7393 nelts = CONSTRUCTOR_NELTS (rhs);
5ebaa477 7394 tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
cfef45c8
RG
7395 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
7396 {
7397 val = (*valueize) (val);
7398 if (TREE_CODE (val) == INTEGER_CST
7399 || TREE_CODE (val) == REAL_CST
7400 || TREE_CODE (val) == FIXED_CST)
794e3180 7401 vec.quick_push (val);
cfef45c8
RG
7402 else
7403 return NULL_TREE;
7404 }
7405
5ebaa477 7406 return vec.build ();
cfef45c8 7407 }
bdf37f7a
JH
7408 if (subcode == OBJ_TYPE_REF)
7409 {
7410 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
7411 /* If callee is constant, we can fold away the wrapper. */
7412 if (is_gimple_min_invariant (val))
7413 return val;
7414 }
cfef45c8
RG
7415
7416 if (kind == tcc_reference)
7417 {
7418 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
7419 || TREE_CODE (rhs) == REALPART_EXPR
7420 || TREE_CODE (rhs) == IMAGPART_EXPR)
7421 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
7422 {
7423 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
7424 return fold_unary_loc (EXPR_LOCATION (rhs),
7425 TREE_CODE (rhs),
7426 TREE_TYPE (rhs), val);
7427 }
7428 else if (TREE_CODE (rhs) == BIT_FIELD_REF
7429 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
7430 {
7431 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
7432 return fold_ternary_loc (EXPR_LOCATION (rhs),
7433 TREE_CODE (rhs),
7434 TREE_TYPE (rhs), val,
7435 TREE_OPERAND (rhs, 1),
7436 TREE_OPERAND (rhs, 2));
7437 }
7438 else if (TREE_CODE (rhs) == MEM_REF
7439 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
7440 {
7441 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
7442 if (TREE_CODE (val) == ADDR_EXPR
7443 && is_gimple_min_invariant (val))
7444 {
7445 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
7446 unshare_expr (val),
7447 TREE_OPERAND (rhs, 1));
7448 if (tem)
7449 rhs = tem;
7450 }
7451 }
7452 return fold_const_aggregate_ref_1 (rhs, valueize);
7453 }
7454 else if (kind == tcc_declaration)
7455 return get_symbol_constant_value (rhs);
7456 return rhs;
7457 }
7458
7459 case GIMPLE_UNARY_RHS:
f3582e54 7460 return NULL_TREE;
cfef45c8
RG
7461
7462 case GIMPLE_BINARY_RHS:
4b1b9e64
RB
7463 /* Translate &x + CST into an invariant form suitable for
7464 further propagation. */
7465 if (subcode == POINTER_PLUS_EXPR)
7466 {
4b1b9e64
RB
7467 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
7468 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
4b1b9e64
RB
7469 if (TREE_CODE (op0) == ADDR_EXPR
7470 && TREE_CODE (op1) == INTEGER_CST)
7471 {
7472 tree off = fold_convert (ptr_type_node, op1);
4e3d3e40
RB
7473 return build1_loc
7474 (loc, ADDR_EXPR, TREE_TYPE (op0),
4b1b9e64
RB
7475 fold_build2 (MEM_REF,
7476 TREE_TYPE (TREE_TYPE (op0)),
7477 unshare_expr (op0), off));
7478 }
7479 }
59c20dc7
RB
7480 /* Canonicalize bool != 0 and bool == 0 appearing after
7481 valueization. While gimple_simplify handles this
7482 it can get confused by the ~X == 1 -> X == 0 transform
7483 which we cant reduce to a SSA name or a constant
7484 (and we have no way to tell gimple_simplify to not
7485 consider those transforms in the first place). */
7486 else if (subcode == EQ_EXPR
7487 || subcode == NE_EXPR)
7488 {
7489 tree lhs = gimple_assign_lhs (stmt);
7490 tree op0 = gimple_assign_rhs1 (stmt);
7491 if (useless_type_conversion_p (TREE_TYPE (lhs),
7492 TREE_TYPE (op0)))
7493 {
7494 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
7495 op0 = (*valueize) (op0);
8861704d
RB
7496 if (TREE_CODE (op0) == INTEGER_CST)
7497 std::swap (op0, op1);
7498 if (TREE_CODE (op1) == INTEGER_CST
7499 && ((subcode == NE_EXPR && integer_zerop (op1))
7500 || (subcode == EQ_EXPR && integer_onep (op1))))
7501 return op0;
59c20dc7
RB
7502 }
7503 }
4b1b9e64 7504 return NULL_TREE;
cfef45c8
RG
7505
7506 case GIMPLE_TERNARY_RHS:
7507 {
7508 /* Handle ternary operators that can appear in GIMPLE form. */
7509 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
7510 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
7511 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
cfef45c8
RG
7512 return fold_ternary_loc (loc, subcode,
7513 gimple_expr_type (stmt), op0, op1, op2);
7514 }
7515
7516 default:
7517 gcc_unreachable ();
7518 }
7519 }
7520
7521 case GIMPLE_CALL:
7522 {
25583c4f 7523 tree fn;
538dd0b7 7524 gcall *call_stmt = as_a <gcall *> (stmt);
25583c4f
RS
7525
7526 if (gimple_call_internal_p (stmt))
31e071ae
MP
7527 {
7528 enum tree_code subcode = ERROR_MARK;
7529 switch (gimple_call_internal_fn (stmt))
7530 {
7531 case IFN_UBSAN_CHECK_ADD:
7532 subcode = PLUS_EXPR;
7533 break;
7534 case IFN_UBSAN_CHECK_SUB:
7535 subcode = MINUS_EXPR;
7536 break;
7537 case IFN_UBSAN_CHECK_MUL:
7538 subcode = MULT_EXPR;
7539 break;
68fa96d6
ML
7540 case IFN_BUILTIN_EXPECT:
7541 {
7542 tree arg0 = gimple_call_arg (stmt, 0);
7543 tree op0 = (*valueize) (arg0);
7544 if (TREE_CODE (op0) == INTEGER_CST)
7545 return op0;
7546 return NULL_TREE;
7547 }
31e071ae
MP
7548 default:
7549 return NULL_TREE;
7550 }
368b454d
JJ
7551 tree arg0 = gimple_call_arg (stmt, 0);
7552 tree arg1 = gimple_call_arg (stmt, 1);
7553 tree op0 = (*valueize) (arg0);
7554 tree op1 = (*valueize) (arg1);
31e071ae
MP
7555
7556 if (TREE_CODE (op0) != INTEGER_CST
7557 || TREE_CODE (op1) != INTEGER_CST)
368b454d
JJ
7558 {
7559 switch (subcode)
7560 {
7561 case MULT_EXPR:
7562 /* x * 0 = 0 * x = 0 without overflow. */
7563 if (integer_zerop (op0) || integer_zerop (op1))
7564 return build_zero_cst (TREE_TYPE (arg0));
7565 break;
7566 case MINUS_EXPR:
7567 /* y - y = 0 without overflow. */
7568 if (operand_equal_p (op0, op1, 0))
7569 return build_zero_cst (TREE_TYPE (arg0));
7570 break;
7571 default:
7572 break;
7573 }
7574 }
7575 tree res
7576 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
31e071ae
MP
7577 if (res
7578 && TREE_CODE (res) == INTEGER_CST
7579 && !TREE_OVERFLOW (res))
7580 return res;
7581 return NULL_TREE;
7582 }
25583c4f
RS
7583
7584 fn = (*valueize) (gimple_call_fn (stmt));
cfef45c8 7585 if (TREE_CODE (fn) == ADDR_EXPR
6773658a 7586 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
3d78e008 7587 && fndecl_built_in_p (TREE_OPERAND (fn, 0))
5c944c6c
RB
7588 && gimple_builtin_call_types_compatible_p (stmt,
7589 TREE_OPERAND (fn, 0)))
cfef45c8
RG
7590 {
7591 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
a6a0570f 7592 tree retval;
cfef45c8
RG
7593 unsigned i;
7594 for (i = 0; i < gimple_call_num_args (stmt); ++i)
7595 args[i] = (*valueize) (gimple_call_arg (stmt, i));
a6a0570f 7596 retval = fold_builtin_call_array (loc,
538dd0b7 7597 gimple_call_return_type (call_stmt),
cfef45c8 7598 fn, gimple_call_num_args (stmt), args);
cfef45c8 7599 if (retval)
5c944c6c
RB
7600 {
7601 /* fold_call_expr wraps the result inside a NOP_EXPR. */
7602 STRIP_NOPS (retval);
538dd0b7
DM
7603 retval = fold_convert (gimple_call_return_type (call_stmt),
7604 retval);
5c944c6c 7605 }
cfef45c8
RG
7606 return retval;
7607 }
7608 return NULL_TREE;
7609 }
7610
7611 default:
7612 return NULL_TREE;
7613 }
7614}
7615
7616/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
7617 Returns NULL_TREE if folding to a constant is not possible, otherwise
7618 returns a constant according to is_gimple_min_invariant. */
7619
7620tree
355fe088 7621gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
cfef45c8
RG
7622{
7623 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
7624 if (res && is_gimple_min_invariant (res))
7625 return res;
7626 return NULL_TREE;
7627}
7628
7629
7630/* The following set of functions are supposed to fold references using
7631 their constant initializers. */
7632
/* See if we can find a constructor defining the value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when the constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */
static tree
get_base_constructor (tree base, poly_int64_pod *bit_offset,
		      tree (*valueize)(tree))
{
  poly_int64 bit_offset2, size, max_size;
  bool reverse;

  if (TREE_CODE (base) == MEM_REF)
    {
      /* Fold the MEM_REF's constant offset into *BIT_OFFSET; bail out if
	 the combined offset does not fit a poly_int64.  */
      poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
      if (!boff.to_shwi (bit_offset))
	return NULL_TREE;

      if (valueize
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }
  else if (valueize
	   && TREE_CODE (base) == SSA_NAME)
    base = valueize (base);

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
	tree init = ctor_for_folding (base);

	/* Our semantic is exact opposite of ctor_for_folding;
	   NULL means unknown, while error_mark_node is 0.  */
	if (init == error_mark_node)
	  return NULL_TREE;
	if (!init)
	  return error_mark_node;
	return init;
      }

    case VIEW_CONVERT_EXPR:
      /* A view-convert does not move bits; look through it.  */
      return get_base_constructor (TREE_OPERAND (base, 0),
				   bit_offset, valueize);

    case ARRAY_REF:
    case COMPONENT_REF:
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
				      &reverse);
      /* Only constant, exactly-known extents can be accumulated into
	 *BIT_OFFSET.  */
      if (!known_size_p (max_size) || maybe_ne (size, max_size))
	return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case CONSTRUCTOR:
      return base;

    default:
      if (CONSTANT_CLASS_P (base))
	return base;

      return NULL_TREE;
    }
}
7708
/* CTOR is CONSTRUCTOR of an array type.  Fold a reference of SIZE bits
   to the memory at bit OFFSET.  When non-null, TYPE is the expected
   type of the reference; otherwise the type of the referenced element
   is used instead.  When SIZE is zero, attempt to fold a reference to
   the entire element which OFFSET refers to.  Increment *SUBOFF by
   the bit offset of the accessed element.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size,
			   tree from_decl,
			   unsigned HOST_WIDE_INT *suboff)
{
  offset_int low_bound;
  offset_int elt_size;
  offset_int access_index;
  tree domain_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects make no sense.  */
      if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
	return NULL_TREE;
      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = 0;
  /* Static constructors for variably sized objects make no sense.  */
  if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
    return NULL_TREE;
  elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* When TYPE is non-null, verify that it specifies a constant-sized
     access of a multiple of the array element size.  Avoid division
     by zero below when ELT_SIZE is zero, such as with the result of
     an initializer for a zero-length array or an empty struct.  */
  if (elt_size == 0
      || (type
	  && (!TYPE_SIZE_UNIT (type)
	      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST)))
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
				 elt_size);
  access_index += low_bound;

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  unsigned HOST_WIDE_INT elt_sz = elt_size.to_uhwi ();
  if (size > elt_sz * BITS_PER_UNIT)
    {
      /* The access spans multiple array elements: encode the bytes of
	 the covered elements into a buffer and re-interpret them as
	 TYPE.  First check native_encode_expr constraints.  */
      if (size > MAX_BITSIZE_MODE_ANY_MODE
	  || size % BITS_PER_UNIT != 0
	  || inner_offset % BITS_PER_UNIT != 0
	  || elt_sz > MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT)
	return NULL_TREE;

      unsigned ctor_idx;
      tree val = get_array_ctor_element_at_index (ctor, access_index,
						  &ctor_idx);
      if (!val && ctor_idx >= CONSTRUCTOR_NELTS (ctor))
	return build_zero_cst (type);

      /* native-encode adjacent ctor elements.  INDEX/MAX_INDEX track the
	 (possibly RANGE_EXPR) index span of the current ctor element so we
	 know when to advance to the next one; gaps read as zero.  */
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      unsigned bufoff = 0;
      offset_int index = 0;
      offset_int max_index = access_index;
      constructor_elt *elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
      if (!val)
	val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
      else if (!CONSTANT_CLASS_P (val))
	return NULL_TREE;
      if (!elt->index)
	;
      else if (TREE_CODE (elt->index) == RANGE_EXPR)
	{
	  index = wi::to_offset (TREE_OPERAND (elt->index, 0));
	  max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
	}
      else
	index = max_index = wi::to_offset (elt->index);
      index = wi::umax (index, access_index);
      do
	{
	  /* Clamp the element size to the remaining buffer space.  */
	  if (bufoff + elt_sz > sizeof (buf))
	    elt_sz = sizeof (buf) - bufoff;
	  int len = native_encode_expr (val, buf + bufoff, elt_sz,
					inner_offset / BITS_PER_UNIT);
	  if (len != (int) elt_sz - inner_offset / BITS_PER_UNIT)
	    return NULL_TREE;
	  /* Only the first element is read at a non-zero inner offset.  */
	  inner_offset = 0;
	  bufoff += len;

	  access_index += 1;
	  if (wi::cmpu (access_index, index) == 0)
	    val = elt->value;
	  else if (wi::cmpu (access_index, max_index) > 0)
	    {
	      ctor_idx++;
	      if (ctor_idx >= CONSTRUCTOR_NELTS (ctor))
		{
		  /* Past the last explicit initializer: zeros.  */
		  val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
		  ++max_index;
		}
	      else
		{
		  elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
		  index = 0;
		  max_index = access_index;
		  if (!elt->index)
		    ;
		  else if (TREE_CODE (elt->index) == RANGE_EXPR)
		    {
		      index = wi::to_offset (TREE_OPERAND (elt->index, 0));
		      max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
		    }
		  else
		    index = max_index = wi::to_offset (elt->index);
		  index = wi::umax (index, access_index);
		  if (wi::cmpu (access_index, index) == 0)
		    val = elt->value;
		  else
		    val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
		}
	    }
	}
      while (bufoff < size / BITS_PER_UNIT);
      *suboff += size;
      return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
    }

  if (tree val = get_array_ctor_element_at_index (ctor, access_index))
    {
      if (!size && TREE_CODE (val) != CONSTRUCTOR)
	{
	  /* For the final reference to the entire accessed element
	     (SIZE is zero), reset INNER_OFFSET, disregard TYPE (which
	     may be null) in favor of the type of the element, and set
	     SIZE to the size of the accessed element.  */
	  inner_offset = 0;
	  type = TREE_TYPE (val);
	  size = elt_sz * BITS_PER_UNIT;
	}
      else if (size && access_index < CONSTRUCTOR_NELTS (ctor) - 1
	       && TREE_CODE (val) == CONSTRUCTOR
	       && (elt_sz * BITS_PER_UNIT - inner_offset) < size)
	/* If this isn't the last element in the CTOR and a CTOR itself
	   and it does not cover the whole object we are requesting give up
	   since we're not set up for combining from multiple CTORs.  */
	return NULL_TREE;

      *suboff += access_index.to_uhwi () * elt_sz * BITS_PER_UNIT;
      return fold_ctor_reference (type, val, inner_offset, size, from_decl,
				  suboff);
    }

  /* Memory not explicitly mentioned in constructor is 0 (or
     the reference is out of range).  */
  return type ? build_zero_cst (type) : NULL_TREE;
}
7878
/* CTOR is CONSTRUCTOR of an aggregate or vector.  Fold a reference
   of SIZE bits to the memory at bit OFFSET.  When non-null, TYPE
   is the expected type of the reference; otherwise the type of
   the referenced member is used instead.  When SIZE is zero,
   attempt to fold a reference to the entire member which OFFSET
   refers to.  Increment *SUBOFF by the bit offset of the accessed
   member.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl,
			      unsigned HOST_WIDE_INT *suboff)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  /* Walk the explicitly initialized fields looking for one that
     overlaps the requested bit range.  */
  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);

      if (!field_size)
	{
	  /* Determine the size of the flexible array member from
	     the size of the initializer provided for it.  */
	  field_size = TYPE_SIZE (TREE_TYPE (cval));
	}

      /* Variable sized objects in static constructors makes no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      offset_int bitoffset
	= (wi::to_offset (field_offset)
	   + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
      /* Compute bit offset where the field ends.  */
      offset_int bitoffset_end;
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + wi::to_offset (field_size);
      else
	bitoffset_end = 0;

      /* Compute the bit offset of the end of the desired access.
	 As a special case, if the size of the desired access is
	 zero, assume the access is to the entire field (and let
	 the caller make any necessary adjustments by storing
	 the actual bounds of the field in FIELDBOUNDS).  */
      offset_int access_end = offset_int (offset);
      if (size)
	access_end += size;
      else
	access_end = bitoffset_end;

      /* Is there any overlap between the desired access at
	 [OFFSET, OFFSET+SIZE) and the offset of the field within
	 the object at [BITOFFSET, BITOFFSET_END)?  */
      if (wi::cmps (access_end, bitoffset) > 0
	  && (field_size == NULL_TREE
	      || wi::lts_p (offset, bitoffset_end)))
	{
	  *suboff += bitoffset.to_uhwi ();

	  if (!size && TREE_CODE (cval) != CONSTRUCTOR)
	    {
	      /* For the final reference to the entire accessed member
		 (SIZE is zero), reset OFFSET, disregard TYPE (which may
		 be null) in favor of the type of the member, and set
		 SIZE to the size of the accessed member.  */
	      offset = bitoffset.to_uhwi ();
	      type = TREE_TYPE (cval);
	      size = (bitoffset_end - bitoffset).to_uhwi ();
	    }

	  /* We do have overlap.  Now see if the field is large enough
	     to cover the access.  Give up for accesses that extend
	     beyond the end of the object or that span multiple fields.  */
	  if (wi::cmps (access_end, bitoffset_end) > 0)
	    return NULL_TREE;
	  if (offset < bitoffset)
	    return NULL_TREE;

	  /* Recurse into the field's initializer at the adjusted offset.  */
	  offset_int inner_offset = offset_int (offset) - bitoffset;
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl, suboff);
	}
    }

  /* No overlapping initialized field: the memory reads as zero when the
     expected type is known, otherwise we cannot build a result.  */
  if (!type)
    return NULL_TREE;

  return build_zero_cst (type);
}
7981
/* CTOR is value initializing memory.  Fold a reference of TYPE and
   bit size POLY_SIZE to the memory at bit POLY_OFFSET.  When POLY_SIZE
   is zero, attempt to fold a reference to the entire subobject
   which OFFSET refers to.  This is used when folding accesses to
   string members of aggregates.  When non-null, increment *SUBOFF by
   the bit offset of the accessed subobject.  */

tree
fold_ctor_reference (tree type, tree ctor, const poly_uint64 &poly_offset,
		     const poly_uint64 &poly_size, tree from_decl,
		     unsigned HOST_WIDE_INT *suboff /* = NULL */)
{
  tree ret;

  /* We found the field with exact match.  */
  if (type
      && useless_type_conversion_p (type, TREE_TYPE (ctor))
      && known_eq (poly_offset, 0U))
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* The remaining optimizations need a constant size and offset.  */
  unsigned HOST_WIDE_INT size, offset;
  if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
    return NULL_TREE;

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && !compare_tree_int (TYPE_SIZE (type), size)
      && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      if (ret)
	{
	  ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
	  if (ret)
	    STRIP_USELESS_TYPE_CONVERSION (ret);
	}
      return ret;
    }
  /* For constants and byte-aligned/sized reads try to go through
     native_encode/interpret.  */
  if (CONSTANT_CLASS_P (ctor)
      && BITS_PER_UNIT == 8
      && offset % BITS_PER_UNIT == 0
      && offset / BITS_PER_UNIT <= INT_MAX
      && size % BITS_PER_UNIT == 0
      && size <= MAX_BITSIZE_MODE_ANY_MODE
      && can_native_interpret_type_p (type))
    {
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
				    offset / BITS_PER_UNIT);
      if (len > 0)
	return native_interpret_expr (type, buf, len);
    }
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {
      unsigned HOST_WIDE_INT dummy = 0;
      if (!suboff)
	suboff = &dummy;

      /* Dispatch on the constructed type: arrays and vectors are indexed,
	 everything else is folded field by field.  */
      tree ret;
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
	  || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
	ret = fold_array_ctor_reference (type, ctor, offset, size,
					 from_decl, suboff);
      else
	ret = fold_nonarray_ctor_reference (type, ctor, offset, size,
					    from_decl, suboff);

      /* Fall back to native_encode_initializer.  Needs to be done
	 only in the outermost fold_ctor_reference call (because it itself
	 recurses into CONSTRUCTORs) and doesn't update suboff.
	 SUBOFF == &DUMMY identifies the outermost call: recursive calls
	 always pass the caller's non-dummy SUBOFF down.  */
      if (ret == NULL_TREE
	  && suboff == &dummy
	  && BITS_PER_UNIT == 8
	  && offset % BITS_PER_UNIT == 0
	  && offset / BITS_PER_UNIT <= INT_MAX
	  && size % BITS_PER_UNIT == 0
	  && size <= MAX_BITSIZE_MODE_ANY_MODE
	  && can_native_interpret_type_p (type))
	{
	  unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
	  int len = native_encode_initializer (ctor, buf, size / BITS_PER_UNIT,
					       offset / BITS_PER_UNIT);
	  if (len > 0)
	    return native_interpret_expr (type, buf, len);
	}

      return ret;
    }

  return NULL_TREE;
}
8078
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  poly_int64 offset, size, max_size;
  tree tem;
  bool reverse;

  /* Volatile accesses must not be folded away.  */
  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (DECL_P (t))
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && poly_int_tree_p (idx))
	{
	  tree low_bound, unit_size;

	  /* If the resulting bit-offset is constant, track it.  */
	  if ((low_bound = array_ref_low_bound (t),
	       poly_int_tree_p (low_bound))
	      && (unit_size = array_ref_element_size (t),
		  tree_fits_uhwi_p (unit_size)))
	    {
	      /* Bit offset of the indexed element:
		 (idx - low_bound) * unit_size * BITS_PER_UNIT,
		 sign-extended to sizetype precision.  */
	      poly_offset_int woffset
		= wi::sext (wi::to_poly_offset (idx)
			    - wi::to_poly_offset (low_bound),
			    TYPE_PRECISION (sizetype));
	      woffset *= tree_to_uhwi (unit_size);
	      woffset *= BITS_PER_UNIT;
	      if (woffset.to_shwi (&offset))
		{
		  base = TREE_OPERAND (t, 0);
		  ctor = get_base_constructor (base, &offset, valueize);
		  /* Empty constructor.  Always fold to 0.  */
		  if (ctor == error_mark_node)
		    return build_zero_cst (TREE_TYPE (t));
		  /* Out of bound array access.  Value is undefined,
		     but don't fold.  */
		  if (maybe_lt (offset, 0))
		    return NULL_TREE;
		  /* We cannot determine ctor.  */
		  if (!ctor)
		    return NULL_TREE;
		  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					      tree_to_uhwi (unit_size)
					      * BITS_PER_UNIT,
					      base);
		}
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (!known_size_p (max_size) || maybe_ne (max_size, size))
	return NULL_TREE;
      /* We cannot determine ctor.  */
      if (!ctor)
	return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (maybe_lt (offset, 0))
	return NULL_TREE;

      tem = fold_ctor_reference (TREE_TYPE (t), ctor, offset, size, base);
      if (tem)
	return tem;

      /* For bit field reads try to read the representative and
	 adjust.  */
      if (TREE_CODE (t) == COMPONENT_REF
	  && DECL_BIT_FIELD (TREE_OPERAND (t, 1))
	  && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)))
	{
	  HOST_WIDE_INT csize, coffset;
	  tree field = TREE_OPERAND (t, 1);
	  tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (repr))
	      && size.is_constant (&csize)
	      && offset.is_constant (&coffset)
	      && (coffset % BITS_PER_UNIT != 0
		  || csize % BITS_PER_UNIT != 0)
	      && !reverse
	      && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN)
	    {
	      /* Bit offset of FIELD within its representative REPR.  */
	      poly_int64 bitoffset;
	      poly_uint64 field_offset, repr_offset;
	      if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
		  && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
		bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
	      else
		bitoffset = 0;
	      bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
			    - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
	      HOST_WIDE_INT bitoff;
	      int diff = (TYPE_PRECISION (TREE_TYPE (repr))
			  - TYPE_PRECISION (TREE_TYPE (field)));
	      if (bitoffset.is_constant (&bitoff)
		  && bitoff >= 0
		  && bitoff <= diff)
		{
		  /* Read the whole representative, then shift the value
		     of FIELD into place according to endianness.  */
		  offset -= bitoff;
		  size = tree_to_uhwi (DECL_SIZE (repr));

		  tem = fold_ctor_reference (TREE_TYPE (repr), ctor, offset,
					     size, base);
		  if (tem && TREE_CODE (tem) == INTEGER_CST)
		    {
		      if (!BYTES_BIG_ENDIAN)
			tem = wide_int_to_tree (TREE_TYPE (field),
						wi::lrshift (wi::to_wide (tem),
							     bitoff));
		      else
			tem = wide_int_to_tree (TREE_TYPE (field),
						wi::lrshift (wi::to_wide (tem),
							     diff - bitoff));
		      return tem;
		    }
		}
	    }
	}
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
8248
8249tree
8250fold_const_aggregate_ref (tree t)
8251{
8252 return fold_const_aggregate_ref_1 (t, NULL);
8253}
06bc3ec7 8254
/* Lookup virtual method with index TOKEN in a virtual table V
   at OFFSET.
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by a non-C++ produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
				   tree v,
				   unsigned HOST_WIDE_INT offset,
				   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  if (can_refer)
    *can_refer = true;

  /* First of all double check we have virtual table.  */
  if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that ctor seem unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  /* Convert OFFSET (bytes) and TOKEN (slot index) into a bit offset.  */
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
			       offset, size, NULL);
     but in a constant time.  We expect that frontend produced a simple
     array without indexed initializers.  */

  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));

  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* The C++ FE can now produce indexed fields, and we check if the indexes
     match.  */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      tree idx = CONSTRUCTOR_ELT (init, access_index)->index;
      gcc_checking_assert (!idx || tree_to_uhwi (idx) == access_index);
      STRIP_NOPS (fn);
    }
  else
    fn = NULL;

  /* For type inconsistent program we may end up looking up virtual method
     in virtual table that does not contain TOKEN entries.  We may overrun
     the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When cgraph node is missing and function is not public, we cannot
	 devirtualize.  This can happen in WHOPR when the actual method
	 ends up in other partition, because we found devirtualization
	 possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
	{
	  if (can_refer)
	    {
	      *can_refer = false;
	      return fn;
	    }
	  return NULL_TREE;
	}
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_node::get_create (fn);

  return fn;
}
8364
/* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
   is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by non-C++ produced symbol). Otherwise just return NULL in that case. */

tree
gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
				  bool *can_refer)
{
  unsigned HOST_WIDE_INT offset;
  tree v;

  v = BINFO_VTABLE (known_binfo);
  /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;

  /* Decompose the vtable pointer expression into the vtable VAR_DECL
     and a byte offset into it; bail out (and tell the caller the target
     is unknown) if the expression does not have the expected shape.  */
  if (!vtable_pointer_value_to_vtable (v, &v, &offset))
    {
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
}
8393
/* Given a pointer value T, return a simplified version of an
   indirection through T, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view. */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  /* Give up on ref-all pointers: their accesses may alias anything,
     so rewriting the indirection could lose that property.  */
  if (!POINTER_TYPE_P (subtype)
      || TYPE_REF_CAN_ALIAS_ALL (ptype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
        return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
       {
	 tree type_domain = TYPE_DOMAIN (optype);
	 tree min_val = size_zero_node;
	 /* Use the array's lower bound as the index when it is constant.  */
	 if (type_domain && TYPE_MIN_VALUE (type_domain))
	   min_val = TYPE_MIN_VALUE (type_domain);
	 if (TREE_CODE (min_val) == INTEGER_CST)
	   return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
       }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
        return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
        {
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
          return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
        }
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
          unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
          tree part_width = TYPE_SIZE (type);
          unsigned HOST_WIDE_INT part_widthi
	    = tree_to_shwi (part_width) / BITS_PER_UNIT;
          unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	  tree index = bitsize_int (indexi);
	  /* Only fold when the element index is within the vector.  */
	  if (known_lt (offset / part_widthi,
			TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
	    return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
				part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
        {
	  /* The offset must be exactly one element (the imaginary part).  */
          tree size = TYPE_SIZE_UNIT (type);
          if (tree_int_cst_equal (size, off))
            return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
        }

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    wide_int_to_tree (ptype, wi::to_wide (off)));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      /* Recurse on the inner indirection first; fall back to a plain
	 INDIRECT_REF when it does not simplify.  */
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
19e51b40
JJ
8514
8515/* Return true if CODE is an operation that when operating on signed
8516 integer types involves undefined behavior on overflow and the
8517 operation can be expressed with unsigned arithmetic. */
8518
8519bool
8520arith_code_with_undefined_signed_overflow (tree_code code)
8521{
8522 switch (code)
8523 {
8e2c037d 8524 case ABS_EXPR:
19e51b40
JJ
8525 case PLUS_EXPR:
8526 case MINUS_EXPR:
8527 case MULT_EXPR:
8528 case NEGATE_EXPR:
8529 case POINTER_PLUS_EXPR:
8530 return true;
8531 default:
8532 return false;
8533 }
8534}
8535
8536/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
8537 operation that can be transformed to unsigned arithmetic by converting
8538 its operand, carrying out the operation in the corresponding unsigned
8539 type and converting the result back to the original type.
8540
8541 Returns a sequence of statements that replace STMT and also contain
8542 a modified form of STMT itself. */
8543
8544gimple_seq
355fe088 8545rewrite_to_defined_overflow (gimple *stmt)
19e51b40
JJ
8546{
8547 if (dump_file && (dump_flags & TDF_DETAILS))
8548 {
8549 fprintf (dump_file, "rewriting stmt with undefined signed "
8550 "overflow ");
8551 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
8552 }
8553
8554 tree lhs = gimple_assign_lhs (stmt);
8555 tree type = unsigned_type_for (TREE_TYPE (lhs));
8556 gimple_seq stmts = NULL;
8e2c037d
RB
8557 if (gimple_assign_rhs_code (stmt) == ABS_EXPR)
8558 gimple_assign_set_rhs_code (stmt, ABSU_EXPR);
8559 else
8560 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
8561 {
8562 tree op = gimple_op (stmt, i);
8563 op = gimple_convert (&stmts, type, op);
8564 gimple_set_op (stmt, i, op);
8565 }
19e51b40
JJ
8566 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
8567 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
8568 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
f74c4b2c 8569 gimple_set_modified (stmt, true);
19e51b40 8570 gimple_seq_add_stmt (&stmts, stmt);
355fe088 8571 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
19e51b40
JJ
8572 gimple_seq_add_stmt (&stmts, cvt);
8573
8574 return stmts;
8575}
d4f5cd5e 8576
3d2cf79f 8577
c26de36d
RB
8578/* The valueization hook we use for the gimple_build API simplification.
8579 This makes us match fold_buildN behavior by only combining with
8580 statements in the sequence(s) we are currently building. */
8581
8582static tree
8583gimple_build_valueize (tree op)
8584{
8585 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
8586 return op;
8587 return NULL_TREE;
8588}
8589
3d2cf79f 8590/* Build the expression CODE OP0 of type TYPE with location LOC,
c26de36d 8591 simplifying it first if possible. Returns the built
3d2cf79f
RB
8592 expression value and appends statements possibly defining it
8593 to SEQ. */
8594
8595tree
8596gimple_build (gimple_seq *seq, location_t loc,
c26de36d 8597 enum tree_code code, tree type, tree op0)
3d2cf79f 8598{
c26de36d 8599 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
3d2cf79f
RB
8600 if (!res)
8601 {
a15ebbcd 8602 res = create_tmp_reg_or_ssa_name (type);
355fe088 8603 gimple *stmt;
3d2cf79f
RB
8604 if (code == REALPART_EXPR
8605 || code == IMAGPART_EXPR
8606 || code == VIEW_CONVERT_EXPR)
0d0e4a03 8607 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
3d2cf79f 8608 else
0d0e4a03 8609 stmt = gimple_build_assign (res, code, op0);
3d2cf79f
RB
8610 gimple_set_location (stmt, loc);
8611 gimple_seq_add_stmt_without_update (seq, stmt);
8612 }
8613 return res;
8614}
8615
8616/* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
c26de36d 8617 simplifying it first if possible. Returns the built
3d2cf79f
RB
8618 expression value and appends statements possibly defining it
8619 to SEQ. */
8620
8621tree
8622gimple_build (gimple_seq *seq, location_t loc,
c26de36d 8623 enum tree_code code, tree type, tree op0, tree op1)
3d2cf79f 8624{
c26de36d 8625 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
3d2cf79f
RB
8626 if (!res)
8627 {
a15ebbcd 8628 res = create_tmp_reg_or_ssa_name (type);
355fe088 8629 gimple *stmt = gimple_build_assign (res, code, op0, op1);
3d2cf79f
RB
8630 gimple_set_location (stmt, loc);
8631 gimple_seq_add_stmt_without_update (seq, stmt);
8632 }
8633 return res;
8634}
8635
8636/* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
c26de36d 8637 simplifying it first if possible. Returns the built
3d2cf79f
RB
8638 expression value and appends statements possibly defining it
8639 to SEQ. */
8640
8641tree
8642gimple_build (gimple_seq *seq, location_t loc,
c26de36d 8643 enum tree_code code, tree type, tree op0, tree op1, tree op2)
3d2cf79f
RB
8644{
8645 tree res = gimple_simplify (code, type, op0, op1, op2,
c26de36d 8646 seq, gimple_build_valueize);
3d2cf79f
RB
8647 if (!res)
8648 {
a15ebbcd 8649 res = create_tmp_reg_or_ssa_name (type);
355fe088 8650 gimple *stmt;
3d2cf79f 8651 if (code == BIT_FIELD_REF)
0d0e4a03
JJ
8652 stmt = gimple_build_assign (res, code,
8653 build3 (code, type, op0, op1, op2));
3d2cf79f 8654 else
0d0e4a03 8655 stmt = gimple_build_assign (res, code, op0, op1, op2);
3d2cf79f
RB
8656 gimple_set_location (stmt, loc);
8657 gimple_seq_add_stmt_without_update (seq, stmt);
8658 }
8659 return res;
8660}
8661
93a73251
MM
8662/* Build the call FN () with a result of type TYPE (or no result if TYPE is
8663 void) with a location LOC. Returns the built expression value (or NULL_TREE
8664 if TYPE is void) and appends statements possibly defining it to SEQ. */
8665
8666tree
8667gimple_build (gimple_seq *seq, location_t loc, combined_fn fn, tree type)
8668{
8669 tree res = NULL_TREE;
8670 gcall *stmt;
8671 if (internal_fn_p (fn))
8672 stmt = gimple_build_call_internal (as_internal_fn (fn), 0);
8673 else
8674 {
8675 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
8676 stmt = gimple_build_call (decl, 0);
8677 }
8678 if (!VOID_TYPE_P (type))
8679 {
8680 res = create_tmp_reg_or_ssa_name (type);
8681 gimple_call_set_lhs (stmt, res);
8682 }
8683 gimple_set_location (stmt, loc);
8684 gimple_seq_add_stmt_without_update (seq, stmt);
8685 return res;
8686}
8687
3d2cf79f
RB
8688/* Build the call FN (ARG0) with a result of type TYPE
8689 (or no result if TYPE is void) with location LOC,
c26de36d 8690 simplifying it first if possible. Returns the built
3d2cf79f
RB
8691 expression value (or NULL_TREE if TYPE is void) and appends
8692 statements possibly defining it to SEQ. */
8693
8694tree
eb69361d
RS
8695gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
8696 tree type, tree arg0)
3d2cf79f 8697{
c26de36d 8698 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
3d2cf79f
RB
8699 if (!res)
8700 {
eb69361d
RS
8701 gcall *stmt;
8702 if (internal_fn_p (fn))
8703 stmt = gimple_build_call_internal (as_internal_fn (fn), 1, arg0);
8704 else
8705 {
8706 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
8707 stmt = gimple_build_call (decl, 1, arg0);
8708 }
3d2cf79f
RB
8709 if (!VOID_TYPE_P (type))
8710 {
a15ebbcd 8711 res = create_tmp_reg_or_ssa_name (type);
3d2cf79f
RB
8712 gimple_call_set_lhs (stmt, res);
8713 }
8714 gimple_set_location (stmt, loc);
8715 gimple_seq_add_stmt_without_update (seq, stmt);
8716 }
8717 return res;
8718}
8719
8720/* Build the call FN (ARG0, ARG1) with a result of type TYPE
8721 (or no result if TYPE is void) with location LOC,
c26de36d 8722 simplifying it first if possible. Returns the built
3d2cf79f
RB
8723 expression value (or NULL_TREE if TYPE is void) and appends
8724 statements possibly defining it to SEQ. */
8725
8726tree
eb69361d
RS
8727gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
8728 tree type, tree arg0, tree arg1)
3d2cf79f 8729{
c26de36d 8730 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
3d2cf79f
RB
8731 if (!res)
8732 {
eb69361d
RS
8733 gcall *stmt;
8734 if (internal_fn_p (fn))
8735 stmt = gimple_build_call_internal (as_internal_fn (fn), 2, arg0, arg1);
8736 else
8737 {
8738 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
8739 stmt = gimple_build_call (decl, 2, arg0, arg1);
8740 }
3d2cf79f
RB
8741 if (!VOID_TYPE_P (type))
8742 {
a15ebbcd 8743 res = create_tmp_reg_or_ssa_name (type);
3d2cf79f
RB
8744 gimple_call_set_lhs (stmt, res);
8745 }
8746 gimple_set_location (stmt, loc);
8747 gimple_seq_add_stmt_without_update (seq, stmt);
8748 }
8749 return res;
8750}
8751
8752/* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
8753 (or no result if TYPE is void) with location LOC,
c26de36d 8754 simplifying it first if possible. Returns the built
3d2cf79f
RB
8755 expression value (or NULL_TREE if TYPE is void) and appends
8756 statements possibly defining it to SEQ. */
8757
8758tree
eb69361d
RS
8759gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
8760 tree type, tree arg0, tree arg1, tree arg2)
3d2cf79f 8761{
c26de36d
RB
8762 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
8763 seq, gimple_build_valueize);
3d2cf79f
RB
8764 if (!res)
8765 {
eb69361d
RS
8766 gcall *stmt;
8767 if (internal_fn_p (fn))
8768 stmt = gimple_build_call_internal (as_internal_fn (fn),
8769 3, arg0, arg1, arg2);
8770 else
8771 {
8772 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
8773 stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
8774 }
3d2cf79f
RB
8775 if (!VOID_TYPE_P (type))
8776 {
a15ebbcd 8777 res = create_tmp_reg_or_ssa_name (type);
3d2cf79f
RB
8778 gimple_call_set_lhs (stmt, res);
8779 }
8780 gimple_set_location (stmt, loc);
8781 gimple_seq_add_stmt_without_update (seq, stmt);
8782 }
8783 return res;
8784}
8785
8786/* Build the conversion (TYPE) OP with a result of type TYPE
8787 with location LOC if such conversion is neccesary in GIMPLE,
8788 simplifying it first.
8789 Returns the built expression value and appends
8790 statements possibly defining it to SEQ. */
d4f5cd5e
RB
8791
8792tree
8793gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
8794{
8795 if (useless_type_conversion_p (type, TREE_TYPE (op)))
8796 return op;
3d2cf79f 8797 return gimple_build (seq, loc, NOP_EXPR, type, op);
d4f5cd5e 8798}
68e57f04 8799
74e3c262
RB
8800/* Build the conversion (ptrofftype) OP with a result of a type
8801 compatible with ptrofftype with location LOC if such conversion
8802 is neccesary in GIMPLE, simplifying it first.
8803 Returns the built expression value and appends
8804 statements possibly defining it to SEQ. */
8805
8806tree
8807gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
8808{
8809 if (ptrofftype_p (TREE_TYPE (op)))
8810 return op;
8811 return gimple_convert (seq, loc, sizetype, op);
8812}
8813
/* Build a vector of type TYPE in which each element has the value OP.
   Return a gimple value for the result, appending any new statements
   to SEQ.  */

tree
gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
			      tree op)
{
  /* Variable-length vectors with a non-constant element cannot be
     expressed with build_vector_from_val; use VEC_DUPLICATE_EXPR.  */
  if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
      && !CONSTANT_CLASS_P (op))
    return gimple_build (seq, loc, VEC_DUPLICATE_EXPR, type, op);

  tree res, vec = build_vector_from_val (type, op);
  if (is_gimple_val (vec))
    return vec;
  if (gimple_in_ssa_p (cfun))
    res = make_ssa_name (type);
  else
    res = create_tmp_reg (type);
  gimple *stmt = gimple_build_assign (res, vec);
  gimple_set_location (stmt, loc);
  gimple_seq_add_stmt_without_update (seq, stmt);
  return res;
}
8838
/* Build a vector from BUILDER, handling the case in which some elements
   are non-constant.  Return a gimple value for the result, appending any
   new instructions to SEQ.

   BUILDER must not have a stepped encoding on entry.  This is because
   the function is not geared up to handle the arithmetic that would
   be needed in the variable case, and any code building a vector that
   is known to be constant should use BUILDER->build () directly.  */

tree
gimple_build_vector (gimple_seq *seq, location_t loc,
		     tree_vector_builder *builder)
{
  gcc_assert (builder->nelts_per_pattern () <= 2);
  unsigned int encoded_nelts = builder->encoded_nelts ();
  /* Scan only the encoded elements; if all of them are constants the
     whole vector is constant and builder->build () below applies.  */
  for (unsigned int i = 0; i < encoded_nelts; ++i)
    if (!CONSTANT_CLASS_P ((*builder)[i]))
      {
	/* At least one element is non-constant: emit a CONSTRUCTOR
	   assignment covering every element.  */
	tree type = builder->type ();
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
	vec<constructor_elt, va_gc> *v;
	vec_alloc (v, nelts);
	for (i = 0; i < nelts; ++i)
	  CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));

	tree res;
	if (gimple_in_ssa_p (cfun))
	  res = make_ssa_name (type);
	else
	  res = create_tmp_reg (type);
	gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
	gimple_set_location (stmt, loc);
	gimple_seq_add_stmt_without_update (seq, stmt);
	return res;
      }
  return builder->build ();
}
8876
93a73251
MM
8877/* Emit gimple statements into &stmts that take a value given in OLD_SIZE
8878 and generate a value guaranteed to be rounded upwards to ALIGN.
8879
8880 Return the tree node representing this size, it is of TREE_TYPE TYPE. */
8881
8882tree
8883gimple_build_round_up (gimple_seq *seq, location_t loc, tree type,
8884 tree old_size, unsigned HOST_WIDE_INT align)
8885{
8886 unsigned HOST_WIDE_INT tg_mask = align - 1;
8887 /* tree new_size = (old_size + tg_mask) & ~tg_mask; */
8888 gcc_assert (INTEGRAL_TYPE_P (type));
8889 tree tree_mask = build_int_cst (type, tg_mask);
8890 tree oversize = gimple_build (seq, loc, PLUS_EXPR, type, old_size,
8891 tree_mask);
8892
8893 tree mask = build_int_cst (type, -align);
8894 return gimple_build (seq, loc, BIT_AND_EXPR, type, oversize, mask);
8895}
8896
/* Return true if the result of assignment STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				   int depth)
{
  /* Dispatch on the RHS class to the matching fold-const query.  */
  enum tree_code code = gimple_assign_rhs_code (stmt);
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
					     gimple_expr_type (stmt),
					     gimple_assign_rhs1 (stmt),
					     strict_overflow_p, depth);
    case GIMPLE_BINARY_RHS:
      return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
					      gimple_expr_type (stmt),
					      gimple_assign_rhs1 (stmt),
					      gimple_assign_rhs2 (stmt),
					      strict_overflow_p, depth);
    case GIMPLE_TERNARY_RHS:
      /* No fold-const counterpart for ternary RHS; be conservative.  */
      return false;
    case GIMPLE_SINGLE_RHS:
      return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
					      strict_overflow_p, depth);
    case GIMPLE_INVALID_RHS:
      break;
    }
  gcc_unreachable ();
}
8930
8931/* Return true if return value of call STMT is known to be non-negative.
8932 If the return value is based on the assumption that signed overflow is
8933 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
8934 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
8935
8936static bool
8937gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
8938 int depth)
8939{
8940 tree arg0 = gimple_call_num_args (stmt) > 0 ?
8941 gimple_call_arg (stmt, 0) : NULL_TREE;
8942 tree arg1 = gimple_call_num_args (stmt) > 1 ?
8943 gimple_call_arg (stmt, 1) : NULL_TREE;
8944
8945 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
1d9da71f 8946 gimple_call_combined_fn (stmt),
68e57f04
RS
8947 arg0,
8948 arg1,
8949 strict_overflow_p, depth);
8950}
8951
4534c203
RB
8952/* Return true if return value of call STMT is known to be non-negative.
8953 If the return value is based on the assumption that signed overflow is
8954 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
8955 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
8956
8957static bool
8958gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
8959 int depth)
8960{
8961 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
8962 {
8963 tree arg = gimple_phi_arg_def (stmt, i);
8964 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
8965 return false;
8966 }
8967 return true;
8968}
8969
68e57f04
RS
8970/* Return true if STMT is known to compute a non-negative value.
8971 If the return value is based on the assumption that signed overflow is
8972 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
8973 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
8974
8975bool
8976gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
8977 int depth)
8978{
8979 switch (gimple_code (stmt))
8980 {
8981 case GIMPLE_ASSIGN:
8982 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
8983 depth);
8984 case GIMPLE_CALL:
8985 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
8986 depth);
4534c203
RB
8987 case GIMPLE_PHI:
8988 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
8989 depth);
68e57f04
RS
8990 default:
8991 return false;
8992 }
8993}
/* Return true if the floating-point value computed by assignment STMT
   is known to have an integer value.  We also allow +Inf, -Inf and NaN
   to be considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
{
  /* Dispatch on the RHS class to the matching fold-const query.  */
  enum tree_code code = gimple_assign_rhs_code (stmt);
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
					  gimple_assign_rhs1 (stmt), depth);
    case GIMPLE_BINARY_RHS:
      return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
					   gimple_assign_rhs1 (stmt),
					   gimple_assign_rhs2 (stmt), depth);
    case GIMPLE_TERNARY_RHS:
      /* No fold-const counterpart for ternary RHS; be conservative.  */
      return false;
    case GIMPLE_SINGLE_RHS:
      return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
    case GIMPLE_INVALID_RHS:
      break;
    }
  gcc_unreachable ();
}
9023
9024/* Return true if the floating-point value computed by call STMT is known
9025 to have an integer value. We also allow +Inf, -Inf and NaN to be
5a00b0aa 9026 considered integer values. Return false for signaling NaN.
67dbe582
RS
9027
9028 DEPTH is the current nesting depth of the query. */
9029
9030static bool
9031gimple_call_integer_valued_real_p (gimple *stmt, int depth)
9032{
9033 tree arg0 = (gimple_call_num_args (stmt) > 0
9034 ? gimple_call_arg (stmt, 0)
9035 : NULL_TREE);
9036 tree arg1 = (gimple_call_num_args (stmt) > 1
9037 ? gimple_call_arg (stmt, 1)
9038 : NULL_TREE);
1d9da71f 9039 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
67dbe582
RS
9040 arg0, arg1, depth);
9041}
9042
9043/* Return true if the floating-point result of phi STMT is known to have
9044 an integer value. We also allow +Inf, -Inf and NaN to be considered
5a00b0aa 9045 integer values. Return false for signaling NaN.
67dbe582
RS
9046
9047 DEPTH is the current nesting depth of the query. */
9048
9049static bool
9050gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
9051{
9052 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
9053 {
9054 tree arg = gimple_phi_arg_def (stmt, i);
9055 if (!integer_valued_real_single_p (arg, depth + 1))
9056 return false;
9057 }
9058 return true;
9059}
9060
9061/* Return true if the floating-point value computed by STMT is known
9062 to have an integer value. We also allow +Inf, -Inf and NaN to be
5a00b0aa 9063 considered integer values. Return false for signaling NaN.
67dbe582
RS
9064
9065 DEPTH is the current nesting depth of the query. */
9066
9067bool
9068gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
9069{
9070 switch (gimple_code (stmt))
9071 {
9072 case GIMPLE_ASSIGN:
9073 return gimple_assign_integer_valued_real_p (stmt, depth);
9074 case GIMPLE_CALL:
9075 return gimple_call_integer_valued_real_p (stmt, depth);
9076 case GIMPLE_PHI:
9077 return gimple_phi_integer_valued_real_p (stmt, depth);
9078 default:
9079 return false;
9080 }
9081}