/* gcc/gimple-fold.c — statement simplification on GIMPLE.
   NOTE(review): this copy was extracted from a git-blame web view;
   blame annotations may remain interleaved below.  */
/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2021 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
20
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "gimple-ssa-warn-access.h"
#include "gimple-ssa-warn-restrict.h"
#include "fold-const.h"
#include "stmt.h"
#include "expr.h"
#include "stor-layout.h"
#include "dumpfile.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-object-size.h"
#include "tree-ssa.h"
#include "tree-ssa-propagate.h"
#include "ipa-utils.h"
#include "tree-ssa-address.h"
#include "langhooks.h"
#include "gimplify-me.h"
#include "dbgcnt.h"
#include "builtins.h"
#include "tree-eh.h"
#include "gimple-match.h"
#include "gomp-constants.h"
#include "optabs-query.h"
#include "omp-general.h"
#include "tree-cfg.h"
#include "fold-const-call.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "calls.h"
#include "tree-vector-builder.h"
#include "tree-ssa-strlen.h"
#include "varasm.h"
#include "memmodel.h"
#include "optabs.h"
/* Interpretations of the string-length query performed by
   get_range_strlen below.  */

enum strlen_range_kind {
  /* Compute the exact constant string length.  */
  SRK_STRLEN,
  /* Compute the maximum constant string length.  */
  SRK_STRLENMAX,
  /* Compute a range of string lengths bounded by object sizes.  When
     the length of a string cannot be determined, consider as the upper
     bound the size of the enclosing object the string may be a member
     or element of.  Also determine the size of the largest character
     array the string may refer to.  */
  SRK_LENRANGE,
  /* Determine the integer value of the argument (not string length).  */
  SRK_INT_VALUE
};

/* Forward declaration; the definition appears later in this file.  */
static bool
get_range_strlen (tree, bitmap *, strlen_range_kind, c_strlen_data *, unsigned);
fb471a13 90
/* Return true when DECL can be referenced from current unit.
   FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

   1) When analyzing C++ virtual tables.
      C++ virtual tables do have known constructors even
      when they are keyed to other compilation unit.
      Those tables can contain pointers to methods and vars
      in other units.  Those methods have both STATIC and EXTERNAL
      set.
   2) In WHOPR mode devirtualization might lead to reference
      to method that was partitioned elsewhere.
      In this case we have static VAR_DECL or FUNCTION_DECL
      that has no corresponding callgraph/varpool node
      declaring the body.
   3) COMDAT functions referred by external vtables that
      we devirtualize only during final compilation stage.
      At this time we already decided that we will not output
      the function body and thus we can't reference the symbol
      directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  /* Abstract (debug-only) decls are never referenceable.  */
  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || !VAR_OR_FUNCTION_DECL_P (decl))
    return true;

  /* Static objects can be referred only if they are defined and not optimized
     out yet.  */
  if (!TREE_PUBLIC (decl))
    {
      if (DECL_EXTERNAL (decl))
	return false;
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      /* A function body already inlined everywhere cannot be referenced.  */
      return !node || !node->inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var or var that has been optimized out.  */
  if (!from_decl
      || !VAR_P (from_decl)
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When function is public, we always can introduce new reference.
     Exception are the COMDAT functions where introducing a direct
     reference imply need to include function body in the current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have COMDAT.  We are going to check if we still have definition
     or if the definition is going to be output in other partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when corresponding vtable is output.
     This is however not possible - ABI specify that COMDATs are output in
     units where they are used and when the other unit was compiled with LTO
     it is possible that vtable was kept public while the function itself
     was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->inlined_to;
}
193
a15ebbcd
ML
194/* Create a temporary for TYPE for a statement STMT. If the current function
195 is in SSA form, a SSA name is created. Otherwise a temporary register
196 is made. */
197
edc19e03
WS
198tree
199create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
a15ebbcd
ML
200{
201 if (gimple_in_ssa_p (cfun))
202 return make_ssa_name (type, stmt);
203 else
204 return create_tmp_reg (type);
205}
206
/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it into
   acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL.
   Returns the canonicalized value, NULL_TREE if the value cannot be
   referenced from the current unit, or the original value unchanged when
   no canonicalization applies.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  if (CONSTANT_CLASS_P (cval))
    return cval;

  tree orig_cval = cval;
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      /* Rewrite invariant PTR p+ CST as &MEM[PTR + CST] so the whole
	 expression becomes an ADDR_EXPR handled below.  */
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      if (VAR_OR_FUNCTION_DECL_P (base)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_TYPE (base) == error_mark_node)
	return NULL_TREE;
      if (VAR_P (base))
	/* ??? We should be able to assert that TREE_ADDRESSABLE is set,
	   but since the use can be in a debug stmt we can't.  */
	;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  /* In CONSTRUCTORs we may see unfolded constants like (int (*) ()) 0.  */
  if (TREE_CODE (cval) == INTEGER_CST)
    {
      if (TREE_OVERFLOW_P (cval))
	cval = drop_tree_overflow (cval);
      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  return orig_cval;
}
cbdd87d4
RG
280
281/* If SYM is a constant variable with known value, return the value.
282 NULL_TREE is returned otherwise. */
283
284tree
285get_symbol_constant_value (tree sym)
286{
6a6dac52
JH
287 tree val = ctor_for_folding (sym);
288 if (val != error_mark_node)
cbdd87d4 289 {
cbdd87d4
RG
290 if (val)
291 {
9d60be38 292 val = canonicalize_constructor_val (unshare_expr (val), sym);
1389294c 293 if (val && is_gimple_min_invariant (val))
17f39a39 294 return val;
1389294c
JH
295 else
296 return NULL_TREE;
cbdd87d4
RG
297 }
298 /* Variables declared 'const' without an initializer
299 have zero as the initializer if they may not be
300 overridden at link or run time. */
301 if (!val
b8a8c472 302 && is_gimple_reg_type (TREE_TYPE (sym)))
e8160c9a 303 return build_zero_cst (TREE_TYPE (sym));
cbdd87d4
RG
304 }
305
306 return NULL_TREE;
307}
308
/* Subroutine of fold_stmt.  We perform constant folding of the
   memory reference tree EXPR.  Returns the folded result only when it
   is a valid gimple min invariant, NULL_TREE otherwise.  */

static tree
maybe_fold_reference (tree expr)
{
  tree result = NULL_TREE;

  /* Unary wrappers around a constant operand fold via fold_unary_loc;
     BIT_FIELD_REF of a constant folds via fold_ternary_loc; anything
     else is attempted as a constant aggregate reference.  */
  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    result = fold_unary_loc (EXPR_LOCATION (expr),
			     TREE_CODE (expr),
			     TREE_TYPE (expr),
			     TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    result = fold_ternary_loc (EXPR_LOCATION (expr),
			       TREE_CODE (expr),
			       TREE_TYPE (expr),
			       TREE_OPERAND (expr, 0),
			       TREE_OPERAND (expr, 1),
			       TREE_OPERAND (expr, 2));
  else
    result = fold_const_aggregate_ref (expr);

  if (result && is_gimple_min_invariant (result))
    return result;

  return NULL_TREE;
}
343
/* Return true if EXPR is an acceptable right-hand-side for a
   GIMPLE assignment.  We validate the entire tree, not just
   the root node, thus catching expressions that embed complex
   operands that are not permitted in GIMPLE.  This function
   is needed because the folding routines in fold-const.c
   may return such expressions in some cases, e.g., an array
   access with an embedded index addition.  It may make more
   sense to have folding routines that are sensitive to the
   constraints on GIMPLE operands, rather than abandoning
   any attempt to fold if the usual folding turns out to be too
   aggressive.  */

bool
valid_gimple_rhs_p (tree expr)
{
  enum tree_code code = TREE_CODE (expr);

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
      if (!is_gimple_variable (expr))
	return false;
      break;

    case tcc_constant:
      /* All constants are ok.  */
      break;

    case tcc_comparison:
      /* GENERIC allows comparisons with non-boolean types, reject
	 those for GIMPLE.  Let vector-typed comparisons pass - rules
	 for GENERIC and GIMPLE are the same here.  */
      if (!(INTEGRAL_TYPE_P (TREE_TYPE (expr))
	    && (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE
		|| TYPE_PRECISION (TREE_TYPE (expr)) == 1))
	  && ! VECTOR_TYPE_P (TREE_TYPE (expr)))
	return false;

      /* Fallthru.  */
    case tcc_binary:
      if (!is_gimple_val (TREE_OPERAND (expr, 0))
	  || !is_gimple_val (TREE_OPERAND (expr, 1)))
	return false;
      break;

    case tcc_unary:
      if (!is_gimple_val (TREE_OPERAND (expr, 0)))
	return false;
      break;

    case tcc_expression:
      switch (code)
	{
	case ADDR_EXPR:
	  {
	    tree t;
	    if (is_gimple_min_invariant (expr))
	      return true;
	    t = TREE_OPERAND (expr, 0);
	    while (handled_component_p (t))
	      {
		/* ??? More checks needed, see the GIMPLE verifier.  */
		if ((TREE_CODE (t) == ARRAY_REF
		     || TREE_CODE (t) == ARRAY_RANGE_REF)
		    && !is_gimple_val (TREE_OPERAND (t, 1)))
		  return false;
		t = TREE_OPERAND (t, 0);
	      }
	    if (!is_gimple_id (t))
	      return false;
	  }
	  break;

	default:
	  if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
	    {
	      if ((code == COND_EXPR
		   ? !is_gimple_condexpr (TREE_OPERAND (expr, 0))
		   : !is_gimple_val (TREE_OPERAND (expr, 0)))
		  || !is_gimple_val (TREE_OPERAND (expr, 1))
		  || !is_gimple_val (TREE_OPERAND (expr, 2)))
		return false;
	      break;
	    }
	  return false;
	}
      break;

    case tcc_vl_exp:
      return false;

    case tcc_exceptional:
      if (code == CONSTRUCTOR)
	{
	  unsigned i;
	  tree elt;
	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (expr), i, elt)
	    if (!is_gimple_val (elt))
	      return false;
	  return true;
	}
      if (code != SSA_NAME)
	return false;
      break;

    case tcc_reference:
      if (code == BIT_FIELD_REF)
	return is_gimple_val (TREE_OPERAND (expr, 0));
      return false;

    default:
      return false;
    }

  return true;
}
460
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	/* Clobbers carry lifetime information; never fold them away.  */
	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		/* Try to resolve the virtual function address to a direct
		   reference when the set of possible targets is known.  */
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We cannot use __builtin_unreachable here because it
			 cannot have address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }
	  }

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    /* &MEM[p, 0] simplifies to p (possibly with a conversion).  */
	    tree ref = TREE_OPERAND (rhs, 0);
	    if (TREE_CODE (ref) == MEM_REF
		&& integer_zerop (TREE_OPERAND (ref, 1)))
	      {
		result = TREE_OPERAND (ref, 0);
		if (!useless_type_conversion_p (TREE_TYPE (rhs),
						TREE_TYPE (result)))
		  result = build1 (NOP_EXPR, TREE_TYPE (rhs), result);
		return result;
	      }
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (! CONSTANT_CLASS_P (val))
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs)
		 && is_gimple_reg_type (TREE_TYPE (rhs)))
	  return get_symbol_constant_value (rhs);
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
				 TREE_TYPE (gimple_assign_lhs (stmt)),
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  /* Folding may produce trees not valid as a GIMPLE rhs; verify
	     before handing them back.  */
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
587
/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  /* The last store in the sequence reuses the original stmt's
	     VDEF; earlier stores get fresh SSA names.  */
	  tree vdef;
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
660
52a5515e
RB
661/* Helper function for update_gimple_call and
662 gimplify_and_update_call_from_tree. A GIMPLE_CALL STMT is being replaced
663 with GIMPLE_CALL NEW_STMT. */
664
665static void
666finish_update_gimple_call (gimple_stmt_iterator *si_p, gimple *new_stmt,
667 gimple *stmt)
668{
669 tree lhs = gimple_call_lhs (stmt);
670 gimple_call_set_lhs (new_stmt, lhs);
671 if (lhs && TREE_CODE (lhs) == SSA_NAME)
672 SSA_NAME_DEF_STMT (lhs) = new_stmt;
673 gimple_move_vops (new_stmt, stmt);
674 gimple_set_location (new_stmt, gimple_location (stmt));
675 if (gimple_block (new_stmt) == NULL_TREE)
676 gimple_set_block (new_stmt, gimple_block (stmt));
677 gsi_replace (si_p, new_stmt, false);
678}
679
/* Update a GIMPLE_CALL statement at iterator *SI_P to call to FN
   with number of arguments NARGS, where the arguments in GIMPLE form
   follow NARGS argument.  Always returns true.  */

bool
update_gimple_call (gimple_stmt_iterator *si_p, tree fn, int nargs, ...)
{
  va_list ap;
  gcall *new_stmt, *stmt = as_a <gcall *> (gsi_stmt (*si_p));

  gcc_assert (is_gimple_call (stmt));
  va_start (ap, nargs);
  new_stmt = gimple_build_call_valist (fn, nargs, ap);
  finish_update_gimple_call (si_p, new_stmt, stmt);
  va_end (ap);
  return true;
}
697
698/* Return true if EXPR is a CALL_EXPR suitable for representation
699 as a single GIMPLE_CALL statement. If the arguments require
700 further gimplification, return false. */
701
702static bool
703valid_gimple_call_p (tree expr)
704{
705 unsigned i, nargs;
706
707 if (TREE_CODE (expr) != CALL_EXPR)
708 return false;
709
710 nargs = call_expr_nargs (expr);
711 for (i = 0; i < nargs; i++)
712 {
713 tree arg = CALL_EXPR_ARG (expr, i);
714 if (is_gimple_reg_type (TREE_TYPE (arg)))
715 {
716 if (!is_gimple_val (arg))
717 return false;
718 }
719 else
720 if (!is_gimple_lvalue (arg))
721 return false;
722 }
723
724 return true;
725}
726
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL
   is replaced.  If the call is expected to produces a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  if (valid_gimple_call_p (expr))
    {
      /* The call has simplified to another call.  */
      tree fn = CALL_EXPR_FN (expr);
      unsigned i;
      unsigned nargs = call_expr_nargs (expr);
      vec<tree> args = vNULL;
      gcall *new_stmt;

      if (nargs > 0)
	{
	  args.create (nargs);
	  args.safe_grow_cleared (nargs, true);

	  for (i = 0; i < nargs; i++)
	    args[i] = CALL_EXPR_ARG (expr, i);
	}

      new_stmt = gimple_build_call_vec (fn, args);
      finish_update_gimple_call (si_p, new_stmt, stmt);
      args.release ();
      return;
    }

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      /* No result needed: gimplify EXPR for its side effects only.  */
      push_gimplify_context (gimple_in_ssa_p (cfun));
      gimplify_and_add (expr, &stmts);
      pop_gimplify_context (NULL);

      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), false);
	  return;
	}
    }
  else
    {
      /* Force EXPR into a gimple operand and assign it to the call's lhs.  */
      tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  gsi_replace_with_seq_vops (si_p, stmts);
}
cbdd87d4 804
fef5a0d9
RB
805
806/* Replace the call at *GSI with the gimple value VAL. */
807
e3174bdf 808void
fef5a0d9
RB
809replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
810{
355fe088 811 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9 812 tree lhs = gimple_call_lhs (stmt);
355fe088 813 gimple *repl;
fef5a0d9 814 if (lhs)
e256dfce 815 {
fef5a0d9
RB
816 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
817 val = fold_convert (TREE_TYPE (lhs), val);
818 repl = gimple_build_assign (lhs, val);
819 }
820 else
821 repl = gimple_build_nop ();
822 tree vdef = gimple_vdef (stmt);
823 if (vdef && TREE_CODE (vdef) == SSA_NAME)
824 {
825 unlink_stmt_vdef (stmt);
826 release_ssa_name (vdef);
827 }
f6b4dc28 828 gsi_replace (gsi, repl, false);
fef5a0d9
RB
829}
830
/* Replace the call at *GSI with the new call REPL and fold that
   again.  */

static void
replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
{
  gimple *stmt = gsi_stmt (*gsi);
  /* Transfer the result, location and virtual operands to REPL, then
     retry folding the replacement in place.  */
  gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
  gimple_set_location (repl, gimple_location (stmt));
  gimple_move_vops (repl, stmt);
  gsi_replace (gsi, repl, false);
  fold_stmt (gsi);
}
844
845/* Return true if VAR is a VAR_DECL or a component thereof. */
846
847static bool
848var_decl_component_p (tree var)
849{
850 tree inner = var;
851 while (handled_component_p (inner))
852 inner = TREE_OPERAND (inner, 0);
47cac108
RB
853 return (DECL_P (inner)
854 || (TREE_CODE (inner) == MEM_REF
855 && TREE_CODE (TREE_OPERAND (inner, 0)) == ADDR_EXPR));
fef5a0d9
RB
856}
857
/* Return TRUE if the SIZE argument, representing the size of an
   object, is in a range of values of which exactly zero is valid.  */

static bool
size_must_be_zero_p (tree size)
{
  if (integer_zerop (size))
    return true;

  /* Only SSA names of integral type can be queried for a range.  */
  if (TREE_CODE (size) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (size)))
    return false;

  tree type = TREE_TYPE (size);
  int prec = TYPE_PRECISION (type);

  /* Compute the value of SSIZE_MAX, the largest positive value that
     can be stored in ssize_t, the signed counterpart of size_t.  */
  wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
  value_range valid_range (build_int_cst (type, 0),
			   wide_int_to_tree (type, ssize_max));
  value_range vr;
  /* Use the on-demand ranger when compiling a function, the global
     range query otherwise.  */
  if (cfun)
    get_range_query (cfun)->range_of_expr (vr, size);
  else
    get_global_range_query ()->range_of_expr (vr, size);
  if (vr.undefined_p ())
    vr.set_varying (TREE_TYPE (size));
  /* SIZE must be zero exactly when its range intersected with
     [0, SSIZE_MAX] contains only zero.  */
  vr.intersect (&valid_range);
  return vr.zero_p ();
}
888
cc8bea0a
MS
889/* Fold function call to builtin mem{{,p}cpy,move}. Try to detect and
890 diagnose (otherwise undefined) overlapping copies without preventing
891 folding. When folded, GCC guarantees that overlapping memcpy has
892 the same semantics as memmove. Call to the library memcpy need not
893 provide the same guarantee. Return false if no simplification can
894 be made. */
fef5a0d9
RB
895
static bool
gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
			       tree dest, tree src, enum built_in_function code)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree len = gimple_call_arg (stmt, 2);
  location_t loc = gimple_location (stmt);

  /* If the LEN parameter is a constant zero or in range where
     the only valid value is zero, return DEST.  */
  if (size_must_be_zero_p (len))
    {
      gimple *repl;
      if (gimple_call_lhs (stmt))
	repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
      else
	repl = gimple_build_nop ();
      tree vdef = gimple_vdef (stmt);
      if (vdef && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
      gsi_replace (gsi, repl, false);
      return true;
    }

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    {
      /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
	 It's safe and may even be emitted by GCC itself (see bug
	 32667).  */
      unlink_stmt_vdef (stmt);
      if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	release_ssa_name (gimple_vdef (stmt));
      if (!lhs)
	{
	  gsi_replace (gsi, gimple_build_nop (), false);
	  return true;
	}
      goto done;
    }
  else
    {
      /* We cannot (easily) change the type of the copy if it is a storage
	 order barrier, i.e. is equivalent to a VIEW_CONVERT_EXPR that can
	 modify the storage order of objects (see storage_order_barrier_p).  */
      tree srctype
	= POINTER_TYPE_P (TREE_TYPE (src))
	  ? TREE_TYPE (TREE_TYPE (src)) : NULL_TREE;
      tree desttype
	= POINTER_TYPE_P (TREE_TYPE (dest))
	  ? TREE_TYPE (TREE_TYPE (dest)) : NULL_TREE;
      tree destvar, srcvar, srcoff;
      unsigned int src_align, dest_align;
      unsigned HOST_WIDE_INT tmp_len;
      const char *tmp_str;

      /* Build accesses at offset zero with a ref-all character type.  */
      tree off0
	= build_int_cst (build_pointer_type_for_mode (char_type_node,
						      ptr_mode, true), 0);

      /* If we can perform the copy efficiently with first doing all loads and
	 then all stores inline it that way.  Currently efficiently means that
	 we can load all the memory with a single set operation and that the
	 total size is less than MOVE_MAX * MOVE_RATIO.  */
      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (tree_fits_uhwi_p (len)
	  && (compare_tree_int
	      (len, (MOVE_MAX
		     * MOVE_RATIO (optimize_function_for_size_p (cfun))))
	      <= 0)
	  /* FIXME: Don't transform copies from strings with known length.
	     Until GCC 9 this prevented a case in gcc.dg/strlenopt-8.c
	     from being handled, and the case was XFAILed for that reason.
	     Now that it is handled and the XFAIL removed, as soon as other
	     strlenopt tests that rely on it for passing are adjusted, this
	     hack can be removed.  */
	  && !c_strlen (src, 1)
	  && !((tmp_str = getbyterep (src, &tmp_len)) != NULL
	       && memchr (tmp_str, 0, tmp_len) == NULL)
	  /* Reverse-storage-order accesses cannot be folded into a plain
	     integer-mode load/store pair.  */
	  && !(srctype
	       && AGGREGATE_TYPE_P (srctype)
	       && TYPE_REVERSE_STORAGE_ORDER (srctype))
	  && !(desttype
	       && AGGREGATE_TYPE_P (desttype)
	       && TYPE_REVERSE_STORAGE_ORDER (desttype)))
	{
	  unsigned ilen = tree_to_uhwi (len);
	  if (pow2p_hwi (ilen))
	    {
	      /* Detect out-of-bounds accesses without issuing warnings.
		 Avoid folding out-of-bounds copies but to avoid false
		 positives for unreachable code defer warning until after
		 DCE has worked its magic.
		 -Wrestrict is still diagnosed.  */
	      if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
							 dest, src, len, len,
							 false, false))
		if (warning != OPT_Wrestrict)
		  return false;

	      scalar_int_mode mode;
	      tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
	      if (type
		  && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
		  && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
		  && have_insn_for (SET, mode)
		  /* If the destination pointer is not aligned we must be able
		     to emit an unaligned store.  */
		  && (dest_align >= GET_MODE_ALIGNMENT (mode)
		      || !targetm.slow_unaligned_access (mode, dest_align)
		      || (optab_handler (movmisalign_optab, mode)
			  != CODE_FOR_nothing)))
		{
		  tree srctype = type;
		  tree desttype = type;
		  if (src_align < GET_MODE_ALIGNMENT (mode))
		    srctype = build_aligned_type (type, src_align);
		  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
		  tree tem = fold_const_aggregate_ref (srcmem);
		  if (tem)
		    srcmem = tem;
		  /* Give up on the load side if an unaligned access would be
		     slow and cannot be expressed as a misaligned move.  */
		  else if (src_align < GET_MODE_ALIGNMENT (mode)
			   && targetm.slow_unaligned_access (mode, src_align)
			   && (optab_handler (movmisalign_optab, mode)
			       == CODE_FOR_nothing))
		    srcmem = NULL_TREE;
		  if (srcmem)
		    {
		      gimple *new_stmt;
		      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
			{
			  new_stmt = gimple_build_assign (NULL_TREE, srcmem);
			  srcmem
			    = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
							  new_stmt);
			  gimple_assign_set_lhs (new_stmt, srcmem);
			  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      if (dest_align < GET_MODE_ALIGNMENT (mode))
			desttype = build_aligned_type (type, dest_align);
		      new_stmt
			= gimple_build_assign (fold_build2 (MEM_REF, desttype,
							    dest, off0),
					       srcmem);
		      gimple_move_vops (new_stmt, stmt);
		      if (!lhs)
			{
			  gsi_replace (gsi, new_stmt, false);
			  return true;
			}
		      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
		      goto done;
		    }
		}
	    }
	}

      if (code == BUILT_IN_MEMMOVE)
	{
	  /* Both DEST and SRC must be pointer types.
	     ??? This is what old code did.  Is the testing for pointer types
	     really mandatory?

	     If either SRC is readonly or length is 1, we can use memcpy.  */
	  if (!dest_align || !src_align)
	    return false;
	  if (readonly_data_expr (src)
	      || (tree_fits_uhwi_p (len)
		  && (MIN (src_align, dest_align) / BITS_PER_UNIT
		      >= tree_to_uhwi (len))))
	    {
	      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If *src and *dest can't overlap, optimize into memcpy as well.  */
	  if (TREE_CODE (src) == ADDR_EXPR
	      && TREE_CODE (dest) == ADDR_EXPR)
	    {
	      tree src_base, dest_base, fn;
	      poly_int64 src_offset = 0, dest_offset = 0;
	      poly_uint64 maxsize;

	      srcvar = TREE_OPERAND (src, 0);
	      src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
	      if (src_base == NULL)
		src_base = srcvar;
	      destvar = TREE_OPERAND (dest, 0);
	      dest_base = get_addr_base_and_unit_offset (destvar,
							 &dest_offset);
	      if (dest_base == NULL)
		dest_base = destvar;
	      if (!poly_int_tree_p (len, &maxsize))
		maxsize = -1;
	      if (SSA_VAR_P (src_base)
		  && SSA_VAR_P (dest_base))
		{
		  if (operand_equal_p (src_base, dest_base, 0)
		      && ranges_maybe_overlap_p (src_offset, maxsize,
						 dest_offset, maxsize))
		    return false;
		}
	      else if (TREE_CODE (src_base) == MEM_REF
		       && TREE_CODE (dest_base) == MEM_REF)
		{
		  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
					 TREE_OPERAND (dest_base, 0), 0))
		    return false;
		  poly_offset_int full_src_offset
		    = mem_ref_offset (src_base) + src_offset;
		  poly_offset_int full_dest_offset
		    = mem_ref_offset (dest_base) + dest_offset;
		  if (ranges_maybe_overlap_p (full_src_offset, maxsize,
					      full_dest_offset, maxsize))
		    return false;
		}
	      else
		return false;

	      fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If the destination and source do not alias optimize into
	     memcpy as well.  */
	  if ((is_gimple_min_invariant (dest)
	       || TREE_CODE (dest) == SSA_NAME)
	      && (is_gimple_min_invariant (src)
		  || TREE_CODE (src) == SSA_NAME))
	    {
	      ao_ref destr, srcr;
	      ao_ref_init_from_ptr_and_size (&destr, dest, len);
	      ao_ref_init_from_ptr_and_size (&srcr, src, len);
	      if (!refs_may_alias_p_1 (&destr, &srcr, false))
		{
		  tree fn;
		  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
		  if (!fn)
		    return false;
		  gimple_call_set_fndecl (stmt, fn);
		  gimple_call_set_arg (stmt, 0, dest);
		  gimple_call_set_arg (stmt, 1, src);
		  fold_stmt (gsi);
		  return true;
		}
	    }

	  return false;
	}

      if (!tree_fits_shwi_p (len))
	return false;
      if (!srctype
	  || (AGGREGATE_TYPE_P (srctype)
	      && TYPE_REVERSE_STORAGE_ORDER (srctype)))
	return false;
      if (!desttype
	  || (AGGREGATE_TYPE_P (desttype)
	      && TYPE_REVERSE_STORAGE_ORDER (desttype)))
	return false;
      /* In the following try to find a type that is most natural to be
	 used for the memcpy source and destination and that allows
	 the most optimization when memcpy is turned into a plain assignment
	 using that type.  In theory we could always use a char[len] type
	 but that only gains us that the destination and source possibly
	 no longer will have their address taken.  */
      if (TREE_CODE (srctype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	srctype = TREE_TYPE (srctype);
      if (TREE_CODE (desttype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	desttype = TREE_TYPE (desttype);
      if (TREE_ADDRESSABLE (srctype)
	  || TREE_ADDRESSABLE (desttype))
	return false;

      /* Make sure we are not copying using a floating-point mode or
	 a type whose size possibly does not match its precision.  */
      if (FLOAT_MODE_P (TYPE_MODE (desttype))
	  || TREE_CODE (desttype) == BOOLEAN_TYPE
	  || TREE_CODE (desttype) == ENUMERAL_TYPE)
	desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
      if (FLOAT_MODE_P (TYPE_MODE (srctype))
	  || TREE_CODE (srctype) == BOOLEAN_TYPE
	  || TREE_CODE (srctype) == ENUMERAL_TYPE)
	srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
      /* bitwise_type_for_mode can return NULL_TREE; fall back to the
	 other side's type before giving up.  */
      if (!srctype)
	srctype = desttype;
      if (!desttype)
	desttype = srctype;
      if (!srctype)
	return false;

      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);

      /* Choose between src and destination type for the access based
	 on alignment, whether the access constitutes a register access
	 and whether it may actually expose a declaration for SSA rewrite
	 or SRA decomposition.  Also try to expose a string constant, we
	 might be able to concatenate several of them later into a single
	 string store.  */
      destvar = NULL_TREE;
      srcvar = NULL_TREE;
      if (TREE_CODE (dest) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (dest, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len)
	  && dest_align >= TYPE_ALIGN (desttype)
	  && (is_gimple_reg_type (desttype)
	      || src_align >= TYPE_ALIGN (desttype)))
	destvar = fold_build2 (MEM_REF, desttype, dest, off0);
      else if (TREE_CODE (src) == ADDR_EXPR
	       && var_decl_component_p (TREE_OPERAND (src, 0))
	       && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
	       && src_align >= TYPE_ALIGN (srctype)
	       && (is_gimple_reg_type (srctype)
		   || dest_align >= TYPE_ALIGN (srctype)))
	srcvar = fold_build2 (MEM_REF, srctype, src, off0);
      /* FIXME: Don't transform copies from strings with known original length.
	 As soon as strlenopt tests that rely on it for passing are adjusted,
	 this hack can be removed.  */
      else if (gimple_call_alloca_for_var_p (stmt)
	       && (srcvar = string_constant (src, &srcoff, NULL, NULL))
	       && integer_zerop (srcoff)
	       && tree_int_cst_equal (TYPE_SIZE_UNIT (TREE_TYPE (srcvar)), len)
	       && dest_align >= TYPE_ALIGN (TREE_TYPE (srcvar)))
	srctype = TREE_TYPE (srcvar);
      else
	return false;

      /* Now that we chose an access type express the other side in
	 terms of it if the target allows that with respect to alignment
	 constraints.  */
      if (srcvar == NULL_TREE)
	{
	  if (src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, desttype, src, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return false;
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	    }
	}
      else if (destvar == NULL_TREE)
	{
	  if (dest_align >= TYPE_ALIGN (srctype))
	    destvar = fold_build2 (MEM_REF, srctype, dest, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return false;
	      desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
					     dest_align);
	      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
	    }
	}

      /* Same as above, detect out-of-bounds accesses without issuing
	 warnings.  Avoid folding out-of-bounds copies but to avoid
	 false positives for unreachable code defer warning until
	 after DCE has worked its magic.
	 -Wrestrict is still diagnosed.  */
      if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
						 dest, src, len, len,
						 false, false))
	if (warning != OPT_Wrestrict)
	  return false;

      gimple *new_stmt;
      if (is_gimple_reg_type (TREE_TYPE (srcvar)))
	{
	  tree tem = fold_const_aggregate_ref (srcvar);
	  if (tem)
	    srcvar = tem;
	  if (! is_gimple_min_invariant (srcvar))
	    {
	      new_stmt = gimple_build_assign (NULL_TREE, srcvar);
	      srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
						   new_stmt);
	      gimple_assign_set_lhs (new_stmt, srcvar);
	      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
	    }
	  new_stmt = gimple_build_assign (destvar, srcvar);
	  goto set_vop_and_replace;
	}

      /* We get an aggregate copy.  If the source is a STRING_CST, then
	 directly use its type to perform the copy.  */
      if (TREE_CODE (srcvar) == STRING_CST)
	desttype = srctype;

      /* Or else, use an unsigned char[] type to perform the copy in order
	 to preserve padding and to avoid any issues with TREE_ADDRESSABLE
	 types or float modes behavior on copying.  */
      else
	{
	  desttype = build_array_type_nelts (unsigned_char_type_node,
					     tree_to_uhwi (len));
	  srctype = desttype;
	  if (src_align > TYPE_ALIGN (srctype))
	    srctype = build_aligned_type (srctype, src_align);
	  srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	}

      if (dest_align > TYPE_ALIGN (desttype))
	desttype = build_aligned_type (desttype, dest_align);
      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
      new_stmt = gimple_build_assign (destvar, srcvar);

set_vop_and_replace:
      gimple_move_vops (new_stmt, stmt);
      if (!lhs)
	{
	  gsi_replace (gsi, new_stmt, false);
	  return true;
	}
      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
    }

done:
  /* The copy itself has been folded away above; what remains is to
     materialize the return value: DEST for memcpy/memmove, or
     DEST + LEN for mempcpy.  */
  gimple_seq stmts = NULL;
  if (code == BUILT_IN_MEMCPY || code == BUILT_IN_MEMMOVE)
    len = NULL_TREE;
  else if (code == BUILT_IN_MEMPCPY)
    {
      len = gimple_convert_to_ptrofftype (&stmts, loc, len);
      dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
			   TREE_TYPE (dest), dest, len);
    }
  else
    gcc_unreachable ();

  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gimple *repl = gimple_build_assign (lhs, dest);
  gsi_replace (gsi, repl, false);
  return true;
}
1357
b3d8d88e
MS
1358/* Transform a call to built-in bcmp(a, b, len) at *GSI into one
1359 to built-in memcmp (a, b, len). */
1360
1361static bool
1362gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
1363{
1364 tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);
1365
1366 if (!fn)
1367 return false;
1368
1369 /* Transform bcmp (a, b, len) into memcmp (a, b, len). */
1370
1371 gimple *stmt = gsi_stmt (*gsi);
1372 tree a = gimple_call_arg (stmt, 0);
1373 tree b = gimple_call_arg (stmt, 1);
1374 tree len = gimple_call_arg (stmt, 2);
1375
1376 gimple *repl = gimple_build_call (fn, 3, a, b, len);
1377 replace_call_with_call_and_fold (gsi, repl);
1378
1379 return true;
1380}
1381
1382/* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
1383 to built-in memmove (dest, src, len). */
1384
1385static bool
1386gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
1387{
1388 tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);
1389
1390 if (!fn)
1391 return false;
1392
1393 /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
1394 it's quivalent to memmove (not memcpy). Transform bcopy (src, dest,
1395 len) into memmove (dest, src, len). */
1396
1397 gimple *stmt = gsi_stmt (*gsi);
1398 tree src = gimple_call_arg (stmt, 0);
1399 tree dest = gimple_call_arg (stmt, 1);
1400 tree len = gimple_call_arg (stmt, 2);
1401
1402 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1403 gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
1404 replace_call_with_call_and_fold (gsi, repl);
1405
1406 return true;
1407}
1408
1409/* Transform a call to built-in bzero (dest, len) at *GSI into one
1410 to built-in memset (dest, 0, len). */
1411
1412static bool
1413gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
1414{
1415 tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
1416
1417 if (!fn)
1418 return false;
1419
1420 /* Transform bzero (dest, len) into memset (dest, 0, len). */
1421
1422 gimple *stmt = gsi_stmt (*gsi);
1423 tree dest = gimple_call_arg (stmt, 0);
1424 tree len = gimple_call_arg (stmt, 1);
1425
1426 gimple_seq seq = NULL;
1427 gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
1428 gimple_seq_add_stmt_without_update (&seq, repl);
1429 gsi_replace_with_seq_vops (gsi, seq);
1430 fold_stmt (gsi);
1431
1432 return true;
1433}
1434
fef5a0d9
RB
1435/* Fold function call to builtin memset or bzero at *GSI setting the
1436 memory of size LEN to VAL. Return whether a simplification was made. */
1437
1438static bool
1439gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1440{
355fe088 1441 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9
RB
1442 tree etype;
1443 unsigned HOST_WIDE_INT length, cval;
1444
1445 /* If the LEN parameter is zero, return DEST. */
1446 if (integer_zerop (len))
1447 {
1448 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1449 return true;
1450 }
1451
1452 if (! tree_fits_uhwi_p (len))
1453 return false;
1454
1455 if (TREE_CODE (c) != INTEGER_CST)
1456 return false;
1457
1458 tree dest = gimple_call_arg (stmt, 0);
1459 tree var = dest;
1460 if (TREE_CODE (var) != ADDR_EXPR)
1461 return false;
1462
1463 var = TREE_OPERAND (var, 0);
1464 if (TREE_THIS_VOLATILE (var))
1465 return false;
1466
1467 etype = TREE_TYPE (var);
1468 if (TREE_CODE (etype) == ARRAY_TYPE)
1469 etype = TREE_TYPE (etype);
1470
1471 if (!INTEGRAL_TYPE_P (etype)
1472 && !POINTER_TYPE_P (etype))
1473 return NULL_TREE;
1474
1475 if (! var_decl_component_p (var))
1476 return NULL_TREE;
1477
1478 length = tree_to_uhwi (len);
7a504f33 1479 if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
1ba9acb1
RB
1480 || (GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (etype))
1481 != GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (etype)))
fef5a0d9
RB
1482 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1483 return NULL_TREE;
1484
1485 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1486 return NULL_TREE;
1487
1ba9acb1
RB
1488 if (!type_has_mode_precision_p (etype))
1489 etype = lang_hooks.types.type_for_mode (SCALAR_INT_TYPE_MODE (etype),
1490 TYPE_UNSIGNED (etype));
1491
fef5a0d9
RB
1492 if (integer_zerop (c))
1493 cval = 0;
1494 else
1495 {
1496 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1497 return NULL_TREE;
1498
1499 cval = TREE_INT_CST_LOW (c);
1500 cval &= 0xff;
1501 cval |= cval << 8;
1502 cval |= cval << 16;
1503 cval |= (cval << 31) << 1;
1504 }
1505
1506 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
355fe088 1507 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
779724a5 1508 gimple_move_vops (store, stmt);
fef5a0d9
RB
1509 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1510 if (gimple_call_lhs (stmt))
1511 {
355fe088 1512 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
f6b4dc28 1513 gsi_replace (gsi, asgn, false);
fef5a0d9
RB
1514 }
1515 else
1516 {
1517 gimple_stmt_iterator gsi2 = *gsi;
1518 gsi_prev (gsi);
1519 gsi_remove (&gsi2, true);
1520 }
1521
1522 return true;
1523}
1524
/* Helper of get_range_strlen for ARG that is not an SSA_NAME.  */

static bool
get_range_strlen_tree (tree arg, bitmap *visited, strlen_range_kind rkind,
		       c_strlen_data *pdata, unsigned eltsize)
{
  /* SSA_NAMEs are handled by the caller, get_range_strlen.  */
  gcc_assert (TREE_CODE (arg) != SSA_NAME);

  /* The length computed by this invocation of the function.  */
  tree val = NULL_TREE;

  /* True if VAL is an optimistic (tight) bound determined from
     the size of the character array in which the string may be
     stored.  In that case, the computed VAL is used to set
     PDATA->MAXBOUND.  */
  bool tight_bound = false;

  /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
    {
      tree op = TREE_OPERAND (arg, 0);
      if (integer_zerop (TREE_OPERAND (op, 1)))
	{
	  tree aop0 = TREE_OPERAND (op, 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_range_strlen (TREE_OPERAND (aop0, 0), visited, rkind,
				     pdata, eltsize);
	}
      else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF
	       && rkind == SRK_LENRANGE)
	{
	  /* Fail if an array is the last member of a struct object
	     since it could be treated as a (fake) flexible array
	     member.  */
	  tree idx = TREE_OPERAND (op, 1);

	  arg = TREE_OPERAND (op, 0);
	  tree optype = TREE_TYPE (arg);
	  if (tree dom = TYPE_DOMAIN (optype))
	    if (tree bound = TYPE_MAX_VALUE (dom))
	      if (TREE_CODE (bound) == INTEGER_CST
		  && TREE_CODE (idx) == INTEGER_CST
		  && tree_int_cst_lt (bound, idx))
		return false;
	}
    }

  if (rkind == SRK_INT_VALUE)
    {
      /* We are computing the maximum value (not string length).  */
      val = arg;
      if (TREE_CODE (val) != INTEGER_CST
	  || tree_int_cst_sgn (val) < 0)
	return false;
    }
  else
    {
      c_strlen_data lendata = { };
      val = c_strlen (arg, 1, &lendata, eltsize);

      if (!val && lendata.decl)
	{
	  /* ARG refers to an unterminated const character array.
	     DATA.DECL with size DATA.LEN.  */
	  val = lendata.minlen;
	  pdata->decl = lendata.decl;
	}
    }

  /* Set if VAL represents the maximum length based on array size (set
     when exact length cannot be determined).  */
  bool maxbound = false;

  if (!val && rkind == SRK_LENRANGE)
    {
      if (TREE_CODE (arg) == ADDR_EXPR)
	return get_range_strlen (TREE_OPERAND (arg, 0), visited, rkind,
				 pdata, eltsize);

      if (TREE_CODE (arg) == ARRAY_REF)
	{
	  tree optype = TREE_TYPE (TREE_OPERAND (arg, 0));

	  /* Determine the "innermost" array type.  */
	  while (TREE_CODE (optype) == ARRAY_TYPE
		 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
	    optype = TREE_TYPE (optype);

	  /* Avoid arrays of pointers.  */
	  tree eltype = TREE_TYPE (optype);
	  if (TREE_CODE (optype) != ARRAY_TYPE
	      || !INTEGRAL_TYPE_P (eltype))
	    return false;

	  /* Fail when the array bound is unknown or zero.  */
	  val = TYPE_SIZE_UNIT (optype);
	  if (!val
	      || TREE_CODE (val) != INTEGER_CST
	      || integer_zerop (val))
	    return false;

	  /* The longest string the array can hold is its size minus
	     one for the terminating nul.  */
	  val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
			     integer_one_node);

	  /* Set the minimum size to zero since the string in
	     the array could have zero length.  */
	  pdata->minlen = ssize_int (0);

	  tight_bound = true;
	}
      else if (TREE_CODE (arg) == COMPONENT_REF
	       && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
		   == ARRAY_TYPE))
	{
	  /* Use the type of the member array to determine the upper
	     bound on the length of the array.  This may be overly
	     optimistic if the array itself isn't NUL-terminated and
	     the caller relies on the subsequent member to contain
	     the NUL but that would only be considered valid if
	     the array were the last member of a struct.  */

	  tree fld = TREE_OPERAND (arg, 1);

	  tree optype = TREE_TYPE (fld);

	  /* Determine the "innermost" array type.  */
	  while (TREE_CODE (optype) == ARRAY_TYPE
		 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
	    optype = TREE_TYPE (optype);

	  /* Fail when the array bound is unknown or zero.  */
	  val = TYPE_SIZE_UNIT (optype);
	  if (!val
	      || TREE_CODE (val) != INTEGER_CST
	      || integer_zerop (val))
	    return false;
	  val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
			     integer_one_node);

	  /* Set the minimum size to zero since the string in
	     the array could have zero length.  */
	  pdata->minlen = ssize_int (0);

	  /* The array size determined above is an optimistic bound
	     on the length.  If the array isn't nul-terminated the
	     length computed by the library function would be greater.
	     Even though using strlen to cross the subobject boundary
	     is undefined, avoid drawing conclusions from the member
	     type about the length here.  */
	  tight_bound = true;
	}
      else if (TREE_CODE (arg) == MEM_REF
	       && TREE_CODE (TREE_TYPE (arg)) == ARRAY_TYPE
	       && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == INTEGER_TYPE
	       && TREE_CODE (TREE_OPERAND (arg, 0)) == ADDR_EXPR)
	{
	  /* Handle a MEM_REF into a DECL accessing an array of integers,
	     being conservative about references to extern structures with
	     flexible array members that can be initialized to arbitrary
	     numbers of elements as an extension (static structs are okay).
	     FIXME: Make this less conservative -- see
	     component_ref_size in tree.c.  */
	  tree ref = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if ((TREE_CODE (ref) == PARM_DECL || VAR_P (ref))
	      && (decl_binds_to_current_def_p (ref)
		  || !array_at_struct_end_p (arg)))
	    {
	      /* Fail if the offset is out of bounds.  Such accesses
		 should be diagnosed at some point.  */
	      val = DECL_SIZE_UNIT (ref);
	      if (!val
		  || TREE_CODE (val) != INTEGER_CST
		  || integer_zerop (val))
		return false;

	      poly_offset_int psiz = wi::to_offset (val);
	      poly_offset_int poff = mem_ref_offset (arg);
	      if (known_le (psiz, poff))
		return false;

	      pdata->minlen = ssize_int (0);

	      /* Subtract the offset and one for the terminating nul.  */
	      psiz -= poff;
	      psiz -= 1;
	      val = wide_int_to_tree (TREE_TYPE (val), psiz);
	      /* Since VAL reflects the size of a declared object
		 rather the type of the access it is not a tight bound.  */
	    }
	}
      else if (TREE_CODE (arg) == PARM_DECL || VAR_P (arg))
	{
	  /* Avoid handling pointers to arrays.  GCC might misuse
	     a pointer to an array of one bound to point to an array
	     object of a greater bound.  */
	  tree argtype = TREE_TYPE (arg);
	  if (TREE_CODE (argtype) == ARRAY_TYPE)
	    {
	      val = TYPE_SIZE_UNIT (argtype);
	      if (!val
		  || TREE_CODE (val) != INTEGER_CST
		  || integer_zerop (val))
		return false;
	      val = wide_int_to_tree (TREE_TYPE (val),
				      wi::sub (wi::to_wide (val), 1));

	      /* Set the minimum size to zero since the string in
		 the array could have zero length.  */
	      pdata->minlen = ssize_int (0);
	    }
	}
      maxbound = true;
    }

  if (!val)
    return false;

  /* Adjust the lower bound on the string length as necessary.  */
  if (!pdata->minlen
      || (rkind != SRK_STRLEN
	  && TREE_CODE (pdata->minlen) == INTEGER_CST
	  && TREE_CODE (val) == INTEGER_CST
	  && tree_int_cst_lt (val, pdata->minlen)))
    pdata->minlen = val;

  if (pdata->maxbound && TREE_CODE (pdata->maxbound) == INTEGER_CST)
    {
      /* Adjust the tighter (more optimistic) string length bound
	 if necessary and proceed to adjust the more conservative
	 bound.  */
      if (TREE_CODE (val) == INTEGER_CST)
	{
	  if (tree_int_cst_lt (pdata->maxbound, val))
	    pdata->maxbound = val;
	}
      else
	pdata->maxbound = val;
    }
  else if (pdata->maxbound || maxbound)
    /* Set PDATA->MAXBOUND only if it either isn't INTEGER_CST or
       if VAL corresponds to the maximum length determined based
       on the type of the object.  */
    pdata->maxbound = val;

  if (tight_bound)
    {
      /* VAL computed above represents an optimistically tight bound
	 on the length of the string based on the referenced object's
	 or subobject's type.  Determine the conservative upper bound
	 based on the enclosing object's size if possible.  */
      if (rkind == SRK_LENRANGE)
	{
	  poly_int64 offset;
	  tree base = get_addr_base_and_unit_offset (arg, &offset);
	  if (!base)
	    {
	      /* When the call above fails due to a non-constant offset
		 assume the offset is zero and use the size of the whole
		 enclosing object instead.  */
	      base = get_base_address (arg);
	      offset = 0;
	    }
	  /* If the base object is a pointer no upper bound on the length
	     can be determined.  Otherwise the maximum length is equal to
	     the size of the enclosing object minus the offset of
	     the referenced subobject minus 1 (for the terminating nul).  */
	  tree type = TREE_TYPE (base);
	  if (TREE_CODE (type) == POINTER_TYPE
	      || (TREE_CODE (base) != PARM_DECL && !VAR_P (base))
	      || !(val = DECL_SIZE_UNIT (base)))
	    val = build_all_ones_cst (size_type_node);
	  else
	    {
	      val = DECL_SIZE_UNIT (base);
	      val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
				 size_int (offset + 1));
	    }
	}
      else
	return false;
    }

  if (pdata->maxlen)
    {
      /* Adjust the more conservative bound if possible/necessary
	 and fail otherwise.  */
      if (rkind != SRK_STRLEN)
	{
	  if (TREE_CODE (pdata->maxlen) != INTEGER_CST
	      || TREE_CODE (val) != INTEGER_CST)
	    return false;

	  if (tree_int_cst_lt (pdata->maxlen, val))
	    pdata->maxlen = val;
	  return true;
	}
      else if (simple_cst_equal (val, pdata->maxlen) != 1)
	{
	  /* Fail if the length of this ARG is different from that
	     previously determined from another ARG.  */
	  return false;
	}
    }

  pdata->maxlen = val;
  return rkind == SRK_LENRANGE || !integer_all_onesp (val);
}
1834
/* For an ARG referencing one or more strings, try to obtain the range
   of their lengths, or the size of the largest array ARG refers to if
   the range of lengths cannot be determined, and store all in *PDATA.
   For an integer ARG (when RKIND == SRK_INT_VALUE), try to determine
   the maximum constant value.
   If ARG is an SSA_NAME, follow its use-def chains.  When RKIND ==
   SRK_STRLEN, then if PDATA->MAXLEN is not equal to the determined
   length or if we are unable to determine the length, return false.
   VISITED is a bitmap of visited variables.
   RKIND determines the kind of value or range to obtain (see
   strlen_range_kind).
   Set PDATA->DECL if ARG refers to an unterminated constant array.
   On input, set ELTSIZE to 1 for normal single byte character strings,
   and either 2 or 4 for wide character strings (the size of wchar_t).
   Return true if *PDATA was successfully populated and false otherwise.  */

static bool
get_range_strlen (tree arg, bitmap *visited,
		  strlen_range_kind rkind,
		  c_strlen_data *pdata, unsigned eltsize)
{
  /* Non-SSA names (declarations, constants, address expressions) are
     handled by the non-recursive helper.  */
  if (TREE_CODE (arg) != SSA_NAME)
    return get_range_strlen_tree (arg, visited, rkind, pdata, eltsize);

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  The bitmap is
     allocated lazily on the first SSA name encountered.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  tree var = arg;
  gimple *def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
	 constant length or come from another SSA_NAME with a constant
	 length.  */
      if (gimple_assign_single_p (def_stmt)
	  || gimple_assign_unary_nop_p (def_stmt))
	{
	  /* Copies and conversions: recurse into the source operand.  */
	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  return get_range_strlen (rhs, visited, rkind, pdata, eltsize);
	}
      else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	{
	  /* A conditional: both selected values contribute to the
	     range.  */
	  tree ops[2] = { gimple_assign_rhs2 (def_stmt),
			  gimple_assign_rhs3 (def_stmt) };

	  for (unsigned int i = 0; i < 2; i++)
	    if (!get_range_strlen (ops[i], visited, rkind, pdata, eltsize))
	      {
		if (rkind != SRK_LENRANGE)
		  return false;
		/* Set the upper bound to the maximum to prevent
		   it from being adjusted in the next iteration but
		   leave MINLEN and the more conservative MAXBOUND
		   determined so far alone (or leave them null if
		   they haven't been set yet).  That the MINLEN is
		   in fact zero can be determined from MAXLEN being
		   unbounded but the discovered minimum is used for
		   diagnostics.  */
		pdata->maxlen = build_all_ones_cst (size_type_node);
	      }
	  return true;
	}
      return false;

    case GIMPLE_PHI:
      /* Unless RKIND == SRK_LENRANGE, all arguments of the PHI node
	 must have a constant length.  */
      for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
	{
	  tree arg = gimple_phi_arg (def_stmt, i)->def;

	  /* If this PHI has itself as an argument, we cannot
	     determine the string length of this argument.  However,
	     if we can find a constant string length for the other
	     PHI args then we can still be sure that this is a
	     constant string length.  So be optimistic and just
	     continue with the next argument.  */
	  if (arg == gimple_phi_result (def_stmt))
	    continue;

	  if (!get_range_strlen (arg, visited, rkind, pdata, eltsize))
	    {
	      if (rkind != SRK_LENRANGE)
		return false;
	      /* Set the upper bound to the maximum to prevent
		 it from being adjusted in the next iteration but
		 leave MINLEN and the more conservative MAXBOUND
		 determined so far alone (or leave them null if
		 they haven't been set yet).  That the MINLEN is
		 in fact zero can be determined from MAXLEN being
		 unbounded but the discovered minimum is used for
		 diagnostics.  */
	      pdata->maxlen = build_all_ones_cst (size_type_node);
	    }
	}
      return true;

    default:
      return false;
    }
}
5d6655eb 1947
97623b52
MS
1948/* Try to obtain the range of the lengths of the string(s) referenced
1949 by ARG, or the size of the largest array ARG refers to if the range
a7160771
MS
1950 of lengths cannot be determined, and store all in *PDATA which must
1951 be zero-initialized on input except PDATA->MAXBOUND may be set to
1952 a non-null tree node other than INTEGER_CST to request to have it
1953 set to the length of the longest string in a PHI. ELTSIZE is
1954 the expected size of the string element in bytes: 1 for char and
97623b52
MS
1955 some power of 2 for wide characters.
1956 Return true if the range [PDATA->MINLEN, PDATA->MAXLEN] is suitable
1957 for optimization. Returning false means that a nonzero PDATA->MINLEN
1958 doesn't reflect the true lower bound of the range when PDATA->MAXLEN
1959 is -1 (in that case, the actual range is indeterminate, i.e.,
1960 [0, PTRDIFF_MAX - 2]. */
88d0c3f0 1961
3f343040 1962bool
84de9426 1963get_range_strlen (tree arg, c_strlen_data *pdata, unsigned eltsize)
88d0c3f0
MS
1964{
1965 bitmap visited = NULL;
a7160771 1966 tree maxbound = pdata->maxbound;
88d0c3f0 1967
84de9426 1968 if (!get_range_strlen (arg, &visited, SRK_LENRANGE, pdata, eltsize))
730832cd 1969 {
5d6655eb
MS
1970 /* On failure extend the length range to an impossible maximum
1971 (a valid MAXLEN must be less than PTRDIFF_MAX - 1). Other
1972 members can stay unchanged regardless. */
1973 pdata->minlen = ssize_int (0);
1974 pdata->maxlen = build_all_ones_cst (size_type_node);
730832cd 1975 }
5d6655eb
MS
1976 else if (!pdata->minlen)
1977 pdata->minlen = ssize_int (0);
1978
a7160771
MS
1979 /* If it's unchanged from it initial non-null value, set the conservative
1980 MAXBOUND to SIZE_MAX. Otherwise leave it null (if it is null). */
1981 if (maxbound && pdata->maxbound == maxbound)
1982 pdata->maxbound = build_all_ones_cst (size_type_node);
88d0c3f0
MS
1983
1984 if (visited)
1985 BITMAP_FREE (visited);
3f343040 1986
03c4a945 1987 return !integer_all_onesp (pdata->maxlen);
88d0c3f0
MS
1988}
1989
5d6655eb
MS
1990/* Return the maximum value for ARG given RKIND (see strlen_range_kind).
1991 For ARG of pointer types, NONSTR indicates if the caller is prepared
1992 to handle unterminated strings. For integer ARG and when RKIND ==
1993 SRK_INT_VALUE, NONSTR must be null.
e08341bb 1994
5d6655eb
MS
1995 If an unterminated array is discovered and our caller handles
1996 unterminated arrays, then bubble up the offending DECL and
e08341bb
MS
1997 return the maximum size. Otherwise return NULL. */
1998
598f7235
MS
1999static tree
2000get_maxval_strlen (tree arg, strlen_range_kind rkind, tree *nonstr = NULL)
dcb7fae2 2001{
598f7235
MS
2002 /* A non-null NONSTR is meaningless when determining the maximum
2003 value of an integer ARG. */
2004 gcc_assert (rkind != SRK_INT_VALUE || nonstr == NULL);
2005 /* ARG must have an integral type when RKIND says so. */
2006 gcc_assert (rkind != SRK_INT_VALUE || INTEGRAL_TYPE_P (TREE_TYPE (arg)));
2007
dcb7fae2 2008 bitmap visited = NULL;
3f343040 2009
5d6655eb
MS
2010 /* Reset DATA.MAXLEN if the call fails or when DATA.MAXLEN
2011 is unbounded. */
730832cd 2012 c_strlen_data lendata = { };
03c4a945 2013 if (!get_range_strlen (arg, &visited, rkind, &lendata, /* eltsize = */1))
730832cd 2014 lendata.maxlen = NULL_TREE;
5d6655eb
MS
2015 else if (lendata.maxlen && integer_all_onesp (lendata.maxlen))
2016 lendata.maxlen = NULL_TREE;
2017
dcb7fae2
RB
2018 if (visited)
2019 BITMAP_FREE (visited);
2020
e08341bb
MS
2021 if (nonstr)
2022 {
2023 /* For callers prepared to handle unterminated arrays set
2024 *NONSTR to point to the declaration of the array and return
2025 the maximum length/size. */
730832cd
MS
2026 *nonstr = lendata.decl;
2027 return lendata.maxlen;
e08341bb
MS
2028 }
2029
2030 /* Fail if the constant array isn't nul-terminated. */
730832cd 2031 return lendata.decl ? NULL_TREE : lendata.maxlen;
dcb7fae2
RB
2032}
2033
/* Fold a call to the strcpy builtin with arguments DEST and SRC; GSI
   points at the call.  If SRC and DEST are identical, the call is
   replaced by DEST (with a -Wrestrict diagnostic).  Otherwise, when
   optimizing for speed and the length of SRC is a known constant, the
   call is transformed into memcpy (DEST, SRC, strlen (SRC) + 1).
   Return true if the call was simplified and false otherwise.  */

static bool
gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
			    tree dest, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    {
      /* Issue -Wrestrict unless the pointers are null (those do
	 not point to objects and so do not indicate an overlap;
	 such calls could be the result of sanitization and jump
	 threading).  */
      if (!integer_zerop (dest) && !warning_suppressed_p (stmt, OPT_Wrestrict))
	{
	  tree func = gimple_call_fndecl (stmt);

	  warning_at (loc, OPT_Wrestrict,
		      "%qD source argument is the same as destination",
		      func);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  /* The memcpy expansion below trades size for speed; skip it at -Os.  */
  if (optimize_function_for_size_p (cfun))
    return false;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  /* Set to non-null if ARG refers to an unterminated array.  */
  tree nonstr = NULL;
  tree len = get_maxval_strlen (src, SRK_STRLEN, &nonstr);

  if (nonstr)
    {
      /* Avoid folding calls with unterminated arrays.  Warn once and
	 suppress further -Wstringop-overread warnings for this call.  */
      if (!warning_suppressed_p (stmt, OPT_Wstringop_overread))
	warn_string_no_nul (loc, stmt, "strcpy", src, nonstr);
      suppress_warning (stmt, OPT_Wstringop_overread);
      return false;
    }

  if (!len)
    return false;

  /* Build strlen (SRC) + 1 as the memcpy size, gimplifying it before
     the call being replaced.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2098
/* Fold a call to the strncpy builtin with arguments DEST, SRC, and LEN;
   GSI points at the call.  A zero-length copy is replaced by DEST (with
   a -Wstringop-truncation diagnostic unless DEST has attribute
   nonstring).  When both LEN and the length of SRC are known constants
   and the copy includes SRC's terminating nul, the call is transformed
   into memcpy.  Return true if the call was simplified and false
   otherwise.  */

static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
			     tree dest, tree src, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Avoid warning if the destination refers to an array/pointer
	 decorated with attribute nonstring.  */
      if (!nonstring)
	{
	  tree fndecl = gimple_call_fndecl (stmt);

	  /* Warn about the lack of nul termination: the result is not
	     a (nul-terminated) string.  */
	  tree slen = get_maxval_strlen (src, SRK_STRLEN);
	  if (slen && !integer_zerop (slen))
	    warning_at (loc, OPT_Wstringop_truncation,
			"%qD destination unchanged after copying no bytes "
			"from a string of length %E",
			fndecl, slen);
	  else
	    warning_at (loc, OPT_Wstringop_truncation,
			"%qD destination unchanged after copying no bytes",
			fndecl);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, SRK_STRLEN);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  /* The size of the source string including the terminating nul.  */
  tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (ssize, len))
    return false;

  /* Diagnose truncation that leaves the copy unterminated.  */
  maybe_diag_stxncpy_trunc (*gsi, src, len);

  /* OK transform into builtin memcpy.  */
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}
2173
/* Fold a call to builtin strchr or strrchr (IS_STRRCHR selects which);
   GSI points at the call.
   If both arguments are constant, evaluate and fold the result,
   otherwise simplify str(r)chr (str, 0) into str + strlen (str).
   In general strlen is significantly faster than strchr
   due to being a simpler operation.
   Return true if the call was simplified and false otherwise.  */
static bool
gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree str = gimple_call_arg (stmt, 0);
  tree c = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  const char *p;
  char ch;

  /* Without an lhs the result is unused; leave the call alone.  */
  if (!gimple_call_lhs (stmt))
    return false;

  /* Avoid folding if the first argument is not a nul-terminated array.
     Defer warning until later.  */
  if (!check_nul_terminated_array (NULL_TREE, str))
    return false;

  /* Both arguments constant: compute the result at compile time.  */
  if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
    {
      const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);

      if (p1 == NULL)
	{
	  /* Character not found: the result is a null pointer.  */
	  replace_call_with_value (gsi, integer_zero_node);
	  return true;
	}

      /* Found at offset P1 - P: fold to STR + offset.  */
      tree len = build_int_cst (size_type_node, p1 - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
					      POINTER_PLUS_EXPR, str, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* The strlen transformation below only applies when searching for
     the terminating nul.  */
  if (!integer_zerop (c))
    return false;

  /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size.  */
  if (is_strrchr && optimize_function_for_size_p (cfun))
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);

      if (strchr_fn)
	{
	  gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}

      return false;
    }

  tree len;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);

  if (!strlen_fn)
    return false;

  /* Create newstr = strlen (str).  */
  gimple_seq stmts = NULL;
  gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
  gimple_set_location (new_stmt, loc);
  len = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (new_stmt, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);

  /* Create (str p+ strlen (str)).  */
  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  POINTER_PLUS_EXPR, str, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);
  gsi_replace_with_seq_vops (gsi, stmts);
  /* gsi now points at the assignment to the lhs, get a
     stmt iterator to the strlen.
     ???  We can't use gsi_for_stmt as that doesn't work when the
     CFG isn't built yet.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
2262
c8952930
JJ
2263/* Fold function call to builtin strstr.
2264 If both arguments are constant, evaluate and fold the result,
2265 additionally fold strstr (x, "") into x and strstr (x, "c")
2266 into strchr (x, 'c'). */
2267static bool
2268gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
2269{
2270 gimple *stmt = gsi_stmt (*gsi);
b5338fb3
MS
2271 if (!gimple_call_lhs (stmt))
2272 return false;
2273
c8952930
JJ
2274 tree haystack = gimple_call_arg (stmt, 0);
2275 tree needle = gimple_call_arg (stmt, 1);
c8952930 2276
b5338fb3
MS
2277 /* Avoid folding if either argument is not a nul-terminated array.
2278 Defer warning until later. */
2279 if (!check_nul_terminated_array (NULL_TREE, haystack)
2280 || !check_nul_terminated_array (NULL_TREE, needle))
c8952930
JJ
2281 return false;
2282
b5338fb3 2283 const char *q = c_getstr (needle);
c8952930
JJ
2284 if (q == NULL)
2285 return false;
2286
b5338fb3 2287 if (const char *p = c_getstr (haystack))
c8952930
JJ
2288 {
2289 const char *r = strstr (p, q);
2290
2291 if (r == NULL)
2292 {
2293 replace_call_with_value (gsi, integer_zero_node);
2294 return true;
2295 }
2296
2297 tree len = build_int_cst (size_type_node, r - p);
2298 gimple_seq stmts = NULL;
2299 gimple *new_stmt
2300 = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
2301 haystack, len);
2302 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2303 gsi_replace_with_seq_vops (gsi, stmts);
2304 return true;
2305 }
2306
2307 /* For strstr (x, "") return x. */
2308 if (q[0] == '\0')
2309 {
2310 replace_call_with_value (gsi, haystack);
2311 return true;
2312 }
2313
2314 /* Transform strstr (x, "c") into strchr (x, 'c'). */
2315 if (q[1] == '\0')
2316 {
2317 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
2318 if (strchr_fn)
2319 {
2320 tree c = build_int_cst (integer_type_node, q[0]);
2321 gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
2322 replace_call_with_call_and_fold (gsi, repl);
2323 return true;
2324 }
2325 }
2326
2327 return false;
2328}
2329
/* Fold a call to the strcat builtin with arguments DST and SRC; GSI
   points at the call.  If SRC is the empty string, the call is replaced
   by DST.  Otherwise, when optimizing for speed and the length of SRC
   is a known constant, the call is expanded into
     tmp = strlen (DST); memcpy (DST + tmp, SRC, strlen (SRC) + 1);
   so the copy can potentially be done by pieces.  Return true if the
   call was simplified and false otherwise.  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* The expansion below trades size for speed; skip it for cold code.  */
  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, SRK_STRLEN);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple *repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  newdst = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  /* The memcpy size is strlen (SRC) + 1 to include the terminating nul.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      /* strcat returns DST: materialize that into the original lhs.  */
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
	 stmt iterator to the memcpy call.
	 ???  We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
2421
07f1cf56
RB
2422/* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
2423 are the arguments to the call. */
2424
2425static bool
2426gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
2427{
355fe088 2428 gimple *stmt = gsi_stmt (*gsi);
07f1cf56
RB
2429 tree dest = gimple_call_arg (stmt, 0);
2430 tree src = gimple_call_arg (stmt, 1);
2431 tree size = gimple_call_arg (stmt, 2);
2432 tree fn;
2433 const char *p;
2434
2435
2436 p = c_getstr (src);
2437 /* If the SRC parameter is "", return DEST. */
2438 if (p && *p == '\0')
2439 {
2440 replace_call_with_value (gsi, dest);
2441 return true;
2442 }
2443
2444 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
2445 return false;
2446
2447 /* If __builtin_strcat_chk is used, assume strcat is available. */
2448 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
2449 if (!fn)
2450 return false;
2451
355fe088 2452 gimple *repl = gimple_build_call (fn, 2, dest, src);
07f1cf56
RB
2453 replace_call_with_call_and_fold (gsi, repl);
2454 return true;
2455}
2456
/* Simplify a call to the strncat builtin; GSI points at the call.
   Replace the call by its first argument when nothing would be
   appended, diagnose bounds that equal or exceed the destination size
   or equal the source length, and otherwise lower the call to strcat
   when the bound is known not to truncate.  Return true if the call
   was simplified.  */

static bool
gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dst = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  const char *p = c_getstr (src);

  /* If the requested length is zero, or the src parameter string
     length is zero, return the dst parameter.  */
  if (integer_zerop (len) || (p && *p == '\0'))
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* The checks below need both a constant bound and a constant source.  */
  if (TREE_CODE (len) != INTEGER_CST || !p)
    return false;

  unsigned srclen = strlen (p);

  int cmpsrc = compare_tree_int (len, srclen);

  /* Return early if the requested len is less than the string length.
     Warnings will be issued elsewhere later.  */
  if (cmpsrc < 0)
    return false;

  unsigned HOST_WIDE_INT dstsize;

  bool nowarn = warning_suppressed_p (stmt, OPT_Wstringop_overflow_);

  if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
    {
      int cmpdst = compare_tree_int (len, dstsize);

      if (cmpdst >= 0)
	{
	  tree fndecl = gimple_call_fndecl (stmt);

	  /* Strncat copies (at most) LEN bytes and always appends
	     the terminating NUL so the specified bound should never
	     be equal to (or greater than) the size of the destination.
	     If it is, the copy could overflow.  */
	  location_t loc = gimple_location (stmt);
	  nowarn = warning_at (loc, OPT_Wstringop_overflow_,
			       cmpdst == 0
			       ? G_("%qD specified bound %E equals "
				    "destination size")
			       : G_("%qD specified bound %E exceeds "
				    "destination size %wu"),
			       fndecl, len, dstsize);
	  if (nowarn)
	    suppress_warning (stmt, OPT_Wstringop_overflow_);
	}
    }

  if (!nowarn && cmpsrc == 0)
    {
      tree fndecl = gimple_call_fndecl (stmt);
      location_t loc = gimple_location (stmt);

      /* To avoid possible overflow the specified bound should also
	 not be equal to the length of the source, even when the size
	 of the destination is unknown (it's not an uncommon mistake
	 to specify as the bound to strncpy the length of the source).  */
      if (warning_at (loc, OPT_Wstringop_overflow_,
		      "%qD specified bound %E equals source length",
		      fndecl, len))
	suppress_warning (stmt, OPT_Wstringop_overflow_);
    }

  tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);

  /* If the replacement _DECL isn't initialized, don't do the
     transformation.  */
  if (!fn)
    return false;

  /* Otherwise, emit a call to strcat.  */
  gcall *repl = gimple_build_call (fn, 2, dst, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2545
745583f9
RB
2546/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2547 LEN, and SIZE. */
2548
2549static bool
2550gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
2551{
355fe088 2552 gimple *stmt = gsi_stmt (*gsi);
745583f9
RB
2553 tree dest = gimple_call_arg (stmt, 0);
2554 tree src = gimple_call_arg (stmt, 1);
2555 tree len = gimple_call_arg (stmt, 2);
2556 tree size = gimple_call_arg (stmt, 3);
2557 tree fn;
2558 const char *p;
2559
2560 p = c_getstr (src);
2561 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2562 if ((p && *p == '\0')
2563 || integer_zerop (len))
2564 {
2565 replace_call_with_value (gsi, dest);
2566 return true;
2567 }
2568
2569 if (! tree_fits_uhwi_p (size))
2570 return false;
2571
2572 if (! integer_all_onesp (size))
2573 {
2574 tree src_len = c_strlen (src, 1);
2575 if (src_len
2576 && tree_fits_uhwi_p (src_len)
2577 && tree_fits_uhwi_p (len)
2578 && ! tree_int_cst_lt (len, src_len))
2579 {
2580 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2581 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
2582 if (!fn)
2583 return false;
2584
355fe088 2585 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
745583f9
RB
2586 replace_call_with_call_and_fold (gsi, repl);
2587 return true;
2588 }
2589 return false;
2590 }
2591
2592 /* If __builtin_strncat_chk is used, assume strncat is available. */
2593 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2594 if (!fn)
2595 return false;
2596
355fe088 2597 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
745583f9
RB
2598 replace_call_with_call_and_fold (gsi, repl);
2599 return true;
2600}
2601
a918bfbf
ML
2602/* Build and append gimple statements to STMTS that would load a first
2603 character of a memory location identified by STR. LOC is location
2604 of the statement. */
2605
2606static tree
2607gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2608{
2609 tree var;
2610
2611 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2612 tree cst_uchar_ptr_node
2613 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2614 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2615
2616 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2617 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2618 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2619
2620 gimple_assign_set_lhs (stmt, var);
2621 gimple_seq_add_stmt_without_update (stmts, stmt);
2622
2623 return var;
2624}
2625
d2f8402a 2626/* Fold a call to the str{n}{case}cmp builtin pointed by GSI iterator. */
a918bfbf
ML
2627
2628static bool
2629gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
2630{
2631 gimple *stmt = gsi_stmt (*gsi);
2632 tree callee = gimple_call_fndecl (stmt);
2633 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2634
2635 tree type = integer_type_node;
2636 tree str1 = gimple_call_arg (stmt, 0);
2637 tree str2 = gimple_call_arg (stmt, 1);
2638 tree lhs = gimple_call_lhs (stmt);
d86d8b35
MS
2639
2640 tree bound_node = NULL_TREE;
d2f8402a 2641 unsigned HOST_WIDE_INT bound = HOST_WIDE_INT_M1U;
a918bfbf
ML
2642
2643 /* Handle strncmp and strncasecmp functions. */
2644 if (gimple_call_num_args (stmt) == 3)
2645 {
d86d8b35
MS
2646 bound_node = gimple_call_arg (stmt, 2);
2647 if (tree_fits_uhwi_p (bound_node))
2648 bound = tree_to_uhwi (bound_node);
a918bfbf
ML
2649 }
2650
d86d8b35 2651 /* If the BOUND parameter is zero, return zero. */
d2f8402a 2652 if (bound == 0)
a918bfbf
ML
2653 {
2654 replace_call_with_value (gsi, integer_zero_node);
2655 return true;
2656 }
2657
2658 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
2659 if (operand_equal_p (str1, str2, 0))
2660 {
2661 replace_call_with_value (gsi, integer_zero_node);
2662 return true;
2663 }
2664
d2f8402a
MS
2665 /* Initially set to the number of characters, including the terminating
2666 nul if each array has one. LENx == strnlen (Sx, LENx) implies that
2667 the array Sx is not terminated by a nul.
2668 For nul-terminated strings then adjusted to their length so that
2669 LENx == NULPOSx holds. */
2670 unsigned HOST_WIDE_INT len1 = HOST_WIDE_INT_MAX, len2 = len1;
866626ef
MS
2671 const char *p1 = getbyterep (str1, &len1);
2672 const char *p2 = getbyterep (str2, &len2);
d2f8402a
MS
2673
2674 /* The position of the terminating nul character if one exists, otherwise
2675 a value greater than LENx. */
2676 unsigned HOST_WIDE_INT nulpos1 = HOST_WIDE_INT_MAX, nulpos2 = nulpos1;
2677
2678 if (p1)
2679 {
2680 size_t n = strnlen (p1, len1);
2681 if (n < len1)
2682 len1 = nulpos1 = n;
2683 }
2684
2685 if (p2)
2686 {
2687 size_t n = strnlen (p2, len2);
2688 if (n < len2)
2689 len2 = nulpos2 = n;
2690 }
a918bfbf
ML
2691
2692 /* For known strings, return an immediate value. */
2693 if (p1 && p2)
2694 {
2695 int r = 0;
2696 bool known_result = false;
2697
2698 switch (fcode)
2699 {
2700 case BUILT_IN_STRCMP:
8b0b334a 2701 case BUILT_IN_STRCMP_EQ:
d2f8402a 2702 if (len1 != nulpos1 || len2 != nulpos2)
a918bfbf 2703 break;
d2f8402a
MS
2704
2705 r = strcmp (p1, p2);
2706 known_result = true;
2707 break;
2708
a918bfbf 2709 case BUILT_IN_STRNCMP:
8b0b334a 2710 case BUILT_IN_STRNCMP_EQ:
a918bfbf 2711 {
d86d8b35
MS
2712 if (bound == HOST_WIDE_INT_M1U)
2713 break;
2714
d2f8402a
MS
2715 /* Reduce the bound to be no more than the length
2716 of the shorter of the two strings, or the sizes
2717 of the unterminated arrays. */
2718 unsigned HOST_WIDE_INT n = bound;
2719
2720 if (len1 == nulpos1 && len1 < n)
2721 n = len1 + 1;
2722 if (len2 == nulpos2 && len2 < n)
2723 n = len2 + 1;
2724
2725 if (MIN (nulpos1, nulpos2) + 1 < n)
a918bfbf 2726 break;
d2f8402a
MS
2727
2728 r = strncmp (p1, p2, n);
a918bfbf
ML
2729 known_result = true;
2730 break;
2731 }
2732 /* Only handleable situation is where the string are equal (result 0),
2733 which is already handled by operand_equal_p case. */
2734 case BUILT_IN_STRCASECMP:
2735 break;
2736 case BUILT_IN_STRNCASECMP:
2737 {
d2f8402a 2738 if (bound == HOST_WIDE_INT_M1U)
a918bfbf 2739 break;
d2f8402a 2740 r = strncmp (p1, p2, bound);
a918bfbf
ML
2741 if (r == 0)
2742 known_result = true;
5de73c05 2743 break;
a918bfbf
ML
2744 }
2745 default:
2746 gcc_unreachable ();
2747 }
2748
2749 if (known_result)
2750 {
2751 replace_call_with_value (gsi, build_cmp_result (type, r));
2752 return true;
2753 }
2754 }
2755
d2f8402a 2756 bool nonzero_bound = (bound >= 1 && bound < HOST_WIDE_INT_M1U)
a918bfbf 2757 || fcode == BUILT_IN_STRCMP
8b0b334a 2758 || fcode == BUILT_IN_STRCMP_EQ
a918bfbf
ML
2759 || fcode == BUILT_IN_STRCASECMP;
2760
2761 location_t loc = gimple_location (stmt);
2762
2763 /* If the second arg is "", return *(const unsigned char*)arg1. */
d2f8402a 2764 if (p2 && *p2 == '\0' && nonzero_bound)
a918bfbf
ML
2765 {
2766 gimple_seq stmts = NULL;
2767 tree var = gimple_load_first_char (loc, str1, &stmts);
2768 if (lhs)
2769 {
2770 stmt = gimple_build_assign (lhs, NOP_EXPR, var);
2771 gimple_seq_add_stmt_without_update (&stmts, stmt);
2772 }
2773
2774 gsi_replace_with_seq_vops (gsi, stmts);
2775 return true;
2776 }
2777
2778 /* If the first arg is "", return -*(const unsigned char*)arg2. */
d2f8402a 2779 if (p1 && *p1 == '\0' && nonzero_bound)
a918bfbf
ML
2780 {
2781 gimple_seq stmts = NULL;
2782 tree var = gimple_load_first_char (loc, str2, &stmts);
2783
2784 if (lhs)
2785 {
2786 tree c = create_tmp_reg_or_ssa_name (integer_type_node);
2787 stmt = gimple_build_assign (c, NOP_EXPR, var);
2788 gimple_seq_add_stmt_without_update (&stmts, stmt);
2789
2790 stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
2791 gimple_seq_add_stmt_without_update (&stmts, stmt);
2792 }
2793
2794 gsi_replace_with_seq_vops (gsi, stmts);
2795 return true;
2796 }
2797
d2f8402a 2798 /* If BOUND is one, return an expression corresponding to
a918bfbf 2799 (*(const unsigned char*)arg2 - *(const unsigned char*)arg1). */
d2f8402a 2800 if (fcode == BUILT_IN_STRNCMP && bound == 1)
a918bfbf
ML
2801 {
2802 gimple_seq stmts = NULL;
2803 tree temp1 = gimple_load_first_char (loc, str1, &stmts);
2804 tree temp2 = gimple_load_first_char (loc, str2, &stmts);
2805
2806 if (lhs)
2807 {
2808 tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
2809 gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
2810 gimple_seq_add_stmt_without_update (&stmts, convert1);
2811
2812 tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
2813 gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
2814 gimple_seq_add_stmt_without_update (&stmts, convert2);
2815
2816 stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
2817 gimple_seq_add_stmt_without_update (&stmts, stmt);
2818 }
2819
2820 gsi_replace_with_seq_vops (gsi, stmts);
2821 return true;
2822 }
2823
d2f8402a
MS
2824 /* If BOUND is greater than the length of one constant string,
2825 and the other argument is also a nul-terminated string, replace
2826 strncmp with strcmp. */
2827 if (fcode == BUILT_IN_STRNCMP
2828 && bound > 0 && bound < HOST_WIDE_INT_M1U
2829 && ((p2 && len2 < bound && len2 == nulpos2)
2830 || (p1 && len1 < bound && len1 == nulpos1)))
caed5c92
QZ
2831 {
2832 tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
2833 if (!fn)
2834 return false;
2835 gimple *repl = gimple_build_call (fn, 2, str1, str2);
2836 replace_call_with_call_and_fold (gsi, repl);
2837 return true;
2838 }
2839
a918bfbf
ML
2840 return false;
2841}
2842
488c6247
ML
/* Fold a call to the memchr builtin pointed to by the GSI iterator.
   The call is replaced either with a null pointer constant, when the
   search is known to fail, or with ARG1 plus the constant offset of
   the matching byte.  Returns true if the call was simplified.  */

static bool
gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree arg1 = gimple_call_arg (stmt, 0);	/* Searched object.  */
  tree arg2 = gimple_call_arg (stmt, 1);	/* Byte searched for.  */
  tree len = gimple_call_arg (stmt, 2);		/* Number of bytes to search.  */

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
      return true;
    }

  /* Punt unless both the character and the length are known constants
     (target_char_cst_p also converts ARG2 to a host char).  */
  char c;
  if (TREE_CODE (arg2) != INTEGER_CST
      || !tree_fits_uhwi_p (len)
      || !target_char_cst_p (arg2, &c))
    return false;

  unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
  unsigned HOST_WIDE_INT string_length;
  /* Non-null when ARG1 has a known constant byte representation.  */
  const char *p1 = getbyterep (arg1, &string_length);

  if (p1)
    {
      /* Search the host copy of the bytes.  Only the part of the
	 representation actually available (STRING_LENGTH) is valid.  */
      const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
      if (r == NULL)
	{
	  tree mem_size, offset_node;
	  byte_representation (arg1, &offset_node, &mem_size, NULL);
	  unsigned HOST_WIDE_INT offset = (offset_node == NULL_TREE)
					  ? 0 : tree_to_uhwi (offset_node);
	  /* MEM_SIZE is the size of the array the string literal
	     is stored in.  */
	  unsigned HOST_WIDE_INT string_size = tree_to_uhwi (mem_size) - offset;
	  gcc_checking_assert (string_length <= string_size);
	  /* The match can only be ruled out when the search does not
	     extend past the object; otherwise leave the call alone.  */
	  if (length <= string_size)
	    {
	      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
	      return true;
	    }
	}
      else
	{
	  /* Found a match: fold to ARG1 + OFFSET.  */
	  unsigned HOST_WIDE_INT offset = r - p1;
	  gimple_seq stmts = NULL;
	  if (lhs != NULL_TREE)
	    {
	      tree offset_cst = build_int_cst (sizetype, offset);
	      gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
						   arg1, offset_cst);
	      gimple_seq_add_stmt_without_update (&stmts, stmt);
	    }
	  else
	    /* No LHS: the call has no effect, replace it with a nop so
	       the virtual operands are still transferred correctly.  */
	    gimple_seq_add_stmt_without_update (&stmts,
						gimple_build_nop ());

	  gsi_replace_with_seq_vops (gsi, stmts);
	  return true;
	}
    }

  return false;
}
a918bfbf 2912
fef5a0d9
RB
2913/* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
2914 to the call. IGNORE is true if the value returned
2915 by the builtin will be ignored. UNLOCKED is true is true if this
2916 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
2917 the known length of the string. Return NULL_TREE if no simplification
2918 was possible. */
2919
2920static bool
2921gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
fef5a0d9 2922 tree arg0, tree arg1,
dcb7fae2 2923 bool unlocked)
fef5a0d9 2924{
355fe088 2925 gimple *stmt = gsi_stmt (*gsi);
dcb7fae2 2926
fef5a0d9
RB
2927 /* If we're using an unlocked function, assume the other unlocked
2928 functions exist explicitly. */
2929 tree const fn_fputc = (unlocked
2930 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
2931 : builtin_decl_implicit (BUILT_IN_FPUTC));
2932 tree const fn_fwrite = (unlocked
2933 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
2934 : builtin_decl_implicit (BUILT_IN_FWRITE));
2935
2936 /* If the return value is used, don't do the transformation. */
dcb7fae2 2937 if (gimple_call_lhs (stmt))
fef5a0d9
RB
2938 return false;
2939
fef5a0d9
RB
2940 /* Get the length of the string passed to fputs. If the length
2941 can't be determined, punt. */
598f7235 2942 tree len = get_maxval_strlen (arg0, SRK_STRLEN);
fef5a0d9
RB
2943 if (!len
2944 || TREE_CODE (len) != INTEGER_CST)
2945 return false;
2946
2947 switch (compare_tree_int (len, 1))
2948 {
2949 case -1: /* length is 0, delete the call entirely . */
2950 replace_call_with_value (gsi, integer_zero_node);
2951 return true;
2952
2953 case 0: /* length is 1, call fputc. */
2954 {
2955 const char *p = c_getstr (arg0);
2956 if (p != NULL)
2957 {
2958 if (!fn_fputc)
2959 return false;
2960
355fe088 2961 gimple *repl = gimple_build_call (fn_fputc, 2,
fef5a0d9
RB
2962 build_int_cst
2963 (integer_type_node, p[0]), arg1);
2964 replace_call_with_call_and_fold (gsi, repl);
2965 return true;
2966 }
2967 }
2968 /* FALLTHROUGH */
2969 case 1: /* length is greater than 1, call fwrite. */
2970 {
2971 /* If optimizing for size keep fputs. */
2972 if (optimize_function_for_size_p (cfun))
2973 return false;
2974 /* New argument list transforming fputs(string, stream) to
2975 fwrite(string, 1, len, stream). */
2976 if (!fn_fwrite)
2977 return false;
2978
355fe088 2979 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
fef5a0d9
RB
2980 size_one_node, len, arg1);
2981 replace_call_with_call_and_fold (gsi, repl);
2982 return true;
2983 }
2984 default:
2985 gcc_unreachable ();
2986 }
2987 return false;
2988}
2989
/* Fold a call to a __mem{cpy,pcpy,move,set}_chk builtin located at GSI.
   DEST, SRC, LEN, and SIZE are the arguments to the call and FCODE is
   the BUILT_IN_* code of the builtin.  When the checked length is known
   not to exceed SIZE, the call is replaced by the corresponding
   unchecked mem* builtin.  Returns true if the call was simplified.  */

static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
				tree dest, tree src, tree len, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	{
	  replace_call_with_value (gsi, dest);
	  return true;
	}
      else
	{
	  /* __mempcpy_chk returns DEST + LEN; materialize that sum
	     before the call and substitute it for the result.  */
	  gimple_seq stmts = NULL;
	  len = gimple_convert_to_ptrofftype (&stmts, loc, len);
	  tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
				    TREE_TYPE (dest), dest, len);
	  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	  replace_call_with_value (gsi, temp);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
  /* An all-ones SIZE skips the range check below (it is the value
     __builtin_object_size uses when the object size is unknown —
     presumably; confirm against the callers).  */
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	      return false;
	    }
	}
      else
	maxlen = len;

      /* Punt if the copied amount may exceed the destination size.  */
      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
3090
/* Fold a call to a __st[rp]cpy_chk builtin located at GSI.
   DEST, SRC, and SIZE are the arguments to the call and FCODE is the
   BUILT_IN_* code of the builtin.  When the source length is known not
   to exceed SIZE, the call is replaced by the unchecked st[rp]cpy, or
   by __memcpy_chk when only a non-constant length expression is known.
   Returns true if the call was simplified.  */

static bool
gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
				tree dest,
				tree src, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree len, fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    {
      /* Issue -Wrestrict unless the pointers are null (those do
	 not point to objects and so do not indicate an overlap;
	 such calls could be the result of sanitization and jump
	 threading).  */
      if (!integer_zerop (dest)
	  && !warning_suppressed_p (stmt, OPT_Wrestrict))
	{
	  tree func = gimple_call_fndecl (stmt);

	  warning_at (loc, OPT_Wrestrict,
		      "%qD source argument is the same as destination",
		      func);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  /* Upper bound on strlen (SRC), used when c_strlen cannot produce
     a constant.  */
  tree maxlen = get_maxval_strlen (src, SRK_STRLENMAX);
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return false;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 3, dest, src, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return false;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return false;

	      /* Emit LEN + 1 (for the terminating nul) as the copy
		 amount, gimplified before the call.  */
	      gimple_seq stmts = NULL;
	      len = force_gimple_operand (len, &stmts, true, NULL_TREE);
	      len = gimple_convert (&stmts, loc, size_type_node, len);
	      len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
				  build_int_cst (size_type_node, 1));
	      gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	      gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	maxlen = len;

      /* Only fold when the bound on the string length is strictly
	 smaller than the destination size.  */
      if (! tree_int_cst_lt (maxlen, size))
	return false;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
3196
/* Fold a call to a __st{r,p}ncpy_chk builtin located at GSI.
   DEST, SRC, LEN, and SIZE are the arguments to the call and FCODE is
   the BUILT_IN_* code of the builtin.  When LEN (or its known maximum)
   does not exceed SIZE, the call is replaced by the unchecked
   st{r,p}ncpy.  Returns true if the call was simplified.  */

static bool
gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
				 tree dest, tree src,
				 tree len, tree size,
				 enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
    {
      /* If return value of __stpncpy_chk is ignored,
	 optimize into __strncpy_chk.  */
      fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
      if (fn)
	{
	  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  /* Upper bound on LEN, used when LEN itself is not constant.  */
  tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      /* Punt if the copied amount may exceed the destination size.  */
      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
			      ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
3256
2625bb5d
RB
/* Fold the stpcpy call at GSI with arguments DEST and SRC.  If the
   result is unused the call becomes strcpy; if the source length is a
   known constant the call becomes memcpy followed by an assignment of
   DEST + LEN to the original LHS.  Returns true if a simplification
   was made, false otherwise.  */

static bool
gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  location_t loc = gimple_location (stmt);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree fn, lenp1;

  /* If the result is unused, replace stpcpy with strcpy.  */
  if (gimple_call_lhs (stmt) == NULL_TREE)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;
      gimple_call_set_fndecl (stmt, fn);
      fold_stmt (gsi);
      return true;
    }

  /* DATA.DECL is set to non-null if SRC refers to an unterminated
     array.  */
  c_strlen_data data = { };
  /* The size of the unterminated array if SRC refers to one.  */
  tree size;
  /* True if the size is exact/constant, false if it's the lower bound
     of a range.  */
  bool exact;
  tree len = c_strlen (src, 1, &data, 1);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    {
      data.decl = unterminated_array (src, &size, &exact);
      if (!data.decl)
	return false;
    }

  if (data.decl)
    {
      /* Avoid folding calls with unterminated arrays.  */
      if (!warning_suppressed_p (stmt, OPT_Wstringop_overread))
	warn_string_no_nul (loc, stmt, "stpcpy", src, data.decl, size,
			    exact);
      /* Suppress the warning on the statement so it is only issued
	 once.  */
      suppress_warning (stmt, OPT_Wstringop_overread);
      return false;
    }

  if (optimize_function_for_size_p (cfun)
      /* If length is zero it's small enough.  */
      && !integer_zerop (len))
    return false;

  /* If the source has a known length replace stpcpy with memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  /* Build memcpy (DEST, SRC, LEN + 1) before the original call,
     transferring the virtual operands to it.  */
  gimple_seq stmts = NULL;
  tree tem = gimple_convert (&stmts, loc, size_type_node, len);
  lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
			tem, build_int_cst (size_type_node, 1));
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
  gimple_move_vops (repl, stmt);
  gsi_insert_before (gsi, repl, GSI_SAME_STMT);
  /* Replace the result with dest + len.  */
  stmts = NULL;
  tem = gimple_convert (&stmts, loc, sizetype, len);
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
				      POINTER_PLUS_EXPR, dest, tem);
  gsi_replace (gsi, ret, false);
  /* Finally fold the memcpy call.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
3337
fef5a0d9
RB
/* Fold a call to a __{,v}snprintf_chk builtin located at GSI.
   FCODE is either BUILT_IN_SNPRINTF_CHK or BUILT_IN_VSNPRINTF_CHK.
   When the length bound is known not to exceed SIZE (or SIZE is
   all-ones) and the format/flag allow it, the call is rewritten in
   place into the corresponding unchecked {,v}snprintf.  Returns true
   if the call was simplified.  */

static bool
gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
				  enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (gimple_call_num_args (stmt) < 5)
    return false;

  dest = gimple_call_arg (stmt, 0);
  len = gimple_call_arg (stmt, 1);
  flag = gimple_call_arg (stmt, 2);
  size = gimple_call_arg (stmt, 3);
  fmt = gimple_call_arg (stmt, 4);

  if (! tree_fits_uhwi_p (size))
    return false;

  if (! integer_all_onesp (size))
    {
      tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      /* Punt if the length bound may exceed the destination size.  */
      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  if (!init_target_chars ())
    return false;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
			      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 5 argument by 3 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, len);
  gimple_call_set_arg (stmt, 2, fmt);
  /* Shift the variadic arguments down by two slots (FLAG and SIZE
     are dropped) and shrink the statement accordingly.  */
  for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
cbdd87d4 3418
fef5a0d9
RB
3419/* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
3420 Return NULL_TREE if a normal call should be emitted rather than
3421 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
3422 or BUILT_IN_VSPRINTF_CHK. */
cbdd87d4 3423
fef5a0d9
RB
3424static bool
3425gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
3426 enum built_in_function fcode)
3427{
538dd0b7 3428 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
fef5a0d9
RB
3429 tree dest, size, len, fn, fmt, flag;
3430 const char *fmt_str;
3431 unsigned nargs = gimple_call_num_args (stmt);
cbdd87d4 3432
fef5a0d9
RB
3433 /* Verify the required arguments in the original call. */
3434 if (nargs < 4)
3435 return false;
3436 dest = gimple_call_arg (stmt, 0);
3437 flag = gimple_call_arg (stmt, 1);
3438 size = gimple_call_arg (stmt, 2);
3439 fmt = gimple_call_arg (stmt, 3);
3440
3441 if (! tree_fits_uhwi_p (size))
3442 return false;
3443
3444 len = NULL_TREE;
3445
3446 if (!init_target_chars ())
3447 return false;
3448
3449 /* Check whether the format is a literal string constant. */
3450 fmt_str = c_getstr (fmt);
3451 if (fmt_str != NULL)
3452 {
3453 /* If the format doesn't contain % args or %%, we know the size. */
3454 if (strchr (fmt_str, target_percent) == 0)
cbdd87d4 3455 {
fef5a0d9
RB
3456 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
3457 len = build_int_cstu (size_type_node, strlen (fmt_str));
3458 }
3459 /* If the format is "%s" and first ... argument is a string literal,
3460 we know the size too. */
3461 else if (fcode == BUILT_IN_SPRINTF_CHK
3462 && strcmp (fmt_str, target_percent_s) == 0)
3463 {
3464 tree arg;
cbdd87d4 3465
fef5a0d9
RB
3466 if (nargs == 5)
3467 {
3468 arg = gimple_call_arg (stmt, 4);
3469 if (POINTER_TYPE_P (TREE_TYPE (arg)))
3470 {
3471 len = c_strlen (arg, 1);
3472 if (! len || ! tree_fits_uhwi_p (len))
3473 len = NULL_TREE;
3474 }
3475 }
3476 }
3477 }
cbdd87d4 3478
fef5a0d9
RB
3479 if (! integer_all_onesp (size))
3480 {
3481 if (! len || ! tree_int_cst_lt (len, size))
3482 return false;
3483 }
cbdd87d4 3484
fef5a0d9
RB
3485 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
3486 or if format doesn't contain % chars or is "%s". */
3487 if (! integer_zerop (flag))
3488 {
3489 if (fmt_str == NULL)
3490 return false;
3491 if (strchr (fmt_str, target_percent) != NULL
3492 && strcmp (fmt_str, target_percent_s))
3493 return false;
3494 }
cbdd87d4 3495
fef5a0d9
RB
3496 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
3497 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
3498 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
3499 if (!fn)
3500 return false;
3501
3502 /* Replace the called function and the first 4 argument by 2 retaining
3503 trailing varargs. */
3504 gimple_call_set_fndecl (stmt, fn);
3505 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
3506 gimple_call_set_arg (stmt, 0, dest);
3507 gimple_call_set_arg (stmt, 1, fmt);
3508 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
3509 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
3510 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
3511 fold_stmt (gsi);
3512 return true;
3513}
3514
35770bb2
RB
3515/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
3516 ORIG may be null if this is a 2-argument call. We don't attempt to
3517 simplify calls with more than 3 arguments.
3518
a104bd88 3519 Return true if simplification was possible, otherwise false. */
35770bb2 3520
a104bd88 3521bool
dcb7fae2 3522gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
35770bb2 3523{
355fe088 3524 gimple *stmt = gsi_stmt (*gsi);
35770bb2
RB
3525
3526 /* Verify the required arguments in the original call. We deal with two
3527 types of sprintf() calls: 'sprintf (str, fmt)' and
3528 'sprintf (dest, "%s", orig)'. */
3529 if (gimple_call_num_args (stmt) > 3)
3530 return false;
3531
9816f509 3532 tree orig = NULL_TREE;
35770bb2
RB
3533 if (gimple_call_num_args (stmt) == 3)
3534 orig = gimple_call_arg (stmt, 2);
3535
3536 /* Check whether the format is a literal string constant. */
9816f509
MS
3537 tree fmt = gimple_call_arg (stmt, 1);
3538 const char *fmt_str = c_getstr (fmt);
35770bb2
RB
3539 if (fmt_str == NULL)
3540 return false;
3541
9816f509
MS
3542 tree dest = gimple_call_arg (stmt, 0);
3543
35770bb2
RB
3544 if (!init_target_chars ())
3545 return false;
3546
9816f509
MS
3547 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3548 if (!fn)
3549 return false;
3550
35770bb2
RB
3551 /* If the format doesn't contain % args or %%, use strcpy. */
3552 if (strchr (fmt_str, target_percent) == NULL)
3553 {
35770bb2
RB
3554 /* Don't optimize sprintf (buf, "abc", ptr++). */
3555 if (orig)
3556 return false;
3557
3558 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
3559 'format' is known to contain no % formats. */
3560 gimple_seq stmts = NULL;
355fe088 3561 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
01b0acb7
MS
3562
3563 /* Propagate the NO_WARNING bit to avoid issuing the same
3564 warning more than once. */
e9e2bad7 3565 copy_warning (repl, stmt);
01b0acb7 3566
35770bb2 3567 gimple_seq_add_stmt_without_update (&stmts, repl);
a73468e8 3568 if (tree lhs = gimple_call_lhs (stmt))
35770bb2 3569 {
a73468e8
JJ
3570 repl = gimple_build_assign (lhs, build_int_cst (TREE_TYPE (lhs),
3571 strlen (fmt_str)));
35770bb2
RB
3572 gimple_seq_add_stmt_without_update (&stmts, repl);
3573 gsi_replace_with_seq_vops (gsi, stmts);
3574 /* gsi now points at the assignment to the lhs, get a
3575 stmt iterator to the memcpy call.
3576 ??? We can't use gsi_for_stmt as that doesn't work when the
3577 CFG isn't built yet. */
3578 gimple_stmt_iterator gsi2 = *gsi;
3579 gsi_prev (&gsi2);
3580 fold_stmt (&gsi2);
3581 }
3582 else
3583 {
3584 gsi_replace_with_seq_vops (gsi, stmts);
3585 fold_stmt (gsi);
3586 }
3587 return true;
3588 }
3589
3590 /* If the format is "%s", use strcpy if the result isn't used. */
3591 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3592 {
35770bb2
RB
3593 /* Don't crash on sprintf (str1, "%s"). */
3594 if (!orig)
3595 return false;
3596
9816f509
MS
3597 /* Don't fold calls with source arguments of invalid (nonpointer)
3598 types. */
3599 if (!POINTER_TYPE_P (TREE_TYPE (orig)))
3600 return false;
3601
dcb7fae2
RB
3602 tree orig_len = NULL_TREE;
3603 if (gimple_call_lhs (stmt))
35770bb2 3604 {
598f7235 3605 orig_len = get_maxval_strlen (orig, SRK_STRLEN);
d7e78447 3606 if (!orig_len)
35770bb2
RB
3607 return false;
3608 }
3609
3610 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
3611 gimple_seq stmts = NULL;
355fe088 3612 gimple *repl = gimple_build_call (fn, 2, dest, orig);
01b0acb7
MS
3613
3614 /* Propagate the NO_WARNING bit to avoid issuing the same
3615 warning more than once. */
e9e2bad7 3616 copy_warning (repl, stmt);
01b0acb7 3617
35770bb2 3618 gimple_seq_add_stmt_without_update (&stmts, repl);
a73468e8 3619 if (tree lhs = gimple_call_lhs (stmt))
35770bb2 3620 {
a73468e8 3621 if (!useless_type_conversion_p (TREE_TYPE (lhs),
d7e78447 3622 TREE_TYPE (orig_len)))
a73468e8
JJ
3623 orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
3624 repl = gimple_build_assign (lhs, orig_len);
35770bb2
RB
3625 gimple_seq_add_stmt_without_update (&stmts, repl);
3626 gsi_replace_with_seq_vops (gsi, stmts);
3627 /* gsi now points at the assignment to the lhs, get a
3628 stmt iterator to the memcpy call.
3629 ??? We can't use gsi_for_stmt as that doesn't work when the
3630 CFG isn't built yet. */
3631 gimple_stmt_iterator gsi2 = *gsi;
3632 gsi_prev (&gsi2);
3633 fold_stmt (&gsi2);
3634 }
3635 else
3636 {
3637 gsi_replace_with_seq_vops (gsi, stmts);
3638 fold_stmt (gsi);
3639 }
3640 return true;
3641 }
3642 return false;
3643}
3644
/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
   FMT, and ORIG.  ORIG may be null if this is a 3-argument call.  We don't
   attempt to simplify calls with more than 4 arguments.

   Return true if simplification was possible, otherwise false.  */

bool
gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest = gimple_call_arg (stmt, 0);
  tree destsize = gimple_call_arg (stmt, 1);
  tree fmt = gimple_call_arg (stmt, 2);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  /* Punt on calls with more than 4 arguments (extra variadic args).  */
  if (gimple_call_num_args (stmt) > 4)
    return false;

  if (gimple_call_num_args (stmt) == 4)
    orig = gimple_call_arg (stmt, 3);

  /* The destination size must be a known constant for any folding here.  */
  if (!tree_fits_uhwi_p (destsize))
    return false;
  unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't optimize snprintf (buf, 4, "abc", ptr++).  */
      if (orig)
	return false;

      /* We could expand this as
	 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
	 or to
	 memcpy (str, fmt_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      size_t len = strlen (fmt_str);
      if (len >= destlen)
	return false;

      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  /* snprintf returns the number of characters that would have
	     been written, which for the no-'%' case is strlen (fmt).  */
	  repl = gimple_build_assign (lhs,
				      build_int_cst (TREE_TYPE (lhs), len));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ??? We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't crash on snprintf (str1, cst, "%s").  */
      if (!orig)
	return false;

      /* The source string must have a known constant length for the
	 bound check against DESTLEN below to be meaningful.  */
      tree orig_len = get_maxval_strlen (orig, SRK_STRLEN);
      if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
	return false;

      /* We could expand this as
	 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
	 or to
	 memcpy (str1, str2_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      if (compare_tree_int (orig_len, destlen) >= 0)
	return false;

      /* Convert snprintf (str1, cst, "%s", str2) into
	 strcpy (str1, str2) if strlen (str2) < cst.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  /* The call's result is strlen (str2); convert it to the
	     lhs type if needed.  */
	  if (!useless_type_conversion_p (TREE_TYPE (lhs),
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
	  repl = gimple_build_assign (lhs, orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ??? We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
35770bb2 3781
/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
   FP, FMT, and ARG are the arguments to the call.  We don't fold calls with
   more than 3 arguments, and ARG may be null in the 2-argument case.

   Return true if the call was replaced with a simpler equivalent,
   otherwise false.  FCODE is the BUILT_IN_* code of the function to be
   simplified.  */

static bool
gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
			     tree fp, tree fmt, tree arg,
			     enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_fputc, fn_fputs;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
      fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
    }
  else
    {
      fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
      fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
    }

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use fputs.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      /* A trailing argument without a '%' directive would be unused;
	 leave such calls alone (except for the va_list variants,
	 where ARG is the va_list, not a printed value).  */
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
	  && arg)
	return false;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* When "string" doesn't contain %, replace all cases of
	 fprintf (fp, string) with fputs (string, fp).  The fputs
	 builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return false;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg
	  || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
	return false;
      if (fn_fputc)
	{
	  gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
3882
/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
   FMT and ARG are the arguments to the call; we don't fold cases with
   more than 2 arguments, and ARG may be null if this is a 1-argument case.

   Return true if the call was replaced with a simpler equivalent
   (puts/putchar), otherwise false.  FCODE is the BUILT_IN_* code of the
   function to be simplified.  */

static bool
gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
			    tree arg, enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_putchar, fn_puts, newarg;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
      fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
    }
  else
    {
      fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
      fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
    }

  if (!init_target_chars ())
    return false;

  /* Handle both printf ("%s", str-constant) and printf (str-constant):
     in either case the printed string STR is known at compile time.  */
  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
	{
	  if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
	    return false;

	  if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	    return false;

	  str = c_getstr (arg);
	  if (str == NULL)
	    return false;
	}
      else
	{
	  /* The format specifier doesn't contain any '%' characters.  */
	  if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
	      && arg)
	    return false;
	  str = fmt_str;
	}

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
	{
	  /* Given printf("c"), (where c is any one character,)
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
	  newarg = build_int_cst (integer_type_node, str[0]);
	  if (fn_putchar)
	    {
	      gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	{
	  /* If the string was "string\n", call puts("string").  */
	  size_t len = strlen (str);
	  if ((unsigned char)str[len - 1] == target_newline
	      && (size_t) (int) len == len
	      && (int) len > 0)
	    {
	      char *newstr;

	      /* Create a NUL-terminated string that's one char shorter
		 than the original, stripping off the trailing '\n'.  */
	      newstr = xstrdup (str);
	      newstr[len - 1] = '\0';
	      newarg = build_string_literal (len, newstr);
	      free (newstr);
	      if (fn_puts)
		{
		  gcall *repl = gimple_build_call (fn_puts, 1, newarg);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	    }
	  else
	    /* We'd like to arrange to call fputs(string,stdout) here,
	       but we need stdout and don't have a way to get it yet.  */
	    return false;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return false;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_puts)
	{
	  gcall *repl = gimple_build_call (fn_puts, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || ! useless_type_conversion_p (integer_type_node,
					       TREE_TYPE (arg)))
	return false;
      if (fn_putchar)
	{
	  gcall *repl = gimple_build_call (fn_putchar, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
4034


/* Fold a call to __builtin_strlen.  If the length of the argument can
   be determined exactly, replace the call with a constant; otherwise
   record the computed [MINLEN, MAXLEN] range on the call's result.

   Return true if the call was folded away, false otherwise.  */

static bool
gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree arg = gimple_call_arg (stmt, 0);

  wide_int minlen;
  wide_int maxlen;

  c_strlen_data lendata = { };
  /* Only trust the range when it isn't based on an unterminated
     declaration (lendata.decl) and both bounds are constants.  */
  if (get_range_strlen (arg, &lendata, /* eltsize = */ 1)
      && !lendata.decl
      && lendata.minlen && TREE_CODE (lendata.minlen) == INTEGER_CST
      && lendata.maxlen && TREE_CODE (lendata.maxlen) == INTEGER_CST)
    {
      /* The range of lengths refers to either a single constant
	 string or to the longest and shortest constant string
	 referenced by the argument of the strlen() call, or to
	 the strings that can possibly be stored in the arrays
	 the argument refers to.  */
      minlen = wi::to_wide (lendata.minlen);
      maxlen = wi::to_wide (lendata.maxlen);
    }
  else
    {
      /* Fall back to the widest possible range: strlen of any object
	 is at most max_object_size () - 2 (room for one byte of data
	 and the terminating NUL).  */
      unsigned prec = TYPE_PRECISION (sizetype);

      minlen = wi::shwi (0, prec);
      maxlen = wi::to_wide (max_object_size (), prec) - 2;
    }

  if (minlen == maxlen)
    {
      /* Fold the strlen call to a constant.  */
      tree type = TREE_TYPE (lendata.minlen);
      tree len = force_gimple_operand_gsi (gsi,
					   wide_int_to_tree (type, minlen),
					   true, NULL, true, GSI_SAME_STMT);
      replace_call_with_value (gsi, len);
      return true;
    }

  /* Set the strlen() range to [0, MAXLEN].  */
  if (tree lhs = gimple_call_lhs (stmt))
    set_strlen_range (lhs, minlen, maxlen);

  return false;
}
4087
/* Fold a call to __builtin_acc_on_device (ARG0) into the boolean
   expression (ARG0 == VAL_HOST) | (ARG0 == VAL_DEV), where the two
   constants depend on whether this is the host or the accelerator
   (ACCEL_COMPILER) compiler.  */

static bool
gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
{
  /* Defer folding until we know which compiler we're in.  */
  if (symtab->state != EXPANSION)
    return false;

  unsigned val_host = GOMP_DEVICE_HOST;
  unsigned val_dev = GOMP_DEVICE_NONE;

#ifdef ACCEL_COMPILER
  val_host = GOMP_DEVICE_NOT_HOST;
  val_dev = ACCEL_COMPILER_acc_device;
#endif

  location_t loc = gimple_location (gsi_stmt (*gsi));

  /* host_eq = (arg0 == val_host)  */
  tree host_eq = make_ssa_name (boolean_type_node);
  gimple *host_ass = gimple_build_assign
    (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
  gimple_set_location (host_ass, loc);
  gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);

  /* dev_eq = (arg0 == val_dev)  */
  tree dev_eq = make_ssa_name (boolean_type_node);
  gimple *dev_ass = gimple_build_assign
    (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
  gimple_set_location (dev_ass, loc);
  gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);

  /* result = host_eq | dev_eq; replaces the builtin call's value.  */
  tree result = make_ssa_name (boolean_type_node);
  gimple *result_ass = gimple_build_assign
    (result, BIT_IOR_EXPR, host_eq, dev_eq);
  gimple_set_location (result_ass, loc);
  gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);

  replace_call_with_value (gsi, result);

  return true;
}
cbdd87d4 4129
fe75f732
PK
4130/* Fold realloc (0, n) -> malloc (n). */
4131
4132static bool
4133gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
4134{
4135 gimple *stmt = gsi_stmt (*gsi);
4136 tree arg = gimple_call_arg (stmt, 0);
4137 tree size = gimple_call_arg (stmt, 1);
4138
4139 if (operand_equal_p (arg, null_pointer_node, 0))
4140 {
4141 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
4142 if (fn_malloc)
4143 {
4144 gcall *repl = gimple_build_call (fn_malloc, 1, size);
4145 replace_call_with_call_and_fold (gsi, repl);
4146 return true;
4147 }
4148 }
4149 return false;
4150}
4151
/* Number of bytes into which any type but aggregate or vector types
   should fit.  */
static constexpr size_t clear_padding_unit
  = MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT;
/* Buffer size on which __builtin_clear_padding folding code works.  */
static const size_t clear_padding_buf_size = 32 * clear_padding_unit;

/* Data passed through __builtin_clear_padding folding.  */
struct clear_padding_struct {
  /* Location used for any statements or diagnostics emitted.  */
  location_t loc;
  /* 0 during __builtin_clear_padding folding, nonzero during
     clear_type_padding_in_mask.  In that case, instead of clearing the
     non-padding bits in union_ptr array clear the padding bits in there.  */
  bool clear_in_mask;
  /* Base address used when building MEM_REFs for the emitted stores.  */
  tree base;
  /* Type used for the MEM_REF offset constants (alias information).  */
  tree alias_type;
  /* Iterator before which the clearing statements are inserted.  */
  gimple_stmt_iterator *gsi;
  /* Alignment of buf->base + 0.  */
  unsigned align;
  /* Offset from buf->base.  Should be always a multiple of UNITS_PER_WORD.  */
  HOST_WIDE_INT off;
  /* Number of padding bytes before buf->off that don't have padding clear
     code emitted yet.  */
  HOST_WIDE_INT padding_bytes;
  /* The size of the whole object.  Never emit code to touch
     buf->base + buf->sz or following bytes.  */
  HOST_WIDE_INT sz;
  /* Number of bytes recorded in buf->buf.  */
  size_t size;
  /* When inside union, instead of emitting code we and bits inside of
     the union_ptr array.  */
  unsigned char *union_ptr;
  /* Set bits mean padding bits that need to be cleared by the builtin.  */
  unsigned char buf[clear_padding_buf_size + clear_padding_unit];
};
4187
/* Emit code to clear padding requested in BUF->buf - set bits
   in there stand for padding that should be cleared.  FULL is true
   if everything from the buffer should be flushed, otherwise
   it can leave up to 2 * clear_padding_unit bytes for further
   processing.  */

static void
clear_padding_flush (clear_padding_struct *buf, bool full)
{
  gcc_assert ((clear_padding_unit % UNITS_PER_WORD) == 0);
  /* For partial flushes, wait until enough bytes have accumulated.  */
  if (!full && buf->size < 2 * clear_padding_unit)
    return;
  gcc_assert ((buf->off % UNITS_PER_WORD) == 0);
  size_t end = buf->size;
  if (!full)
    /* Round END down so at least clear_padding_unit bytes stay behind
       for further processing, keeping it a multiple of the unit.  */
    end = ((end - clear_padding_unit - 1) / clear_padding_unit
	   * clear_padding_unit);
  size_t padding_bytes = buf->padding_bytes;
  if (buf->union_ptr)
    {
      if (buf->clear_in_mask)
	{
	  /* During clear_type_padding_in_mask, clear the padding
	     bits set in buf->buf in the buf->union_ptr mask.  */
	  for (size_t i = 0; i < end; i++)
	    {
	      if (buf->buf[i] == (unsigned char) ~0)
		padding_bytes++;
	      else
		{
		  memset (&buf->union_ptr[buf->off + i - padding_bytes],
			  0, padding_bytes);
		  padding_bytes = 0;
		  buf->union_ptr[buf->off + i] &= ~buf->buf[i];
		}
	    }
	  if (full)
	    {
	      memset (&buf->union_ptr[buf->off + end - padding_bytes],
		      0, padding_bytes);
	      buf->off = 0;
	      buf->size = 0;
	      buf->padding_bytes = 0;
	    }
	  else
	    {
	      /* Shift the unprocessed tail to the front of the buffer.  */
	      memmove (buf->buf, buf->buf + end, buf->size - end);
	      buf->off += end;
	      buf->size -= end;
	      buf->padding_bytes = padding_bytes;
	    }
	  return;
	}
      /* Inside of a union, instead of emitting any code, instead
	 clear all bits in the union_ptr buffer that are clear
	 in buf.  Whole padding bytes don't clear anything.  */
      for (size_t i = 0; i < end; i++)
	{
	  if (buf->buf[i] == (unsigned char) ~0)
	    padding_bytes++;
	  else
	    {
	      padding_bytes = 0;
	      buf->union_ptr[buf->off + i] &= buf->buf[i];
	    }
	}
      if (full)
	{
	  buf->off = 0;
	  buf->size = 0;
	  buf->padding_bytes = 0;
	}
      else
	{
	  memmove (buf->buf, buf->buf + end, buf->size - end);
	  buf->off += end;
	  buf->size -= end;
	  buf->padding_bytes = padding_bytes;
	}
      return;
    }
  /* Normal case: emit stores, working one (possibly shrinking) word
     at a time.  */
  size_t wordsize = UNITS_PER_WORD;
  for (size_t i = 0; i < end; i += wordsize)
    {
      size_t nonzero_first = wordsize;
      size_t nonzero_last = 0;
      size_t zero_first = wordsize;
      size_t zero_last = 0;
      bool all_ones = true, bytes_only = true;
      if ((unsigned HOST_WIDE_INT) (buf->off + i + wordsize)
	  > (unsigned HOST_WIDE_INT) buf->sz)
	{
	  /* The word would run past the object; retry with half the
	     word size (never touch bytes beyond buf->sz).  */
	  gcc_assert (wordsize > 1);
	  wordsize /= 2;
	  i -= wordsize;
	  continue;
	}
      /* Scan the word: find the first/last nonzero and zero byte
	 positions, and whether the word mixes partial-byte padding
	 (bitfields) or only whole 0x00/0xff bytes.  */
      for (size_t j = i; j < i + wordsize && j < end; j++)
	{
	  if (buf->buf[j])
	    {
	      if (nonzero_first == wordsize)
		{
		  nonzero_first = j - i;
		  nonzero_last = j - i;
		}
	      if (nonzero_last != j - i)
		all_ones = false;
	      nonzero_last = j + 1 - i;
	    }
	  else
	    {
	      if (zero_first == wordsize)
		zero_first = j - i;
	      zero_last = j + 1 - i;
	    }
	  if (buf->buf[j] != 0 && buf->buf[j] != (unsigned char) ~0)
	    {
	      all_ones = false;
	      bytes_only = false;
	    }
	}
      size_t padding_end = i;
      if (padding_bytes)
	{
	  if (nonzero_first == 0
	      && nonzero_last == wordsize
	      && all_ones)
	    {
	      /* All bits are padding and we had some padding
		 before too.  Just extend it.  */
	      padding_bytes += wordsize;
	      continue;
	    }
	  if (all_ones && nonzero_first == 0)
	    {
	      padding_bytes += nonzero_last;
	      padding_end += nonzero_last;
	      nonzero_first = wordsize;
	      nonzero_last = 0;
	    }
	  else if (bytes_only && nonzero_first == 0)
	    {
	      gcc_assert (zero_first && zero_first != wordsize);
	      padding_bytes += zero_first;
	      padding_end += zero_first;
	    }
	  /* Emit the pending run of padding bytes as a single store
	     of zeros (char for one byte, char array otherwise).  */
	  tree atype, src;
	  if (padding_bytes == 1)
	    {
	      atype = char_type_node;
	      src = build_zero_cst (char_type_node);
	    }
	  else
	    {
	      atype = build_array_type_nelts (char_type_node, padding_bytes);
	      src = build_constructor (atype, NULL);
	    }
	  tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
				 build_int_cst (buf->alias_type,
						buf->off + padding_end
						- padding_bytes));
	  gimple *g = gimple_build_assign (dst, src);
	  gimple_set_location (g, buf->loc);
	  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	  padding_bytes = 0;
	  buf->padding_bytes = 0;
	}
      if (nonzero_first == wordsize)
	/* All bits in a word are 0, there are no padding bits.  */
	continue;
      if (all_ones && nonzero_last == wordsize)
	{
	  /* All bits between nonzero_first and end of word are padding
	     bits, start counting padding_bytes.  */
	  padding_bytes = nonzero_last - nonzero_first;
	  continue;
	}
      if (bytes_only)
	{
	  /* If bitfields aren't involved in this word, prefer storing
	     individual bytes or groups of them over performing a RMW
	     operation on the whole word.  */
	  gcc_assert (i + zero_last <= end);
	  for (size_t j = padding_end; j < i + zero_last; j++)
	    {
	      if (buf->buf[j])
		{
		  size_t k;
		  for (k = j; k < i + zero_last; k++)
		    if (buf->buf[k] == 0)
		      break;
		  HOST_WIDE_INT off = buf->off + j;
		  tree atype, src;
		  if (k - j == 1)
		    {
		      atype = char_type_node;
		      src = build_zero_cst (char_type_node);
		    }
		  else
		    {
		      atype = build_array_type_nelts (char_type_node, k - j);
		      src = build_constructor (atype, NULL);
		    }
		  tree dst = build2_loc (buf->loc, MEM_REF, atype,
					 buf->base,
					 build_int_cst (buf->alias_type, off));
		  gimple *g = gimple_build_assign (dst, src);
		  gimple_set_location (g, buf->loc);
		  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
		  j = k;
		}
	    }
	  if (nonzero_last == wordsize)
	    padding_bytes = nonzero_last - zero_last;
	  continue;
	}
      /* Bitfields involved: find the smallest power-of-two element size
	 covering all padding bits and emit either a plain zero store or
	 a read-modify-write with the inverted padding mask.  */
      for (size_t eltsz = 1; eltsz <= wordsize; eltsz <<= 1)
	{
	  if (nonzero_last - nonzero_first <= eltsz
	      && ((nonzero_first & ~(eltsz - 1))
		  == ((nonzero_last - 1) & ~(eltsz - 1))))
	    {
	      tree type;
	      if (eltsz == 1)
		type = char_type_node;
	      else
		type = lang_hooks.types.type_for_size (eltsz * BITS_PER_UNIT,
						       0);
	      size_t start = nonzero_first & ~(eltsz - 1);
	      HOST_WIDE_INT off = buf->off + i + start;
	      tree atype = type;
	      if (eltsz > 1 && buf->align < TYPE_ALIGN (type))
		atype = build_aligned_type (type, buf->align);
	      tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
				     build_int_cst (buf->alias_type, off));
	      tree src;
	      gimple *g;
	      if (all_ones
		  && nonzero_first == start
		  && nonzero_last == start + eltsz)
		src = build_zero_cst (type);
	      else
		{
		  src = make_ssa_name (type);
		  g = gimple_build_assign (src, unshare_expr (dst));
		  gimple_set_location (g, buf->loc);
		  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
		  tree mask = native_interpret_expr (type,
						     buf->buf + i + start,
						     eltsz);
		  gcc_assert (mask && TREE_CODE (mask) == INTEGER_CST);
		  mask = fold_build1 (BIT_NOT_EXPR, type, mask);
		  tree src_masked = make_ssa_name (type);
		  g = gimple_build_assign (src_masked, BIT_AND_EXPR,
					   src, mask);
		  gimple_set_location (g, buf->loc);
		  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
		  src = src_masked;
		}
	      g = gimple_build_assign (dst, src);
	      gimple_set_location (g, buf->loc);
	      gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	      break;
	    }
	}
    }
  if (full)
    {
      /* Flush any padding run still pending at END.  */
      if (padding_bytes)
	{
	  tree atype, src;
	  if (padding_bytes == 1)
	    {
	      atype = char_type_node;
	      src = build_zero_cst (char_type_node);
	    }
	  else
	    {
	      atype = build_array_type_nelts (char_type_node, padding_bytes);
	      src = build_constructor (atype, NULL);
	    }
	  tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
				 build_int_cst (buf->alias_type,
						buf->off + end
						- padding_bytes));
	  gimple *g = gimple_build_assign (dst, src);
	  gimple_set_location (g, buf->loc);
	  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	}
      size_t end_rem = end % UNITS_PER_WORD;
      buf->off += end - end_rem;
      buf->size = end_rem;
      memset (buf->buf, 0, buf->size);
      buf->padding_bytes = 0;
    }
  else
    {
      memmove (buf->buf, buf->buf + end, buf->size - end);
      buf->off += end;
      buf->size -= end;
      buf->padding_bytes = padding_bytes;
    }
}
4492
/* Append PADDING_BYTES padding bytes (all-ones bytes) to BUF->buf,
   flushing the buffer as needed when the run doesn't fit.  */

static void
clear_padding_add_padding (clear_padding_struct *buf,
			   HOST_WIDE_INT padding_bytes)
{
  if (padding_bytes == 0)
    return;
  if ((unsigned HOST_WIDE_INT) padding_bytes + buf->size
      > (unsigned HOST_WIDE_INT) clear_padding_buf_size)
    clear_padding_flush (buf, false);
  if ((unsigned HOST_WIDE_INT) padding_bytes + buf->size
      > (unsigned HOST_WIDE_INT) clear_padding_buf_size)
    {
      /* Even after flushing the run doesn't fit: fill the buffer,
	 flush again, and account for the rest via buf->off /
	 buf->padding_bytes rather than materializing it.  */
      memset (buf->buf + buf->size, ~0, clear_padding_buf_size - buf->size);
      padding_bytes -= clear_padding_buf_size - buf->size;
      buf->size = clear_padding_buf_size;
      clear_padding_flush (buf, false);
      gcc_assert (buf->padding_bytes);
      /* At this point buf->buf[0] through buf->buf[buf->size - 1]
	 is guaranteed to be all ones.  */
      padding_bytes += buf->size;
      buf->size = padding_bytes % UNITS_PER_WORD;
      memset (buf->buf, ~0, buf->size);
      buf->off += padding_bytes - buf->size;
      buf->padding_bytes += padding_bytes - buf->size;
    }
  else
    {
      memset (buf->buf + buf->size, ~0, padding_bytes);
      buf->size += padding_bytes;
    }
}
4526
static void clear_padding_type (clear_padding_struct *, tree,
				HOST_WIDE_INT, bool);

/* Clear padding bits of union type TYPE of size SZ bytes.  Only bits that
   are padding in EVERY union member are padding of the union, so each
   member's padding mask is accumulated (ANDed) into a scratch buffer.
   FOR_AUTO_INIT suppresses the flexible-array-member diagnostic when the
   builtin was inserted by the compiler rather than written by the user.  */

static void
clear_padding_union (clear_padding_struct *buf, tree type,
		     HOST_WIDE_INT sz, bool for_auto_init)
{
  clear_padding_struct *union_buf;
  HOST_WIDE_INT start_off = 0, next_off = 0;
  size_t start_size = 0;
  if (buf->union_ptr)
    {
      /* Already inside a union: reuse BUF itself, flushing first so the
	 per-member masks are accumulated against a clean buffer.  */
      start_off = buf->off + buf->size;
      next_off = start_off + sz;
      start_size = start_off % UNITS_PER_WORD;
      start_off -= start_size;
      clear_padding_flush (buf, true);
      union_buf = buf;
    }
  else
    {
      /* Outermost union: set up a scratch clear_padding_struct whose
	 union_ptr mask lives either directly in BUF->buf (when it fits)
	 or in freshly allocated memory.  */
      if (sz + buf->size > clear_padding_buf_size)
	clear_padding_flush (buf, false);
      union_buf = XALLOCA (clear_padding_struct);
      union_buf->loc = buf->loc;
      union_buf->clear_in_mask = buf->clear_in_mask;
      union_buf->base = NULL_TREE;
      union_buf->alias_type = NULL_TREE;
      union_buf->gsi = NULL;
      union_buf->align = 0;
      union_buf->off = 0;
      union_buf->padding_bytes = 0;
      union_buf->sz = sz;
      union_buf->size = 0;
      if (sz + buf->size <= clear_padding_buf_size)
	union_buf->union_ptr = buf->buf + buf->size;
      else
	union_buf->union_ptr = XNEWVEC (unsigned char, sz);
      /* Start from "everything is padding"; members AND bits away.  */
      memset (union_buf->union_ptr, ~0, sz);
    }

  for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL && !DECL_PADDING_P (field))
      {
	if (DECL_SIZE_UNIT (field) == NULL_TREE)
	  {
	    if (TREE_TYPE (field) == error_mark_node)
	      continue;
	    gcc_assert (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
			&& !COMPLETE_TYPE_P (TREE_TYPE (field)));
	    if (!buf->clear_in_mask && !for_auto_init)
	      error_at (buf->loc, "flexible array member %qD does not have "
				  "well defined padding bits for %qs",
			field, "__builtin_clear_padding");
	    continue;
	  }
	HOST_WIDE_INT fldsz = tree_to_shwi (DECL_SIZE_UNIT (field));
	gcc_assert (union_buf->size == 0);
	/* Reset the scratch buffer to the union's start for each member.  */
	union_buf->off = start_off;
	union_buf->size = start_size;
	memset (union_buf->buf, ~0, start_size);
	clear_padding_type (union_buf, TREE_TYPE (field), fldsz, for_auto_init);
	/* Bytes past the member but inside the union are padding.  */
	clear_padding_add_padding (union_buf, sz - fldsz);
	clear_padding_flush (union_buf, true);
      }

  if (buf == union_buf)
    {
      buf->off = next_off;
      buf->size = next_off % UNITS_PER_WORD;
      buf->off -= buf->size;
      memset (buf->buf, ~0, buf->size);
    }
  else if (sz + buf->size <= clear_padding_buf_size)
    buf->size += sz;
  else
    {
      /* Copy the heap-allocated mask back into BUF in chunks.  */
      unsigned char *union_ptr = union_buf->union_ptr;
      while (sz)
	{
	  clear_padding_flush (buf, false);
	  HOST_WIDE_INT this_sz
	    = MIN ((unsigned HOST_WIDE_INT) sz,
		   clear_padding_buf_size - buf->size);
	  memcpy (buf->buf + buf->size, union_ptr, this_sz);
	  buf->size += this_sz;
	  union_ptr += this_sz;
	  sz -= this_sz;
	}
      XDELETE (union_buf->union_ptr);
    }
}
4621
4622/* The only known floating point formats with padding bits are the
4623 IEEE extended ones. */
4624
4625static bool
4626clear_padding_real_needs_padding_p (tree type)
4627{
4628 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
4629 return (fmt->b == 2
4630 && fmt->signbit_ro == fmt->signbit_rw
4631 && (fmt->signbit_ro == 79 || fmt->signbit_ro == 95));
4632}
4633
4634/* Return true if TYPE might contain any padding bits. */
4635
4636static bool
4637clear_padding_type_may_have_padding_p (tree type)
4638{
4639 switch (TREE_CODE (type))
4640 {
4641 case RECORD_TYPE:
4642 case UNION_TYPE:
4643 return true;
4644 case ARRAY_TYPE:
4645 case COMPLEX_TYPE:
4646 case VECTOR_TYPE:
4647 return clear_padding_type_may_have_padding_p (TREE_TYPE (type));
4648 case REAL_TYPE:
4649 return clear_padding_real_needs_padding_p (type);
4650 default:
4651 return false;
4652 }
4653}
4654
/* Emit a runtime loop:
   for (; buf.base != end; buf.base += sz)
     __builtin_clear_padding (buf.base);

   The loop is emitted as explicit labels and a conditional jump:
     goto l2; l1: <body>; base += sz; l2: if (base != end) goto l1; l3:
   BUF describes the padding state and insertion point, TYPE is the
   element type cleared on each iteration, END is the one-past-the-end
   pointer, and FOR_AUTO_INIT is propagated to clear_padding_type to
   suppress user-facing diagnostics for compiler-inserted calls.  */

static void
clear_padding_emit_loop (clear_padding_struct *buf, tree type,
			 tree end, bool for_auto_init)
{
  tree l1 = create_artificial_label (buf->loc);
  tree l2 = create_artificial_label (buf->loc);
  tree l3 = create_artificial_label (buf->loc);
  /* Jump to the loop condition first (test-before-body form).  */
  gimple *g = gimple_build_goto (l2);
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
  g = gimple_build_label (l1);
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
  /* Loop body: clear one element's padding and flush pending stores.  */
  clear_padding_type (buf, type, buf->sz, for_auto_init);
  clear_padding_flush (buf, true);
  /* Advance the base pointer by one element.  */
  g = gimple_build_assign (buf->base, POINTER_PLUS_EXPR, buf->base,
			   size_int (buf->sz));
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
  g = gimple_build_label (l2);
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
  /* Loop while the base pointer has not reached END.  */
  g = gimple_build_cond (NE_EXPR, buf->base, end, l1, l3);
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
  g = gimple_build_label (l3);
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
}
4688
/* Clear padding bits for TYPE.  Called recursively from
   gimple_fold_builtin_clear_padding.  If FOR_AUTO_INIT is true,
   the __builtin_clear_padding is not called by the end user,
   instead, it's inserted by the compiler to initialize the
   paddings of automatic variable.  Therefore, we should not
   emit the error messages for flexible array members to confuse
   the end user.

   BUF accumulates a byte image of the object in which padding bytes
   are ~0 and value bytes are 0; SZ is the size in bytes of TYPE within
   the containing object.  */

static void
clear_padding_type (clear_padding_struct *buf, tree type,
		    HOST_WIDE_INT sz, bool for_auto_init)
{
  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      HOST_WIDE_INT cur_pos;
      cur_pos = 0;
      /* Walk the fields in layout order, marking gaps between them
	 as padding.  DECL_PADDING_P fields are themselves padding.  */
      for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL && !DECL_PADDING_P (field))
	  {
	    tree ftype = TREE_TYPE (field);
	    if (DECL_BIT_FIELD (field))
	      {
		HOST_WIDE_INT fldsz = TYPE_PRECISION (ftype);
		if (fldsz == 0)
		  continue;
		HOST_WIDE_INT pos = int_byte_position (field);
		/* Fields past SZ (e.g. in a shorter union member view)
		   are irrelevant here.  */
		if (pos >= sz)
		  continue;
		HOST_WIDE_INT bpos
		  = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
		bpos %= BITS_PER_UNIT;
		/* END is the byte length the bit-field spans starting
		   at byte POS.  */
		HOST_WIDE_INT end
		  = ROUND_UP (bpos + fldsz, BITS_PER_UNIT) / BITS_PER_UNIT;
		if (pos + end > cur_pos)
		  {
		    clear_padding_add_padding (buf, pos + end - cur_pos);
		    cur_pos = pos + end;
		  }
		gcc_assert (cur_pos > pos
			    && ((unsigned HOST_WIDE_INT) buf->size
				>= (unsigned HOST_WIDE_INT) cur_pos - pos));
		/* P points at the first byte of the bit-field within the
		   accumulated image; clear the bits the field occupies so
		   only genuine padding stays set.  */
		unsigned char *p = buf->buf + buf->size - (cur_pos - pos);
		if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
		  sorry_at (buf->loc, "PDP11 bit-field handling unsupported"
				      " in %qs", "__builtin_clear_padding");
		else if (BYTES_BIG_ENDIAN)
		  {
		    /* Big endian.  */
		    if (bpos + fldsz <= BITS_PER_UNIT)
		      *p &= ~(((1 << fldsz) - 1)
			      << (BITS_PER_UNIT - bpos - fldsz));
		    else
		      {
			if (bpos)
			  {
			    *p &= ~(((1U << BITS_PER_UNIT) - 1) >> bpos);
			    p++;
			    fldsz -= BITS_PER_UNIT - bpos;
			  }
			memset (p, 0, fldsz / BITS_PER_UNIT);
			p += fldsz / BITS_PER_UNIT;
			fldsz %= BITS_PER_UNIT;
			if (fldsz)
			  *p &= ((1U << BITS_PER_UNIT) - 1) >> fldsz;
		      }
		  }
		else
		  {
		    /* Little endian.  */
		    if (bpos + fldsz <= BITS_PER_UNIT)
		      *p &= ~(((1 << fldsz) - 1) << bpos);
		    else
		      {
			if (bpos)
			  {
			    *p &= ~(((1 << BITS_PER_UNIT) - 1) << bpos);
			    p++;
			    fldsz -= BITS_PER_UNIT - bpos;
			  }
			memset (p, 0, fldsz / BITS_PER_UNIT);
			p += fldsz / BITS_PER_UNIT;
			fldsz %= BITS_PER_UNIT;
			if (fldsz)
			  *p &= ~((1 << fldsz) - 1);
		      }
		  }
	      }
	    else if (DECL_SIZE_UNIT (field) == NULL_TREE)
	      {
		/* A sizeless field must be a flexible array member (or an
		   erroneous type); its padding is not well defined.  */
		if (ftype == error_mark_node)
		  continue;
		gcc_assert (TREE_CODE (ftype) == ARRAY_TYPE
			    && !COMPLETE_TYPE_P (ftype));
		if (!buf->clear_in_mask && !for_auto_init)
		  error_at (buf->loc, "flexible array member %qD does not "
				      "have well defined padding bits for %qs",
			    field, "__builtin_clear_padding");
	      }
	    else if (is_empty_type (TREE_TYPE (field)))
	      continue;
	    else
	      {
		HOST_WIDE_INT pos = int_byte_position (field);
		if (pos >= sz)
		  continue;
		HOST_WIDE_INT fldsz = tree_to_shwi (DECL_SIZE_UNIT (field));
		gcc_assert (pos >= 0 && fldsz >= 0 && pos >= cur_pos);
		/* Bytes between the previous field and this one are
		   padding; then recurse into the field's own type.  */
		clear_padding_add_padding (buf, pos - cur_pos);
		cur_pos = pos;
		clear_padding_type (buf, TREE_TYPE (field),
				    fldsz, for_auto_init);
		cur_pos += fldsz;
	      }
	  }
      /* Trailing bytes after the last field are padding too.  */
      gcc_assert (sz >= cur_pos);
      clear_padding_add_padding (buf, sz - cur_pos);
      break;
    case ARRAY_TYPE:
      HOST_WIDE_INT nelts, fldsz;
      fldsz = int_size_in_bytes (TREE_TYPE (type));
      if (fldsz == 0)
	break;
      nelts = sz / fldsz;
      if (nelts > 1
	  && sz > 8 * UNITS_PER_WORD
	  && buf->union_ptr == NULL
	  && clear_padding_type_may_have_padding_p (TREE_TYPE (type)))
	{
	  /* For sufficiently large array of more than one elements,
	     emit a runtime loop to keep code size manageable.  */
	  tree base = buf->base;
	  unsigned int prev_align = buf->align;
	  HOST_WIDE_INT off = buf->off + buf->size;
	  HOST_WIDE_INT prev_sz = buf->sz;
	  clear_padding_flush (buf, true);
	  tree elttype = TREE_TYPE (type);
	  /* Point a fresh temporary at the first element and compute the
	     one-past-the-end address for the loop bound.  */
	  buf->base = create_tmp_var (build_pointer_type (elttype));
	  tree end = make_ssa_name (TREE_TYPE (buf->base));
	  gimple *g = gimple_build_assign (buf->base, POINTER_PLUS_EXPR,
					   base, size_int (off));
	  gimple_set_location (g, buf->loc);
	  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	  g = gimple_build_assign (end, POINTER_PLUS_EXPR, buf->base,
				   size_int (sz));
	  gimple_set_location (g, buf->loc);
	  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	  buf->sz = fldsz;
	  buf->align = TYPE_ALIGN (elttype);
	  buf->off = 0;
	  buf->size = 0;
	  clear_padding_emit_loop (buf, elttype, end, for_auto_init);
	  /* Restore the caller's buffer state, re-aligned to the word
	     boundary following the array.  */
	  buf->base = base;
	  buf->sz = prev_sz;
	  buf->align = prev_align;
	  buf->size = off % UNITS_PER_WORD;
	  buf->off = off - buf->size;
	  memset (buf->buf, 0, buf->size);
	  break;
	}
      /* Small arrays are simply unrolled element by element.  */
      for (HOST_WIDE_INT i = 0; i < nelts; i++)
	clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
      break;
    case UNION_TYPE:
      clear_padding_union (buf, type, sz, for_auto_init);
      break;
    case REAL_TYPE:
      gcc_assert ((size_t) sz <= clear_padding_unit);
      if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
	clear_padding_flush (buf, false);
      if (clear_padding_real_needs_padding_p (type))
	{
	  /* Use native_interpret_expr + native_encode_expr to figure out
	     which bits are padding.  Encoding an all-ones pattern writes
	     back only the value bits, so XOR-ing with ~0 leaves exactly
	     the padding bits set.  */
	  memset (buf->buf + buf->size, ~0, sz);
	  tree cst = native_interpret_expr (type, buf->buf + buf->size, sz);
	  gcc_assert (cst && TREE_CODE (cst) == REAL_CST);
	  int len = native_encode_expr (cst, buf->buf + buf->size, sz);
	  gcc_assert (len > 0 && (size_t) len == (size_t) sz);
	  for (size_t i = 0; i < (size_t) sz; i++)
	    buf->buf[buf->size + i] ^= ~0;
	}
      else
	memset (buf->buf + buf->size, 0, sz);
      buf->size += sz;
      break;
    case COMPLEX_TYPE:
      /* Real and imaginary parts laid out back to back.  */
      fldsz = int_size_in_bytes (TREE_TYPE (type));
      clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
      clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
      break;
    case VECTOR_TYPE:
      nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
      fldsz = int_size_in_bytes (TREE_TYPE (type));
      for (HOST_WIDE_INT i = 0; i < nelts; i++)
	clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
      break;
    case NULLPTR_TYPE:
      /* nullptr_t carries no value bits; treat the whole object as
	 padding.  */
      gcc_assert ((size_t) sz <= clear_padding_unit);
      if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
	clear_padding_flush (buf, false);
      memset (buf->buf + buf->size, ~0, sz);
      buf->size += sz;
      break;
    default:
      /* Scalars have no padding; record all bytes as value bytes.  */
      gcc_assert ((size_t) sz <= clear_padding_unit);
      if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
	clear_padding_flush (buf, false);
      memset (buf->buf + buf->size, 0, sz);
      buf->size += sz;
      break;
    }
}
4902
896048cf
JJ
/* Clear padding bits of TYPE in MASK.  Runs the padding walker in
   mask-only mode (clear_in_mask = true, union_ptr = MASK): instead of
   emitting gimple statements it zeroes in MASK the bits corresponding
   to padding within TYPE.  MASK must be at least int_size_in_bytes
   (TYPE) bytes.  */

void
clear_type_padding_in_mask (tree type, unsigned char *mask)
{
  clear_padding_struct buf;
  buf.loc = UNKNOWN_LOCATION;
  buf.clear_in_mask = true;
  /* No code generation in this mode, so no base pointer, alias type
     or insertion point is needed.  */
  buf.base = NULL_TREE;
  buf.alias_type = NULL_TREE;
  buf.gsi = NULL;
  buf.align = 0;
  buf.off = 0;
  buf.padding_bytes = 0;
  buf.sz = int_size_in_bytes (type);
  buf.size = 0;
  buf.union_ptr = mask;
  clear_padding_type (&buf, type, buf.sz, false);
  /* Flush any padding still buffered into MASK.  */
  clear_padding_flush (&buf, true);
}
4923
1bea0d0a
JJ
/* Fold __builtin_clear_padding builtin.  Replaces the call with
   explicit stores (or a runtime loop for large arrays/VLAs) that zero
   the padding bits of the pointed-to object, then removes the call.
   Always returns true; *GSI is left on the statement preceding the
   emitted sequence so the caller revisits it.  */

static bool
gimple_fold_builtin_clear_padding (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  gcc_assert (gimple_call_num_args (stmt) == 3);
  tree ptr = gimple_call_arg (stmt, 0);
  tree typearg = gimple_call_arg (stmt, 1);
  /* the 3rd argument of __builtin_clear_padding is to distinguish whether
     this call is made by the user or by the compiler for automatic variable
     initialization.  */
  bool for_auto_init = (bool) TREE_INT_CST_LOW (gimple_call_arg (stmt, 2));
  /* The second argument is a pointer whose pointee encodes the type to
     clear.  */
  tree type = TREE_TYPE (TREE_TYPE (typearg));
  location_t loc = gimple_location (stmt);
  clear_padding_struct buf;
  gimple_stmt_iterator gsiprev = *gsi;
  /* This should be folded during the lower pass.  */
  gcc_assert (!gimple_in_ssa_p (cfun) && cfun->cfg == NULL);
  gcc_assert (COMPLETE_TYPE_P (type));
  /* Remember the statement before the call so we can tell later whether
     anything was emitted.  */
  gsi_prev (&gsiprev);

  buf.loc = loc;
  buf.clear_in_mask = false;
  buf.base = ptr;
  buf.alias_type = NULL_TREE;
  buf.gsi = gsi;
  /* Use the stronger of the pointer's and the type's alignment.  */
  buf.align = get_pointer_alignment (ptr);
  unsigned int talign = min_align_of_type (type) * BITS_PER_UNIT;
  buf.align = MAX (buf.align, talign);
  buf.off = 0;
  buf.padding_bytes = 0;
  buf.size = 0;
  buf.sz = int_size_in_bytes (type);
  buf.union_ptr = NULL;
  /* buf.sz < 0 means a variable-size type; only VLAs of fixed-size
     elements are handled below.  */
  if (buf.sz < 0 && int_size_in_bytes (strip_array_types (type)) < 0)
    sorry_at (loc, "%s not supported for variable length aggregates",
	      "__builtin_clear_padding");
  /* The implementation currently assumes 8-bit host and target
     chars which is the case for all currently supported targets
     and hosts and is required e.g. for native_{encode,interpret}* APIs.  */
  else if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
    sorry_at (loc, "%s not supported on this target",
	      "__builtin_clear_padding");
  else if (!clear_padding_type_may_have_padding_p (type))
    ;
  else if (TREE_CODE (type) == ARRAY_TYPE && buf.sz < 0)
    {
      tree sz = TYPE_SIZE_UNIT (type);
      tree elttype = type;
      /* Only supports C/C++ VLAs and flattens all the VLA levels.  */
      while (TREE_CODE (elttype) == ARRAY_TYPE
	     && int_size_in_bytes (elttype) < 0)
	elttype = TREE_TYPE (elttype);
      HOST_WIDE_INT eltsz = int_size_in_bytes (elttype);
      gcc_assert (eltsz >= 0);
      if (eltsz)
	{
	  /* Emit a runtime loop over the VLA elements.  */
	  buf.base = create_tmp_var (build_pointer_type (elttype));
	  tree end = make_ssa_name (TREE_TYPE (buf.base));
	  gimple *g = gimple_build_assign (buf.base, ptr);
	  gimple_set_location (g, loc);
	  gsi_insert_before (gsi, g, GSI_SAME_STMT);
	  g = gimple_build_assign (end, POINTER_PLUS_EXPR, buf.base, sz);
	  gimple_set_location (g, loc);
	  gsi_insert_before (gsi, g, GSI_SAME_STMT);
	  buf.sz = eltsz;
	  buf.align = TYPE_ALIGN (elttype);
	  buf.alias_type = build_pointer_type (elttype);
	  clear_padding_emit_loop (&buf, elttype, end, for_auto_init);
	}
    }
  else
    {
      /* Fixed-size object: expand padding stores inline.  */
      if (!is_gimple_mem_ref_addr (buf.base))
	{
	  buf.base = make_ssa_name (TREE_TYPE (ptr));
	  gimple *g = gimple_build_assign (buf.base, ptr);
	  gimple_set_location (g, loc);
	  gsi_insert_before (gsi, g, GSI_SAME_STMT);
	}
      buf.alias_type = build_pointer_type (type);
      clear_padding_type (&buf, type, buf.sz, for_auto_init);
      clear_padding_flush (&buf, true);
    }

  /* Drop the builtin call.  If nothing was emitted before it, replace
     it with a nop to keep the iterator valid; otherwise remove it and
     reposition *GSI before the emitted sequence.  */
  gimple_stmt_iterator gsiprev2 = *gsi;
  gsi_prev (&gsiprev2);
  if (gsi_stmt (gsiprev) == gsi_stmt (gsiprev2))
    gsi_replace (gsi, gimple_build_nop (), true);
  else
    {
      gsi_remove (gsi, true);
      *gsi = gsiprev2;
    }
  return true;
}
5021
dcb7fae2
RB
/* Fold the non-target builtin at *GSI and return whether any simplification
   was made.  Dispatches to the per-builtin folders first and falls back
   to the generic tree-level folder (fold_call_stmt).  */

static bool
gimple_fold_builtin (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
  tree callee = gimple_call_fndecl (stmt);

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return false;

  unsigned n = gimple_call_num_args (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
  switch (fcode)
    {
    case BUILT_IN_BCMP:
      return gimple_fold_builtin_bcmp (gsi);
    case BUILT_IN_BCOPY:
      return gimple_fold_builtin_bcopy (gsi);
    case BUILT_IN_BZERO:
      return gimple_fold_builtin_bzero (gsi);

    case BUILT_IN_MEMSET:
      return gimple_fold_builtin_memset (gsi,
					 gimple_call_arg (stmt, 1),
					 gimple_call_arg (stmt, 2));
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMPCPY:
    case BUILT_IN_MEMMOVE:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), fcode);
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      return gimple_fold_builtin_sprintf_chk (gsi, fcode);
    case BUILT_IN_STRCAT_CHK:
      return gimple_fold_builtin_strcat_chk (gsi);
    case BUILT_IN_STRNCAT_CHK:
      return gimple_fold_builtin_strncat_chk (gsi);
    case BUILT_IN_STRLEN:
      return gimple_fold_builtin_strlen (gsi);
    case BUILT_IN_STRCPY:
      return gimple_fold_builtin_strcpy (gsi,
					 gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCPY:
      return gimple_fold_builtin_strncpy (gsi,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2));
    case BUILT_IN_STRCAT:
      return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCAT:
      return gimple_fold_builtin_strncat (gsi);
    /* index/rindex are the legacy names for strchr/strrchr.  */
    case BUILT_IN_INDEX:
    case BUILT_IN_STRCHR:
      return gimple_fold_builtin_strchr (gsi, false);
    case BUILT_IN_RINDEX:
    case BUILT_IN_STRRCHR:
      return gimple_fold_builtin_strchr (gsi, true);
    case BUILT_IN_STRSTR:
      return gimple_fold_builtin_strstr (gsi);
    case BUILT_IN_STRCMP:
    case BUILT_IN_STRCMP_EQ:
    case BUILT_IN_STRCASECMP:
    case BUILT_IN_STRNCMP:
    case BUILT_IN_STRNCMP_EQ:
    case BUILT_IN_STRNCASECMP:
      return gimple_fold_builtin_string_compare (gsi);
    case BUILT_IN_MEMCHR:
      return gimple_fold_builtin_memchr (gsi);
    case BUILT_IN_FPUTS:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), false);
    case BUILT_IN_FPUTS_UNLOCKED:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), true);
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      return gimple_fold_builtin_memory_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     gimple_call_arg (stmt, 3),
					     fcode);
    case BUILT_IN_STPCPY:
      return gimple_fold_builtin_stpcpy (gsi);
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      return gimple_fold_builtin_stxcpy_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     fcode);
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      return gimple_fold_builtin_stxncpy_chk (gsi,
					      gimple_call_arg (stmt, 0),
					      gimple_call_arg (stmt, 1),
					      gimple_call_arg (stmt, 2),
					      gimple_call_arg (stmt, 3),
					      fcode);
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      return gimple_fold_builtin_snprintf_chk (gsi, fcode);

    /* The printf family is folded only for the argument counts the
       helpers understand; the optional trailing argument is the first
       format argument.  */
    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      if (n == 2 || n == 3)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1),
					    n == 3
					    ? gimple_call_arg (stmt, 2)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      if (n == 3 || n == 4)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 2),
					    n == 4
					    ? gimple_call_arg (stmt, 3)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      if (n == 1 || n == 2)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
					   n == 2
					   ? gimple_call_arg (stmt, 1)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      if (n == 2 || n == 3)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
					   n == 3
					   ? gimple_call_arg (stmt, 2)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_ACC_ON_DEVICE:
      return gimple_fold_builtin_acc_on_device (gsi,
						gimple_call_arg (stmt, 0));
    case BUILT_IN_REALLOC:
      return gimple_fold_builtin_realloc (gsi);

    case BUILT_IN_CLEAR_PADDING:
      return gimple_fold_builtin_clear_padding (gsi);

    default:;
    }

  /* Try the generic builtin folder.  */
  bool ignore = (gimple_call_lhs (stmt) == NULL);
  tree result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      else
	result = fold_convert (gimple_call_return_type (stmt), result);
      gimplify_and_update_call_from_tree (gsi, result);
      return true;
    }

  return false;
}
5200
451e8dae
NS
5201/* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
5202 function calls to constants, where possible. */
5203
5204static tree
5205fold_internal_goacc_dim (const gimple *call)
5206{
629b3d75
MJ
5207 int axis = oacc_get_ifn_dim_arg (call);
5208 int size = oacc_get_fn_dim_size (current_function_decl, axis);
451e8dae 5209 tree result = NULL_TREE;
67d2229e 5210 tree type = TREE_TYPE (gimple_call_lhs (call));
451e8dae 5211
67d2229e 5212 switch (gimple_call_internal_fn (call))
451e8dae 5213 {
67d2229e
TV
5214 case IFN_GOACC_DIM_POS:
5215 /* If the size is 1, we know the answer. */
5216 if (size == 1)
5217 result = build_int_cst (type, 0);
5218 break;
5219 case IFN_GOACC_DIM_SIZE:
5220 /* If the size is not dynamic, we know the answer. */
5221 if (size)
5222 result = build_int_cst (type, size);
5223 break;
5224 default:
5225 break;
451e8dae
NS
5226 }
5227
5228 return result;
5229}
5230
849a76a5
JJ
/* Return true if stmt is __atomic_compare_exchange_N call which is suitable
   for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
   &var where var is only addressable because of such calls.  */

bool
optimize_atomic_compare_exchange_p (gimple *stmt)
{
  /* Require the full 6-argument form, enabled atomics inlining, and a
     call that actually touches memory (has virtual operands).  TSan and
     ASan instrument the builtin themselves, so leave it alone then.  */
  if (gimple_call_num_args (stmt) != 6
      || !flag_inline_atomics
      || !optimize
      || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
      || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
      || !gimple_vdef (stmt)
      || !gimple_vuse (stmt))
    return false;

  tree fndecl = gimple_call_fndecl (stmt);
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
      break;
    default:
      return false;
    }

  /* The "expected" argument must be the address of a local variable.  */
  tree expected = gimple_call_arg (stmt, 1);
  if (TREE_CODE (expected) != ADDR_EXPR
      || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
    return false;

  tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
  if (!is_gimple_reg_type (etype)
      || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
      || TREE_THIS_VOLATILE (etype)
      || VECTOR_TYPE_P (etype)
      || TREE_CODE (etype) == COMPLEX_TYPE
      /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
	 might not preserve all the bits.  See PR71716.  */
      || SCALAR_FLOAT_TYPE_P (etype)
      || maybe_ne (TYPE_PRECISION (etype),
		   GET_MODE_BITSIZE (TYPE_MODE (etype))))
    return false;

  /* The "weak" argument must be a literal 0 or 1.  */
  tree weak = gimple_call_arg (stmt, 3);
  if (!integer_zerop (weak) && !integer_onep (weak))
    return false;

  /* ITYPE is the integral type of the builtin's third parameter
     ("desired"), i.e. the natural uintN_t for this size.  */
  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  machine_mode mode = TYPE_MODE (itype);

  /* The target must provide a compare-and-swap pattern for MODE.  */
  if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
      == CODE_FOR_nothing
      && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
    return false;

  if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
    return false;

  return true;
}
5296
/* Fold
   r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
   into
   _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
   i = IMAGPART_EXPR <t>;
   r = (_Bool) i;
   e = REALPART_EXPR <t>;

   This removes the need to take the address of E, letting it become a
   gimple register.  optimize_atomic_compare_exchange_p must have
   accepted STMT first.  On exit *GSI points at the load of E emitted
   before the internal call.  */

void
fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree fndecl = gimple_call_fndecl (stmt);
  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  /* ITYPE is the uintN_t type of the builtin's "desired" parameter;
     the internal fn returns a _Complex of it (value, success).  */
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  tree ctype = build_complex_type (itype);
  tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
  bool throws = false;
  edge e = NULL;
  /* Load the current value of EXPECTED to pass by value.  */
  gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
				   expected);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);
  gimple_stmt_iterator gsiret = gsi_for_stmt (g);
  if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
    {
      /* Reinterpret EXPECTED's bits as the integral ITYPE.  */
      g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, itype,
				       gimple_assign_lhs (g)));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }
  /* Encode weak flag and byte size into one argument: w * 256 + N.  */
  int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
	     + int_size_in_bytes (itype);
  g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
				  gimple_call_arg (stmt, 0),
				  gimple_assign_lhs (g),
				  gimple_call_arg (stmt, 2),
				  build_int_cst (integer_type_node, flag),
				  gimple_call_arg (stmt, 4),
				  gimple_call_arg (stmt, 5));
  tree lhs = make_ssa_name (ctype);
  gimple_call_set_lhs (g, lhs);
  gimple_move_vops (g, stmt);
  tree oldlhs = gimple_call_lhs (stmt);
  if (stmt_can_throw_internal (cfun, stmt))
    {
      /* If the original call could throw, later extraction statements
	 must go on the fallthru edge, after the throwing call.  */
      throws = true;
      e = find_fallthru_edge (gsi_bb (*gsi)->succs);
    }
  gimple_call_set_nothrow (as_a <gcall *> (g),
			   gimple_call_nothrow_p (as_a <gcall *> (stmt)));
  gimple_call_set_lhs (stmt, NULL_TREE);
  gsi_replace (gsi, g, true);
  if (oldlhs)
    {
      /* r = (_Bool) IMAGPART_EXPR <t>;  */
      g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
			       build1 (IMAGPART_EXPR, itype, lhs));
      if (throws)
	{
	  gsi_insert_on_edge_immediate (e, g);
	  *gsi = gsi_for_stmt (g);
	}
      else
	gsi_insert_after (gsi, g, GSI_NEW_STMT);
      g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  /* e = REALPART_EXPR <t>;  */
  g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
			   build1 (REALPART_EXPR, itype, lhs));
  if (throws && oldlhs == NULL_TREE)
    {
      gsi_insert_on_edge_immediate (e, g);
      *gsi = gsi_for_stmt (g);
    }
  else
    gsi_insert_after (gsi, g, GSI_NEW_STMT);
  if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
    {
      /* Convert back from ITYPE to EXPECTED's own type.  */
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
			       VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
				       gimple_assign_lhs (g)));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
  gsi_insert_after (gsi, g, GSI_NEW_STMT);
  *gsi = gsiret;
}
5384
1304953e
JJ
5385/* Return true if ARG0 CODE ARG1 in infinite signed precision operation
5386 doesn't fit into TYPE. The test for overflow should be regardless of
5387 -fwrapv, and even for unsigned types. */
5388
5389bool
5390arith_overflowed_p (enum tree_code code, const_tree type,
5391 const_tree arg0, const_tree arg1)
5392{
1304953e
JJ
5393 widest2_int warg0 = widest2_int_cst (arg0);
5394 widest2_int warg1 = widest2_int_cst (arg1);
5395 widest2_int wres;
5396 switch (code)
5397 {
5398 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
5399 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
5400 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
5401 default: gcc_unreachable ();
5402 }
5403 signop sign = TYPE_SIGN (type);
5404 if (sign == UNSIGNED && wi::neg_p (wres))
5405 return true;
5406 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
5407}
5408
868363d4
RS
5409/* If IFN_MASK_LOAD/STORE call CALL is unconditional, return a MEM_REF
5410 for the memory it references, otherwise return null. VECTYPE is the
5411 type of the memory vector. */
5412
5413static tree
5414gimple_fold_mask_load_store_mem_ref (gcall *call, tree vectype)
5415{
5416 tree ptr = gimple_call_arg (call, 0);
5417 tree alias_align = gimple_call_arg (call, 1);
5418 tree mask = gimple_call_arg (call, 2);
5419 if (!tree_fits_uhwi_p (alias_align) || !integer_all_onesp (mask))
5420 return NULL_TREE;
5421
aa204d51 5422 unsigned HOST_WIDE_INT align = tree_to_uhwi (alias_align);
868363d4
RS
5423 if (TYPE_ALIGN (vectype) != align)
5424 vectype = build_aligned_type (vectype, align);
5425 tree offset = build_zero_cst (TREE_TYPE (alias_align));
5426 return fold_build2 (MEM_REF, vectype, ptr, offset);
5427}
5428
5429/* Try to fold IFN_MASK_LOAD call CALL. Return true on success. */
5430
5431static bool
5432gimple_fold_mask_load (gimple_stmt_iterator *gsi, gcall *call)
5433{
5434 tree lhs = gimple_call_lhs (call);
5435 if (!lhs)
5436 return false;
5437
5438 if (tree rhs = gimple_fold_mask_load_store_mem_ref (call, TREE_TYPE (lhs)))
5439 {
5440 gassign *new_stmt = gimple_build_assign (lhs, rhs);
5441 gimple_set_location (new_stmt, gimple_location (call));
5442 gimple_move_vops (new_stmt, call);
5443 gsi_replace (gsi, new_stmt, false);
5444 return true;
5445 }
5446 return false;
5447}
5448
5449/* Try to fold IFN_MASK_STORE call CALL. Return true on success. */
5450
5451static bool
5452gimple_fold_mask_store (gimple_stmt_iterator *gsi, gcall *call)
5453{
5454 tree rhs = gimple_call_arg (call, 3);
5455 if (tree lhs = gimple_fold_mask_load_store_mem_ref (call, TREE_TYPE (rhs)))
5456 {
5457 gassign *new_stmt = gimple_build_assign (lhs, rhs);
5458 gimple_set_location (new_stmt, gimple_location (call));
5459 gimple_move_vops (new_stmt, call);
5460 gsi_replace (gsi, new_stmt, false);
5461 return true;
5462 }
5463 return false;
5464}
5465
cbdd87d4
RG
5466/* Attempt to fold a call statement referenced by the statement iterator GSI.
5467 The statement may be replaced by another statement, e.g., if the call
5468 simplifies to a constant value. Return true if any changes were made.
5469 It is assumed that the operands have been previously folded. */
5470
e021c122 5471static bool
ceeffab0 5472gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
cbdd87d4 5473{
538dd0b7 5474 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3b45a007 5475 tree callee;
e021c122 5476 bool changed = false;
3b45a007
RG
5477
5478 /* Check for virtual calls that became direct calls. */
5479 callee = gimple_call_fn (stmt);
25583c4f 5480 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
3b45a007 5481 {
49c471e3
MJ
5482 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
5483 {
450ad0cd
JH
5484 if (dump_file && virtual_method_call_p (callee)
5485 && !possible_polymorphic_call_target_p
6f8091fc
JH
5486 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
5487 (OBJ_TYPE_REF_EXPR (callee)))))
450ad0cd
JH
5488 {
5489 fprintf (dump_file,
a70e9985 5490 "Type inheritance inconsistent devirtualization of ");
450ad0cd
JH
5491 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5492 fprintf (dump_file, " to ");
5493 print_generic_expr (dump_file, callee, TDF_SLIM);
5494 fprintf (dump_file, "\n");
5495 }
5496
49c471e3 5497 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
e021c122
RG
5498 changed = true;
5499 }
a70e9985 5500 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
e021c122 5501 {
61dd6a2e
JH
5502 bool final;
5503 vec <cgraph_node *>targets
058d0a90 5504 = possible_polymorphic_call_targets (callee, stmt, &final);
2b5f0895 5505 if (final && targets.length () <= 1 && dbg_cnt (devirt))
e021c122 5506 {
a70e9985 5507 tree lhs = gimple_call_lhs (stmt);
2b5f0895
XDL
5508 if (dump_enabled_p ())
5509 {
4f5b9c80 5510 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
2b5f0895
XDL
5511 "folding virtual function call to %s\n",
5512 targets.length () == 1
5513 ? targets[0]->name ()
5514 : "__builtin_unreachable");
5515 }
61dd6a2e 5516 if (targets.length () == 1)
cf3e5a89 5517 {
18954840
JJ
5518 tree fndecl = targets[0]->decl;
5519 gimple_call_set_fndecl (stmt, fndecl);
cf3e5a89 5520 changed = true;
18954840
JJ
5521 /* If changing the call to __cxa_pure_virtual
5522 or similar noreturn function, adjust gimple_call_fntype
5523 too. */
865f7046 5524 if (gimple_call_noreturn_p (stmt)
18954840
JJ
5525 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
5526 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
5527 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
5528 == void_type_node))
5529 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
a70e9985 5530 /* If the call becomes noreturn, remove the lhs. */
3cee7e4e
MP
5531 if (lhs
5532 && gimple_call_noreturn_p (stmt)
18954840 5533 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
abd3a68c 5534 || should_remove_lhs_p (lhs)))
a70e9985
JJ
5535 {
5536 if (TREE_CODE (lhs) == SSA_NAME)
5537 {
b731b390 5538 tree var = create_tmp_var (TREE_TYPE (lhs));
a70e9985 5539 tree def = get_or_create_ssa_default_def (cfun, var);
355fe088 5540 gimple *new_stmt = gimple_build_assign (lhs, def);
a70e9985
JJ
5541 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
5542 }
5543 gimple_call_set_lhs (stmt, NULL_TREE);
5544 }
0b986c6a 5545 maybe_remove_unused_call_args (cfun, stmt);
cf3e5a89 5546 }
a70e9985 5547 else
cf3e5a89
JJ
5548 {
5549 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
355fe088 5550 gimple *new_stmt = gimple_build_call (fndecl, 0);
cf3e5a89 5551 gimple_set_location (new_stmt, gimple_location (stmt));
2da6996c
RB
5552 /* If the call had a SSA name as lhs morph that into
5553 an uninitialized value. */
a70e9985
JJ
5554 if (lhs && TREE_CODE (lhs) == SSA_NAME)
5555 {
b731b390 5556 tree var = create_tmp_var (TREE_TYPE (lhs));
2da6996c
RB
5557 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
5558 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
5559 set_ssa_default_def (cfun, var, lhs);
42e52a51 5560 }
779724a5 5561 gimple_move_vops (new_stmt, stmt);
2da6996c 5562 gsi_replace (gsi, new_stmt, false);
cf3e5a89
JJ
5563 return true;
5564 }
e021c122 5565 }
49c471e3 5566 }
e021c122 5567 }
49c471e3 5568
f2d3d07e
RH
5569 /* Check for indirect calls that became direct calls, and then
5570 no longer require a static chain. */
5571 if (gimple_call_chain (stmt))
5572 {
5573 tree fn = gimple_call_fndecl (stmt);
5574 if (fn && !DECL_STATIC_CHAIN (fn))
5575 {
5576 gimple_call_set_chain (stmt, NULL);
5577 changed = true;
5578 }
f2d3d07e
RH
5579 }
5580
e021c122
RG
5581 if (inplace)
5582 return changed;
5583
5584 /* Check for builtins that CCP can handle using information not
5585 available in the generic fold routines. */
fef5a0d9
RB
5586 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
5587 {
5588 if (gimple_fold_builtin (gsi))
5589 changed = true;
5590 }
5591 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
e021c122 5592 {
ea679d55 5593 changed |= targetm.gimple_fold_builtin (gsi);
3b45a007 5594 }
368b454d 5595 else if (gimple_call_internal_p (stmt))
ed9c79e1 5596 {
368b454d
JJ
5597 enum tree_code subcode = ERROR_MARK;
5598 tree result = NULL_TREE;
1304953e
JJ
5599 bool cplx_result = false;
5600 tree overflow = NULL_TREE;
368b454d
JJ
5601 switch (gimple_call_internal_fn (stmt))
5602 {
5603 case IFN_BUILTIN_EXPECT:
5604 result = fold_builtin_expect (gimple_location (stmt),
5605 gimple_call_arg (stmt, 0),
5606 gimple_call_arg (stmt, 1),
1e9168b2
ML
5607 gimple_call_arg (stmt, 2),
5608 NULL_TREE);
368b454d 5609 break;
0e82f089 5610 case IFN_UBSAN_OBJECT_SIZE:
ca1150f0
JJ
5611 {
5612 tree offset = gimple_call_arg (stmt, 1);
5613 tree objsize = gimple_call_arg (stmt, 2);
5614 if (integer_all_onesp (objsize)
5615 || (TREE_CODE (offset) == INTEGER_CST
5616 && TREE_CODE (objsize) == INTEGER_CST
5617 && tree_int_cst_le (offset, objsize)))
5618 {
5619 replace_call_with_value (gsi, NULL_TREE);
5620 return true;
5621 }
5622 }
5623 break;
5624 case IFN_UBSAN_PTR:
5625 if (integer_zerop (gimple_call_arg (stmt, 1)))
0e82f089 5626 {
ca1150f0 5627 replace_call_with_value (gsi, NULL_TREE);
0e82f089
MP
5628 return true;
5629 }
5630 break;
ca1150f0
JJ
5631 case IFN_UBSAN_BOUNDS:
5632 {
5633 tree index = gimple_call_arg (stmt, 1);
5634 tree bound = gimple_call_arg (stmt, 2);
5635 if (TREE_CODE (index) == INTEGER_CST
5636 && TREE_CODE (bound) == INTEGER_CST)
5637 {
5638 index = fold_convert (TREE_TYPE (bound), index);
5639 if (TREE_CODE (index) == INTEGER_CST
5640 && tree_int_cst_le (index, bound))
5641 {
5642 replace_call_with_value (gsi, NULL_TREE);
5643 return true;
5644 }
5645 }
5646 }
5647 break;
451e8dae
NS
5648 case IFN_GOACC_DIM_SIZE:
5649 case IFN_GOACC_DIM_POS:
5650 result = fold_internal_goacc_dim (stmt);
5651 break;
368b454d
JJ
5652 case IFN_UBSAN_CHECK_ADD:
5653 subcode = PLUS_EXPR;
5654 break;
5655 case IFN_UBSAN_CHECK_SUB:
5656 subcode = MINUS_EXPR;
5657 break;
5658 case IFN_UBSAN_CHECK_MUL:
5659 subcode = MULT_EXPR;
5660 break;
1304953e
JJ
5661 case IFN_ADD_OVERFLOW:
5662 subcode = PLUS_EXPR;
5663 cplx_result = true;
5664 break;
5665 case IFN_SUB_OVERFLOW:
5666 subcode = MINUS_EXPR;
5667 cplx_result = true;
5668 break;
5669 case IFN_MUL_OVERFLOW:
5670 subcode = MULT_EXPR;
5671 cplx_result = true;
5672 break;
868363d4
RS
5673 case IFN_MASK_LOAD:
5674 changed |= gimple_fold_mask_load (gsi, stmt);
5675 break;
5676 case IFN_MASK_STORE:
5677 changed |= gimple_fold_mask_store (gsi, stmt);
5678 break;
368b454d
JJ
5679 default:
5680 break;
5681 }
5682 if (subcode != ERROR_MARK)
5683 {
5684 tree arg0 = gimple_call_arg (stmt, 0);
5685 tree arg1 = gimple_call_arg (stmt, 1);
1304953e
JJ
5686 tree type = TREE_TYPE (arg0);
5687 if (cplx_result)
5688 {
5689 tree lhs = gimple_call_lhs (stmt);
5690 if (lhs == NULL_TREE)
5691 type = NULL_TREE;
5692 else
5693 type = TREE_TYPE (TREE_TYPE (lhs));
5694 }
5695 if (type == NULL_TREE)
5696 ;
368b454d 5697 /* x = y + 0; x = y - 0; x = y * 0; */
1304953e
JJ
5698 else if (integer_zerop (arg1))
5699 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
368b454d
JJ
5700 /* x = 0 + y; x = 0 * y; */
5701 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
1304953e 5702 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
368b454d
JJ
5703 /* x = y - y; */
5704 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
1304953e 5705 result = integer_zero_node;
368b454d 5706 /* x = y * 1; x = 1 * y; */
1304953e
JJ
5707 else if (subcode == MULT_EXPR && integer_onep (arg1))
5708 result = arg0;
5709 else if (subcode == MULT_EXPR && integer_onep (arg0))
5710 result = arg1;
5711 else if (TREE_CODE (arg0) == INTEGER_CST
5712 && TREE_CODE (arg1) == INTEGER_CST)
368b454d 5713 {
1304953e
JJ
5714 if (cplx_result)
5715 result = int_const_binop (subcode, fold_convert (type, arg0),
5716 fold_convert (type, arg1));
5717 else
5718 result = int_const_binop (subcode, arg0, arg1);
5719 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
5720 {
5721 if (cplx_result)
5722 overflow = build_one_cst (type);
5723 else
5724 result = NULL_TREE;
5725 }
5726 }
5727 if (result)
5728 {
5729 if (result == integer_zero_node)
5730 result = build_zero_cst (type);
5731 else if (cplx_result && TREE_TYPE (result) != type)
5732 {
5733 if (TREE_CODE (result) == INTEGER_CST)
5734 {
5735 if (arith_overflowed_p (PLUS_EXPR, type, result,
5736 integer_zero_node))
5737 overflow = build_one_cst (type);
5738 }
5739 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
5740 && TYPE_UNSIGNED (type))
5741 || (TYPE_PRECISION (type)
5742 < (TYPE_PRECISION (TREE_TYPE (result))
5743 + (TYPE_UNSIGNED (TREE_TYPE (result))
5744 && !TYPE_UNSIGNED (type)))))
5745 result = NULL_TREE;
5746 if (result)
5747 result = fold_convert (type, result);
5748 }
368b454d
JJ
5749 }
5750 }
1304953e 5751
ed9c79e1
JJ
5752 if (result)
5753 {
1304953e
JJ
5754 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
5755 result = drop_tree_overflow (result);
5756 if (cplx_result)
5757 {
5758 if (overflow == NULL_TREE)
5759 overflow = build_zero_cst (TREE_TYPE (result));
5760 tree ctype = build_complex_type (TREE_TYPE (result));
5761 if (TREE_CODE (result) == INTEGER_CST
5762 && TREE_CODE (overflow) == INTEGER_CST)
5763 result = build_complex (ctype, result, overflow);
5764 else
5765 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
5766 ctype, result, overflow);
5767 }
52a5515e 5768 gimplify_and_update_call_from_tree (gsi, result);
ed9c79e1
JJ
5769 changed = true;
5770 }
5771 }
3b45a007 5772
e021c122 5773 return changed;
cbdd87d4
RG
5774}
5775
e0ee10ed 5776
89a79e96
RB
5777/* Return true whether NAME has a use on STMT. */
5778
5779static bool
355fe088 5780has_use_on_stmt (tree name, gimple *stmt)
89a79e96
RB
5781{
5782 imm_use_iterator iter;
5783 use_operand_p use_p;
5784 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
5785 if (USE_STMT (use_p) == stmt)
5786 return true;
5787 return false;
5788}
5789
e0ee10ed
RB
5790/* Worker for fold_stmt_1 dispatch to pattern based folding with
5791 gimple_simplify.
5792
5793 Replaces *GSI with the simplification result in RCODE and OPS
5794 and the associated statements in *SEQ. Does the replacement
5795 according to INPLACE and returns true if the operation succeeded. */
5796
5797static bool
5798replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
5d75ad95 5799 gimple_match_op *res_op,
e0ee10ed
RB
5800 gimple_seq *seq, bool inplace)
5801{
355fe088 5802 gimple *stmt = gsi_stmt (*gsi);
5d75ad95
RS
5803 tree *ops = res_op->ops;
5804 unsigned int num_ops = res_op->num_ops;
e0ee10ed
RB
5805
5806 /* Play safe and do not allow abnormals to be mentioned in
89a79e96
RB
5807 newly created statements. See also maybe_push_res_to_seq.
5808 As an exception allow such uses if there was a use of the
5809 same SSA name on the old stmt. */
5d75ad95
RS
5810 for (unsigned int i = 0; i < num_ops; ++i)
5811 if (TREE_CODE (ops[i]) == SSA_NAME
5812 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])
5813 && !has_use_on_stmt (ops[i], stmt))
5814 return false;
5815
5816 if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
5817 for (unsigned int i = 0; i < 2; ++i)
5818 if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
5819 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))
5820 && !has_use_on_stmt (TREE_OPERAND (ops[0], i), stmt))
5821 return false;
e0ee10ed 5822
fec40d06
RS
5823 /* Don't insert new statements when INPLACE is true, even if we could
5824 reuse STMT for the final statement. */
5825 if (inplace && !gimple_seq_empty_p (*seq))
5826 return false;
5827
538dd0b7 5828 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
e0ee10ed 5829 {
5d75ad95
RS
5830 gcc_assert (res_op->code.is_tree_code ());
5831 if (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison
e0ee10ed
RB
5832 /* GIMPLE_CONDs condition may not throw. */
5833 && (!flag_exceptions
5834 || !cfun->can_throw_non_call_exceptions
5d75ad95 5835 || !operation_could_trap_p (res_op->code,
e0ee10ed
RB
5836 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
5837 false, NULL_TREE)))
5d75ad95
RS
5838 gimple_cond_set_condition (cond_stmt, res_op->code, ops[0], ops[1]);
5839 else if (res_op->code == SSA_NAME)
538dd0b7 5840 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
e0ee10ed 5841 build_zero_cst (TREE_TYPE (ops[0])));
5d75ad95 5842 else if (res_op->code == INTEGER_CST)
e0ee10ed
RB
5843 {
5844 if (integer_zerop (ops[0]))
538dd0b7 5845 gimple_cond_make_false (cond_stmt);
e0ee10ed 5846 else
538dd0b7 5847 gimple_cond_make_true (cond_stmt);
e0ee10ed
RB
5848 }
5849 else if (!inplace)
5850 {
5d75ad95 5851 tree res = maybe_push_res_to_seq (res_op, seq);
e0ee10ed
RB
5852 if (!res)
5853 return false;
538dd0b7 5854 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
e0ee10ed
RB
5855 build_zero_cst (TREE_TYPE (res)));
5856 }
5857 else
5858 return false;
5859 if (dump_file && (dump_flags & TDF_DETAILS))
5860 {
5861 fprintf (dump_file, "gimple_simplified to ");
5862 if (!gimple_seq_empty_p (*seq))
5863 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
5864 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
5865 0, TDF_SLIM);
5866 }
5867 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
5868 return true;
5869 }
5870 else if (is_gimple_assign (stmt)
5d75ad95 5871 && res_op->code.is_tree_code ())
e0ee10ed
RB
5872 {
5873 if (!inplace
5d75ad95 5874 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (res_op->code))
e0ee10ed 5875 {
5d75ad95
RS
5876 maybe_build_generic_op (res_op);
5877 gimple_assign_set_rhs_with_ops (gsi, res_op->code,
5878 res_op->op_or_null (0),
5879 res_op->op_or_null (1),
5880 res_op->op_or_null (2));
e0ee10ed
RB
5881 if (dump_file && (dump_flags & TDF_DETAILS))
5882 {
5883 fprintf (dump_file, "gimple_simplified to ");
5884 if (!gimple_seq_empty_p (*seq))
5885 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
5886 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
5887 0, TDF_SLIM);
5888 }
5889 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
5890 return true;
5891 }
5892 }
5d75ad95
RS
5893 else if (res_op->code.is_fn_code ()
5894 && gimple_call_combined_fn (stmt) == res_op->code)
37d486ab 5895 {
5d75ad95
RS
5896 gcc_assert (num_ops == gimple_call_num_args (stmt));
5897 for (unsigned int i = 0; i < num_ops; ++i)
5898 gimple_call_set_arg (stmt, i, ops[i]);
fec40d06
RS
5899 if (dump_file && (dump_flags & TDF_DETAILS))
5900 {
5901 fprintf (dump_file, "gimple_simplified to ");
5902 if (!gimple_seq_empty_p (*seq))
5903 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
5904 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
5905 }
5906 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
37d486ab
RB
5907 return true;
5908 }
e0ee10ed
RB
5909 else if (!inplace)
5910 {
5911 if (gimple_has_lhs (stmt))
5912 {
5913 tree lhs = gimple_get_lhs (stmt);
5d75ad95 5914 if (!maybe_push_res_to_seq (res_op, seq, lhs))
de665bbd 5915 return false;
e0ee10ed
RB
5916 if (dump_file && (dump_flags & TDF_DETAILS))
5917 {
5918 fprintf (dump_file, "gimple_simplified to ");
5919 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
5920 }
5921 gsi_replace_with_seq_vops (gsi, *seq);
5922 return true;
5923 }
5924 else
5925 gcc_unreachable ();
5926 }
5927
5928 return false;
5929}
5930
040292e7
RB
5931/* Canonicalize MEM_REFs invariant address operand after propagation. */
5932
5933static bool
fabe0ede 5934maybe_canonicalize_mem_ref_addr (tree *t, bool is_debug = false)
040292e7
RB
5935{
5936 bool res = false;
fe8c8f1e 5937 tree *orig_t = t;
040292e7
RB
5938
5939 if (TREE_CODE (*t) == ADDR_EXPR)
5940 t = &TREE_OPERAND (*t, 0);
5941
f17a223d
RB
5942 /* The C and C++ frontends use an ARRAY_REF for indexing with their
5943 generic vector extension. The actual vector referenced is
5944 view-converted to an array type for this purpose. If the index
5945 is constant the canonical representation in the middle-end is a
5946 BIT_FIELD_REF so re-write the former to the latter here. */
5947 if (TREE_CODE (*t) == ARRAY_REF
5948 && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
5949 && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
5950 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
5951 {
5952 tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
5953 if (VECTOR_TYPE_P (vtype))
5954 {
5955 tree low = array_ref_low_bound (*t);
5956 if (TREE_CODE (low) == INTEGER_CST)
5957 {
5958 if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
5959 {
5960 widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
5961 wi::to_widest (low));
5962 idx = wi::mul (idx, wi::to_widest
5963 (TYPE_SIZE (TREE_TYPE (*t))));
5964 widest_int ext
5965 = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
5966 if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
5967 {
5968 *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
5969 TREE_TYPE (*t),
5970 TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
5971 TYPE_SIZE (TREE_TYPE (*t)),
92e29a5e 5972 wide_int_to_tree (bitsizetype, idx));
f17a223d
RB
5973 res = true;
5974 }
5975 }
5976 }
5977 }
5978 }
5979
040292e7
RB
5980 while (handled_component_p (*t))
5981 t = &TREE_OPERAND (*t, 0);
5982
5983 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
5984 of invariant addresses into a SSA name MEM_REF address. */
5985 if (TREE_CODE (*t) == MEM_REF
5986 || TREE_CODE (*t) == TARGET_MEM_REF)
5987 {
5988 tree addr = TREE_OPERAND (*t, 0);
5989 if (TREE_CODE (addr) == ADDR_EXPR
5990 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
5991 || handled_component_p (TREE_OPERAND (addr, 0))))
5992 {
5993 tree base;
a90c8804 5994 poly_int64 coffset;
040292e7
RB
5995 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
5996 &coffset);
5997 if (!base)
fabe0ede
JJ
5998 {
5999 if (is_debug)
6000 return false;
6001 gcc_unreachable ();
6002 }
040292e7
RB
6003
6004 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
6005 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
6006 TREE_OPERAND (*t, 1),
6007 size_int (coffset));
6008 res = true;
6009 }
6010 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
6011 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
6012 }
6013
6014 /* Canonicalize back MEM_REFs to plain reference trees if the object
6015 accessed is a decl that has the same access semantics as the MEM_REF. */
6016 if (TREE_CODE (*t) == MEM_REF
6017 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
f3dccf50
RB
6018 && integer_zerop (TREE_OPERAND (*t, 1))
6019 && MR_DEPENDENCE_CLIQUE (*t) == 0)
040292e7
RB
6020 {
6021 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
6022 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
6023 if (/* Same volatile qualification. */
6024 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
6025 /* Same TBAA behavior with -fstrict-aliasing. */
6026 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
6027 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
6028 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
6029 /* Same alignment. */
6030 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
6031 /* We have to look out here to not drop a required conversion
6032 from the rhs to the lhs if *t appears on the lhs or vice-versa
6033 if it appears on the rhs. Thus require strict type
6034 compatibility. */
6035 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
6036 {
6037 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
6038 res = true;
6039 }
6040 }
6041
fe8c8f1e
RB
6042 else if (TREE_CODE (*orig_t) == ADDR_EXPR
6043 && TREE_CODE (*t) == MEM_REF
6044 && TREE_CODE (TREE_OPERAND (*t, 0)) == INTEGER_CST)
6045 {
6046 tree base;
6047 poly_int64 coffset;
6048 base = get_addr_base_and_unit_offset (TREE_OPERAND (*orig_t, 0),
6049 &coffset);
6050 if (base)
6051 {
6052 gcc_assert (TREE_CODE (base) == MEM_REF);
6053 poly_int64 moffset;
6054 if (mem_ref_offset (base).to_shwi (&moffset))
6055 {
6056 coffset += moffset;
6057 if (wi::to_poly_wide (TREE_OPERAND (base, 0)).to_shwi (&moffset))
6058 {
6059 coffset += moffset;
6060 *orig_t = build_int_cst (TREE_TYPE (*orig_t), coffset);
6061 return true;
6062 }
6063 }
6064 }
6065 }
6066
040292e7
RB
6067 /* Canonicalize TARGET_MEM_REF in particular with respect to
6068 the indexes becoming constant. */
6069 else if (TREE_CODE (*t) == TARGET_MEM_REF)
6070 {
6071 tree tem = maybe_fold_tmr (*t);
6072 if (tem)
6073 {
6074 *t = tem;
c7789683
RS
6075 if (TREE_CODE (*orig_t) == ADDR_EXPR)
6076 recompute_tree_invariant_for_addr_expr (*orig_t);
040292e7
RB
6077 res = true;
6078 }
6079 }
6080
6081 return res;
6082}
6083
cbdd87d4
RG
6084/* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
6085 distinguishes both cases. */
6086
6087static bool
e0ee10ed 6088fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
cbdd87d4
RG
6089{
6090 bool changed = false;
355fe088 6091 gimple *stmt = gsi_stmt (*gsi);
e9e2bad7 6092 bool nowarning = warning_suppressed_p (stmt, OPT_Wstrict_overflow);
cbdd87d4 6093 unsigned i;
a8b85ce9 6094 fold_defer_overflow_warnings ();
cbdd87d4 6095
040292e7
RB
6096 /* First do required canonicalization of [TARGET_]MEM_REF addresses
6097 after propagation.
6098 ??? This shouldn't be done in generic folding but in the
6099 propagation helpers which also know whether an address was
89a79e96
RB
6100 propagated.
6101 Also canonicalize operand order. */
040292e7
RB
6102 switch (gimple_code (stmt))
6103 {
6104 case GIMPLE_ASSIGN:
6105 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
6106 {
6107 tree *rhs = gimple_assign_rhs1_ptr (stmt);
6108 if ((REFERENCE_CLASS_P (*rhs)
6109 || TREE_CODE (*rhs) == ADDR_EXPR)
6110 && maybe_canonicalize_mem_ref_addr (rhs))
6111 changed = true;
6112 tree *lhs = gimple_assign_lhs_ptr (stmt);
6113 if (REFERENCE_CLASS_P (*lhs)
6114 && maybe_canonicalize_mem_ref_addr (lhs))
6115 changed = true;
6116 }
89a79e96
RB
6117 else
6118 {
6119 /* Canonicalize operand order. */
6120 enum tree_code code = gimple_assign_rhs_code (stmt);
6121 if (TREE_CODE_CLASS (code) == tcc_comparison
6122 || commutative_tree_code (code)
6123 || commutative_ternary_tree_code (code))
6124 {
6125 tree rhs1 = gimple_assign_rhs1 (stmt);
6126 tree rhs2 = gimple_assign_rhs2 (stmt);
14e72812 6127 if (tree_swap_operands_p (rhs1, rhs2))
89a79e96
RB
6128 {
6129 gimple_assign_set_rhs1 (stmt, rhs2);
6130 gimple_assign_set_rhs2 (stmt, rhs1);
6131 if (TREE_CODE_CLASS (code) == tcc_comparison)
6132 gimple_assign_set_rhs_code (stmt,
6133 swap_tree_comparison (code));
6134 changed = true;
6135 }
6136 }
6137 }
040292e7
RB
6138 break;
6139 case GIMPLE_CALL:
6140 {
6141 for (i = 0; i < gimple_call_num_args (stmt); ++i)
6142 {
6143 tree *arg = gimple_call_arg_ptr (stmt, i);
6144 if (REFERENCE_CLASS_P (*arg)
6145 && maybe_canonicalize_mem_ref_addr (arg))
6146 changed = true;
6147 }
6148 tree *lhs = gimple_call_lhs_ptr (stmt);
6149 if (*lhs
6150 && REFERENCE_CLASS_P (*lhs)
6151 && maybe_canonicalize_mem_ref_addr (lhs))
6152 changed = true;
6153 break;
6154 }
6155 case GIMPLE_ASM:
6156 {
538dd0b7
DM
6157 gasm *asm_stmt = as_a <gasm *> (stmt);
6158 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
040292e7 6159 {
538dd0b7 6160 tree link = gimple_asm_output_op (asm_stmt, i);
040292e7
RB
6161 tree op = TREE_VALUE (link);
6162 if (REFERENCE_CLASS_P (op)
6163 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
6164 changed = true;
6165 }
538dd0b7 6166 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
040292e7 6167 {
538dd0b7 6168 tree link = gimple_asm_input_op (asm_stmt, i);
040292e7
RB
6169 tree op = TREE_VALUE (link);
6170 if ((REFERENCE_CLASS_P (op)
6171 || TREE_CODE (op) == ADDR_EXPR)
6172 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
6173 changed = true;
6174 }
6175 }
6176 break;
6177 case GIMPLE_DEBUG:
6178 if (gimple_debug_bind_p (stmt))
6179 {
6180 tree *val = gimple_debug_bind_get_value_ptr (stmt);
6181 if (*val
6182 && (REFERENCE_CLASS_P (*val)
6183 || TREE_CODE (*val) == ADDR_EXPR)
fabe0ede 6184 && maybe_canonicalize_mem_ref_addr (val, true))
040292e7
RB
6185 changed = true;
6186 }
6187 break;
89a79e96
RB
6188 case GIMPLE_COND:
6189 {
6190 /* Canonicalize operand order. */
6191 tree lhs = gimple_cond_lhs (stmt);
6192 tree rhs = gimple_cond_rhs (stmt);
14e72812 6193 if (tree_swap_operands_p (lhs, rhs))
89a79e96
RB
6194 {
6195 gcond *gc = as_a <gcond *> (stmt);
6196 gimple_cond_set_lhs (gc, rhs);
6197 gimple_cond_set_rhs (gc, lhs);
6198 gimple_cond_set_code (gc,
6199 swap_tree_comparison (gimple_cond_code (gc)));
6200 changed = true;
6201 }
6202 }
040292e7
RB
6203 default:;
6204 }
6205
e0ee10ed
RB
6206 /* Dispatch to pattern-based folding. */
6207 if (!inplace
6208 || is_gimple_assign (stmt)
6209 || gimple_code (stmt) == GIMPLE_COND)
6210 {
6211 gimple_seq seq = NULL;
5d75ad95
RS
6212 gimple_match_op res_op;
6213 if (gimple_simplify (stmt, &res_op, inplace ? NULL : &seq,
0ff093d8 6214 valueize, valueize))
e0ee10ed 6215 {
5d75ad95 6216 if (replace_stmt_with_simplification (gsi, &res_op, &seq, inplace))
e0ee10ed
RB
6217 changed = true;
6218 else
6219 gimple_seq_discard (seq);
6220 }
6221 }
6222
6223 stmt = gsi_stmt (*gsi);
6224
cbdd87d4
RG
6225 /* Fold the main computation performed by the statement. */
6226 switch (gimple_code (stmt))
6227 {
6228 case GIMPLE_ASSIGN:
6229 {
819ec64c
RB
6230 /* Try to canonicalize for boolean-typed X the comparisons
6231 X == 0, X == 1, X != 0, and X != 1. */
6232 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
6233 || gimple_assign_rhs_code (stmt) == NE_EXPR)
5fbcc0ed 6234 {
819ec64c
RB
6235 tree lhs = gimple_assign_lhs (stmt);
6236 tree op1 = gimple_assign_rhs1 (stmt);
6237 tree op2 = gimple_assign_rhs2 (stmt);
6238 tree type = TREE_TYPE (op1);
6239
6240 /* Check whether the comparison operands are of the same boolean
6241 type as the result type is.
6242 Check that second operand is an integer-constant with value
6243 one or zero. */
6244 if (TREE_CODE (op2) == INTEGER_CST
6245 && (integer_zerop (op2) || integer_onep (op2))
6246 && useless_type_conversion_p (TREE_TYPE (lhs), type))
6247 {
6248 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
6249 bool is_logical_not = false;
6250
6251 /* X == 0 and X != 1 is a logical-not.of X
6252 X == 1 and X != 0 is X */
6253 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
6254 || (cmp_code == NE_EXPR && integer_onep (op2)))
6255 is_logical_not = true;
6256
6257 if (is_logical_not == false)
6258 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
6259 /* Only for one-bit precision typed X the transformation
6260 !X -> ~X is valied. */
6261 else if (TYPE_PRECISION (type) == 1)
6262 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
6263 /* Otherwise we use !X -> X ^ 1. */
6264 else
6265 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
6266 build_int_cst (type, 1));
6267 changed = true;
6268 break;
6269 }
5fbcc0ed 6270 }
819ec64c
RB
6271
6272 unsigned old_num_ops = gimple_num_ops (stmt);
6273 tree lhs = gimple_assign_lhs (stmt);
6274 tree new_rhs = fold_gimple_assign (gsi);
cbdd87d4
RG
6275 if (new_rhs
6276 && !useless_type_conversion_p (TREE_TYPE (lhs),
6277 TREE_TYPE (new_rhs)))
6278 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
6279 if (new_rhs
6280 && (!inplace
6281 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
6282 {
6283 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
6284 changed = true;
6285 }
6286 break;
6287 }
6288
cbdd87d4 6289 case GIMPLE_CALL:
ceeffab0 6290 changed |= gimple_fold_call (gsi, inplace);
cbdd87d4
RG
6291 break;
6292
bd422c4a
RG
6293 case GIMPLE_DEBUG:
6294 if (gimple_debug_bind_p (stmt))
6295 {
6296 tree val = gimple_debug_bind_get_value (stmt);
6297 if (val
6298 && REFERENCE_CLASS_P (val))
6299 {
0bf8cd9d 6300 tree tem = maybe_fold_reference (val);
bd422c4a
RG
6301 if (tem)
6302 {
6303 gimple_debug_bind_set_value (stmt, tem);
6304 changed = true;
6305 }
6306 }
3e888a5e
RG
6307 else if (val
6308 && TREE_CODE (val) == ADDR_EXPR)
6309 {
6310 tree ref = TREE_OPERAND (val, 0);
0bf8cd9d 6311 tree tem = maybe_fold_reference (ref);
3e888a5e
RG
6312 if (tem)
6313 {
6314 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
6315 gimple_debug_bind_set_value (stmt, tem);
6316 changed = true;
6317 }
6318 }
bd422c4a
RG
6319 }
6320 break;
6321
cfe3d653
PK
6322 case GIMPLE_RETURN:
6323 {
6324 greturn *ret_stmt = as_a<greturn *> (stmt);
6325 tree ret = gimple_return_retval(ret_stmt);
6326
6327 if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
6328 {
6329 tree val = valueize (ret);
1af928db
RB
6330 if (val && val != ret
6331 && may_propagate_copy (ret, val))
cfe3d653
PK
6332 {
6333 gimple_return_set_retval (ret_stmt, val);
6334 changed = true;
6335 }
6336 }
6337 }
6338 break;
6339
cbdd87d4
RG
6340 default:;
6341 }
6342
6343 stmt = gsi_stmt (*gsi);
6344
a8b85ce9 6345 fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
cbdd87d4
RG
6346 return changed;
6347}
6348
e0ee10ed
RB
6349/* Valueziation callback that ends up not following SSA edges. */
6350
6351tree
6352no_follow_ssa_edges (tree)
6353{
6354 return NULL_TREE;
6355}
6356
45cc9f96
RB
6357/* Valueization callback that ends up following single-use SSA edges only. */
6358
6359tree
6360follow_single_use_edges (tree val)
6361{
6362 if (TREE_CODE (val) == SSA_NAME
6363 && !has_single_use (val))
6364 return NULL_TREE;
6365 return val;
6366}
6367
c566cc9f
RS
6368/* Valueization callback that follows all SSA edges. */
6369
6370tree
6371follow_all_ssa_edges (tree val)
6372{
6373 return val;
6374}
6375
cbdd87d4
RG
6376/* Fold the statement pointed to by GSI. In some cases, this function may
6377 replace the whole statement with a new one. Returns true iff folding
6378 makes any changes.
6379 The statement pointed to by GSI should be in valid gimple form but may
6380 be in unfolded state as resulting from for example constant propagation
6381 which can produce *&x = 0. */
6382
6383bool
6384fold_stmt (gimple_stmt_iterator *gsi)
6385{
e0ee10ed
RB
6386 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
6387}
6388
6389bool
6390fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
6391{
6392 return fold_stmt_1 (gsi, false, valueize);
cbdd87d4
RG
6393}
6394
59401b92 6395/* Perform the minimal folding on statement *GSI. Only operations like
cbdd87d4
RG
6396 *&x created by constant propagation are handled. The statement cannot
6397 be replaced with a new one. Return true if the statement was
6398 changed, false otherwise.
59401b92 6399 The statement *GSI should be in valid gimple form but may
cbdd87d4
RG
6400 be in unfolded state as resulting from for example constant propagation
6401 which can produce *&x = 0. */
6402
6403bool
59401b92 6404fold_stmt_inplace (gimple_stmt_iterator *gsi)
cbdd87d4 6405{
355fe088 6406 gimple *stmt = gsi_stmt (*gsi);
e0ee10ed 6407 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
59401b92 6408 gcc_assert (gsi_stmt (*gsi) == stmt);
cbdd87d4
RG
6409 return changed;
6410}
6411
e89065a1
SL
6412/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
6413 if EXPR is null or we don't know how.
6414 If non-null, the result always has boolean type. */
6415
6416static tree
6417canonicalize_bool (tree expr, bool invert)
6418{
6419 if (!expr)
6420 return NULL_TREE;
6421 else if (invert)
6422 {
6423 if (integer_nonzerop (expr))
6424 return boolean_false_node;
6425 else if (integer_zerop (expr))
6426 return boolean_true_node;
6427 else if (TREE_CODE (expr) == SSA_NAME)
6428 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
6429 build_int_cst (TREE_TYPE (expr), 0));
98209db3 6430 else if (COMPARISON_CLASS_P (expr))
e89065a1
SL
6431 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
6432 boolean_type_node,
6433 TREE_OPERAND (expr, 0),
6434 TREE_OPERAND (expr, 1));
6435 else
6436 return NULL_TREE;
6437 }
6438 else
6439 {
6440 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
6441 return expr;
6442 if (integer_nonzerop (expr))
6443 return boolean_true_node;
6444 else if (integer_zerop (expr))
6445 return boolean_false_node;
6446 else if (TREE_CODE (expr) == SSA_NAME)
6447 return fold_build2 (NE_EXPR, boolean_type_node, expr,
6448 build_int_cst (TREE_TYPE (expr), 0));
98209db3 6449 else if (COMPARISON_CLASS_P (expr))
e89065a1
SL
6450 return fold_build2 (TREE_CODE (expr),
6451 boolean_type_node,
6452 TREE_OPERAND (expr, 0),
6453 TREE_OPERAND (expr, 1));
6454 else
6455 return NULL_TREE;
6456 }
6457}
6458
6459/* Check to see if a boolean expression EXPR is logically equivalent to the
6460 comparison (OP1 CODE OP2). Check for various identities involving
6461 SSA_NAMEs. */
6462
6463static bool
6464same_bool_comparison_p (const_tree expr, enum tree_code code,
6465 const_tree op1, const_tree op2)
6466{
355fe088 6467 gimple *s;
e89065a1
SL
6468
6469 /* The obvious case. */
6470 if (TREE_CODE (expr) == code
6471 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
6472 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
6473 return true;
6474
6475 /* Check for comparing (name, name != 0) and the case where expr
6476 is an SSA_NAME with a definition matching the comparison. */
6477 if (TREE_CODE (expr) == SSA_NAME
6478 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
6479 {
6480 if (operand_equal_p (expr, op1, 0))
6481 return ((code == NE_EXPR && integer_zerop (op2))
6482 || (code == EQ_EXPR && integer_nonzerop (op2)));
6483 s = SSA_NAME_DEF_STMT (expr);
6484 if (is_gimple_assign (s)
6485 && gimple_assign_rhs_code (s) == code
6486 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
6487 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
6488 return true;
6489 }
6490
6491 /* If op1 is of the form (name != 0) or (name == 0), and the definition
6492 of name is a comparison, recurse. */
6493 if (TREE_CODE (op1) == SSA_NAME
6494 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
6495 {
6496 s = SSA_NAME_DEF_STMT (op1);
6497 if (is_gimple_assign (s)
6498 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
6499 {
6500 enum tree_code c = gimple_assign_rhs_code (s);
6501 if ((c == NE_EXPR && integer_zerop (op2))
6502 || (c == EQ_EXPR && integer_nonzerop (op2)))
6503 return same_bool_comparison_p (expr, c,
6504 gimple_assign_rhs1 (s),
6505 gimple_assign_rhs2 (s));
6506 if ((c == EQ_EXPR && integer_zerop (op2))
6507 || (c == NE_EXPR && integer_nonzerop (op2)))
6508 return same_bool_comparison_p (expr,
6509 invert_tree_comparison (c, false),
6510 gimple_assign_rhs1 (s),
6511 gimple_assign_rhs2 (s));
6512 }
6513 }
6514 return false;
6515}
6516
6517/* Check to see if two boolean expressions OP1 and OP2 are logically
6518 equivalent. */
6519
6520static bool
6521same_bool_result_p (const_tree op1, const_tree op2)
6522{
6523 /* Simple cases first. */
6524 if (operand_equal_p (op1, op2, 0))
6525 return true;
6526
6527 /* Check the cases where at least one of the operands is a comparison.
6528 These are a bit smarter than operand_equal_p in that they apply some
6529 identifies on SSA_NAMEs. */
98209db3 6530 if (COMPARISON_CLASS_P (op2)
e89065a1
SL
6531 && same_bool_comparison_p (op1, TREE_CODE (op2),
6532 TREE_OPERAND (op2, 0),
6533 TREE_OPERAND (op2, 1)))
6534 return true;
98209db3 6535 if (COMPARISON_CLASS_P (op1)
e89065a1
SL
6536 && same_bool_comparison_p (op2, TREE_CODE (op1),
6537 TREE_OPERAND (op1, 0),
6538 TREE_OPERAND (op1, 1)))
6539 return true;
6540
6541 /* Default case. */
6542 return false;
6543}
6544
6545/* Forward declarations for some mutually recursive functions. */
6546
6547static tree
5f487a34 6548and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
e89065a1
SL
6549 enum tree_code code2, tree op2a, tree op2b);
6550static tree
5f487a34 6551and_var_with_comparison (tree type, tree var, bool invert,
e89065a1
SL
6552 enum tree_code code2, tree op2a, tree op2b);
6553static tree
5f487a34 6554and_var_with_comparison_1 (tree type, gimple *stmt,
e89065a1
SL
6555 enum tree_code code2, tree op2a, tree op2b);
6556static tree
5f487a34 6557or_comparisons_1 (tree, enum tree_code code1, tree op1a, tree op1b,
e89065a1
SL
6558 enum tree_code code2, tree op2a, tree op2b);
6559static tree
5f487a34 6560or_var_with_comparison (tree, tree var, bool invert,
e89065a1
SL
6561 enum tree_code code2, tree op2a, tree op2b);
6562static tree
5f487a34 6563or_var_with_comparison_1 (tree, gimple *stmt,
e89065a1
SL
6564 enum tree_code code2, tree op2a, tree op2b);
6565
6566/* Helper function for and_comparisons_1: try to simplify the AND of the
6567 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
6568 If INVERT is true, invert the value of the VAR before doing the AND.
6569 Return NULL_EXPR if we can't simplify this to a single expression. */
6570
6571static tree
5f487a34 6572and_var_with_comparison (tree type, tree var, bool invert,
e89065a1
SL
6573 enum tree_code code2, tree op2a, tree op2b)
6574{
6575 tree t;
355fe088 6576 gimple *stmt = SSA_NAME_DEF_STMT (var);
e89065a1
SL
6577
6578 /* We can only deal with variables whose definitions are assignments. */
6579 if (!is_gimple_assign (stmt))
6580 return NULL_TREE;
6581
6582 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
6583 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
6584 Then we only have to consider the simpler non-inverted cases. */
6585 if (invert)
5f487a34 6586 t = or_var_with_comparison_1 (type, stmt,
e89065a1
SL
6587 invert_tree_comparison (code2, false),
6588 op2a, op2b);
6589 else
5f487a34 6590 t = and_var_with_comparison_1 (type, stmt, code2, op2a, op2b);
e89065a1
SL
6591 return canonicalize_bool (t, invert);
6592}
6593
6594/* Try to simplify the AND of the ssa variable defined by the assignment
6595 STMT with the comparison specified by (OP2A CODE2 OP2B).
6596 Return NULL_EXPR if we can't simplify this to a single expression. */
6597
6598static tree
5f487a34 6599and_var_with_comparison_1 (tree type, gimple *stmt,
e89065a1
SL
6600 enum tree_code code2, tree op2a, tree op2b)
6601{
6602 tree var = gimple_assign_lhs (stmt);
6603 tree true_test_var = NULL_TREE;
6604 tree false_test_var = NULL_TREE;
6605 enum tree_code innercode = gimple_assign_rhs_code (stmt);
6606
6607 /* Check for identities like (var AND (var == 0)) => false. */
6608 if (TREE_CODE (op2a) == SSA_NAME
6609 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
6610 {
6611 if ((code2 == NE_EXPR && integer_zerop (op2b))
6612 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
6613 {
6614 true_test_var = op2a;
6615 if (var == true_test_var)
6616 return var;
6617 }
6618 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
6619 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
6620 {
6621 false_test_var = op2a;
6622 if (var == false_test_var)
6623 return boolean_false_node;
6624 }
6625 }
6626
6627 /* If the definition is a comparison, recurse on it. */
6628 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
6629 {
5f487a34 6630 tree t = and_comparisons_1 (type, innercode,
e89065a1
SL
6631 gimple_assign_rhs1 (stmt),
6632 gimple_assign_rhs2 (stmt),
6633 code2,
6634 op2a,
6635 op2b);
6636 if (t)
6637 return t;
6638 }
6639
6640 /* If the definition is an AND or OR expression, we may be able to
6641 simplify by reassociating. */
eb9820c0
KT
6642 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
6643 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
e89065a1
SL
6644 {
6645 tree inner1 = gimple_assign_rhs1 (stmt);
6646 tree inner2 = gimple_assign_rhs2 (stmt);
355fe088 6647 gimple *s;
e89065a1
SL
6648 tree t;
6649 tree partial = NULL_TREE;
eb9820c0 6650 bool is_and = (innercode == BIT_AND_EXPR);
e89065a1
SL
6651
6652 /* Check for boolean identities that don't require recursive examination
6653 of inner1/inner2:
6654 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
6655 inner1 AND (inner1 OR inner2) => inner1
6656 !inner1 AND (inner1 AND inner2) => false
6657 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
6658 Likewise for similar cases involving inner2. */
6659 if (inner1 == true_test_var)
6660 return (is_and ? var : inner1);
6661 else if (inner2 == true_test_var)
6662 return (is_and ? var : inner2);
6663 else if (inner1 == false_test_var)
6664 return (is_and
6665 ? boolean_false_node
5f487a34
LJH
6666 : and_var_with_comparison (type, inner2, false, code2, op2a,
6667 op2b));
e89065a1
SL
6668 else if (inner2 == false_test_var)
6669 return (is_and
6670 ? boolean_false_node
5f487a34
LJH
6671 : and_var_with_comparison (type, inner1, false, code2, op2a,
6672 op2b));
e89065a1
SL
6673
6674 /* Next, redistribute/reassociate the AND across the inner tests.
6675 Compute the first partial result, (inner1 AND (op2a code op2b)) */
6676 if (TREE_CODE (inner1) == SSA_NAME
6677 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
6678 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5f487a34 6679 && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
e89065a1
SL
6680 gimple_assign_rhs1 (s),
6681 gimple_assign_rhs2 (s),
6682 code2, op2a, op2b)))
6683 {
6684 /* Handle the AND case, where we are reassociating:
6685 (inner1 AND inner2) AND (op2a code2 op2b)
6686 => (t AND inner2)
6687 If the partial result t is a constant, we win. Otherwise
6688 continue on to try reassociating with the other inner test. */
6689 if (is_and)
6690 {
6691 if (integer_onep (t))
6692 return inner2;
6693 else if (integer_zerop (t))
6694 return boolean_false_node;
6695 }
6696
6697 /* Handle the OR case, where we are redistributing:
6698 (inner1 OR inner2) AND (op2a code2 op2b)
6699 => (t OR (inner2 AND (op2a code2 op2b))) */
8236c8eb
JJ
6700 else if (integer_onep (t))
6701 return boolean_true_node;
6702
6703 /* Save partial result for later. */
6704 partial = t;
e89065a1
SL
6705 }
6706
6707 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
6708 if (TREE_CODE (inner2) == SSA_NAME
6709 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
6710 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5f487a34 6711 && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
e89065a1
SL
6712 gimple_assign_rhs1 (s),
6713 gimple_assign_rhs2 (s),
6714 code2, op2a, op2b)))
6715 {
6716 /* Handle the AND case, where we are reassociating:
6717 (inner1 AND inner2) AND (op2a code2 op2b)
6718 => (inner1 AND t) */
6719 if (is_and)
6720 {
6721 if (integer_onep (t))
6722 return inner1;
6723 else if (integer_zerop (t))
6724 return boolean_false_node;
8236c8eb
JJ
6725 /* If both are the same, we can apply the identity
6726 (x AND x) == x. */
6727 else if (partial && same_bool_result_p (t, partial))
6728 return t;
e89065a1
SL
6729 }
6730
6731 /* Handle the OR case. where we are redistributing:
6732 (inner1 OR inner2) AND (op2a code2 op2b)
6733 => (t OR (inner1 AND (op2a code2 op2b)))
6734 => (t OR partial) */
6735 else
6736 {
6737 if (integer_onep (t))
6738 return boolean_true_node;
6739 else if (partial)
6740 {
6741 /* We already got a simplification for the other
6742 operand to the redistributed OR expression. The
6743 interesting case is when at least one is false.
6744 Or, if both are the same, we can apply the identity
6745 (x OR x) == x. */
6746 if (integer_zerop (partial))
6747 return t;
6748 else if (integer_zerop (t))
6749 return partial;
6750 else if (same_bool_result_p (t, partial))
6751 return t;
6752 }
6753 }
6754 }
6755 }
6756 return NULL_TREE;
6757}
6758
6759/* Try to simplify the AND of two comparisons defined by
6760 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
6761 If this can be done without constructing an intermediate value,
6762 return the resulting tree; otherwise NULL_TREE is returned.
6763 This function is deliberately asymmetric as it recurses on SSA_DEFs
6764 in the first comparison but not the second. */
6765
6766static tree
5f487a34 6767and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
e89065a1
SL
6768 enum tree_code code2, tree op2a, tree op2b)
6769{
ae22ac3c 6770 tree truth_type = truth_type_for (TREE_TYPE (op1a));
31ed6226 6771
e89065a1
SL
6772 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
6773 if (operand_equal_p (op1a, op2a, 0)
6774 && operand_equal_p (op1b, op2b, 0))
6775 {
eb9820c0 6776 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
6777 tree t = combine_comparisons (UNKNOWN_LOCATION,
6778 TRUTH_ANDIF_EXPR, code1, code2,
31ed6226 6779 truth_type, op1a, op1b);
e89065a1
SL
6780 if (t)
6781 return t;
6782 }
6783
6784 /* Likewise the swapped case of the above. */
6785 if (operand_equal_p (op1a, op2b, 0)
6786 && operand_equal_p (op1b, op2a, 0))
6787 {
eb9820c0 6788 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
6789 tree t = combine_comparisons (UNKNOWN_LOCATION,
6790 TRUTH_ANDIF_EXPR, code1,
6791 swap_tree_comparison (code2),
31ed6226 6792 truth_type, op1a, op1b);
e89065a1
SL
6793 if (t)
6794 return t;
6795 }
6796
e89065a1
SL
6797 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
6798 NAME's definition is a truth value. See if there are any simplifications
6799 that can be done against the NAME's definition. */
6800 if (TREE_CODE (op1a) == SSA_NAME
6801 && (code1 == NE_EXPR || code1 == EQ_EXPR)
6802 && (integer_zerop (op1b) || integer_onep (op1b)))
6803 {
6804 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
6805 || (code1 == NE_EXPR && integer_onep (op1b)));
355fe088 6806 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
e89065a1
SL
6807 switch (gimple_code (stmt))
6808 {
6809 case GIMPLE_ASSIGN:
6810 /* Try to simplify by copy-propagating the definition. */
5f487a34
LJH
6811 return and_var_with_comparison (type, op1a, invert, code2, op2a,
6812 op2b);
e89065a1
SL
6813
6814 case GIMPLE_PHI:
6815 /* If every argument to the PHI produces the same result when
6816 ANDed with the second comparison, we win.
6817 Do not do this unless the type is bool since we need a bool
6818 result here anyway. */
6819 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
6820 {
6821 tree result = NULL_TREE;
6822 unsigned i;
6823 for (i = 0; i < gimple_phi_num_args (stmt); i++)
6824 {
6825 tree arg = gimple_phi_arg_def (stmt, i);
6826
6827 /* If this PHI has itself as an argument, ignore it.
6828 If all the other args produce the same result,
6829 we're still OK. */
6830 if (arg == gimple_phi_result (stmt))
6831 continue;
6832 else if (TREE_CODE (arg) == INTEGER_CST)
6833 {
6834 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
6835 {
6836 if (!result)
6837 result = boolean_false_node;
6838 else if (!integer_zerop (result))
6839 return NULL_TREE;
6840 }
6841 else if (!result)
6842 result = fold_build2 (code2, boolean_type_node,
6843 op2a, op2b);
6844 else if (!same_bool_comparison_p (result,
6845 code2, op2a, op2b))
6846 return NULL_TREE;
6847 }
0e8b84ec
JJ
6848 else if (TREE_CODE (arg) == SSA_NAME
6849 && !SSA_NAME_IS_DEFAULT_DEF (arg))
e89065a1 6850 {
6c66f733 6851 tree temp;
355fe088 6852 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6c66f733
JJ
6853 /* In simple cases we can look through PHI nodes,
6854 but we have to be careful with loops.
6855 See PR49073. */
6856 if (! dom_info_available_p (CDI_DOMINATORS)
6857 || gimple_bb (def_stmt) == gimple_bb (stmt)
6858 || dominated_by_p (CDI_DOMINATORS,
6859 gimple_bb (def_stmt),
6860 gimple_bb (stmt)))
6861 return NULL_TREE;
5f487a34 6862 temp = and_var_with_comparison (type, arg, invert, code2,
6c66f733 6863 op2a, op2b);
e89065a1
SL
6864 if (!temp)
6865 return NULL_TREE;
6866 else if (!result)
6867 result = temp;
6868 else if (!same_bool_result_p (result, temp))
6869 return NULL_TREE;
6870 }
6871 else
6872 return NULL_TREE;
6873 }
6874 return result;
6875 }
6876
6877 default:
6878 break;
6879 }
6880 }
6881 return NULL_TREE;
6882}
6883
5f487a34
LJH
6884/* Helper function for maybe_fold_and_comparisons and maybe_fold_or_comparisons
6885 : try to simplify the AND/OR of the ssa variable VAR with the comparison
6886 specified by (OP2A CODE2 OP2B) from match.pd. Return NULL_EXPR if we can't
6887 simplify this to a single expression. As we are going to lower the cost
6888 of building SSA names / gimple stmts significantly, we need to allocate
6889 them ont the stack. This will cause the code to be a bit ugly. */
6890
6891static tree
6892maybe_fold_comparisons_from_match_pd (tree type, enum tree_code code,
6893 enum tree_code code1,
6894 tree op1a, tree op1b,
6895 enum tree_code code2, tree op2a,
6896 tree op2b)
6897{
6898 /* Allocate gimple stmt1 on the stack. */
6899 gassign *stmt1
6900 = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
6901 gimple_init (stmt1, GIMPLE_ASSIGN, 3);
6902 gimple_assign_set_rhs_code (stmt1, code1);
6903 gimple_assign_set_rhs1 (stmt1, op1a);
6904 gimple_assign_set_rhs2 (stmt1, op1b);
6905
6906 /* Allocate gimple stmt2 on the stack. */
6907 gassign *stmt2
6908 = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
6909 gimple_init (stmt2, GIMPLE_ASSIGN, 3);
6910 gimple_assign_set_rhs_code (stmt2, code2);
6911 gimple_assign_set_rhs1 (stmt2, op2a);
6912 gimple_assign_set_rhs2 (stmt2, op2b);
6913
6914 /* Allocate SSA names(lhs1) on the stack. */
6915 tree lhs1 = (tree)XALLOCA (tree_ssa_name);
6916 memset (lhs1, 0, sizeof (tree_ssa_name));
6917 TREE_SET_CODE (lhs1, SSA_NAME);
6918 TREE_TYPE (lhs1) = type;
6919 init_ssa_name_imm_use (lhs1);
6920
6921 /* Allocate SSA names(lhs2) on the stack. */
6922 tree lhs2 = (tree)XALLOCA (tree_ssa_name);
6923 memset (lhs2, 0, sizeof (tree_ssa_name));
6924 TREE_SET_CODE (lhs2, SSA_NAME);
6925 TREE_TYPE (lhs2) = type;
6926 init_ssa_name_imm_use (lhs2);
6927
6928 gimple_assign_set_lhs (stmt1, lhs1);
6929 gimple_assign_set_lhs (stmt2, lhs2);
6930
6931 gimple_match_op op (gimple_match_cond::UNCOND, code,
6932 type, gimple_assign_lhs (stmt1),
6933 gimple_assign_lhs (stmt2));
6934 if (op.resimplify (NULL, follow_all_ssa_edges))
6935 {
6936 if (gimple_simplified_result_is_gimple_val (&op))
6937 {
6938 tree res = op.ops[0];
6939 if (res == lhs1)
6940 return build2 (code1, type, op1a, op1b);
6941 else if (res == lhs2)
6942 return build2 (code2, type, op2a, op2b);
6943 else
6944 return res;
6945 }
ae9c3507
ML
6946 else if (op.code.is_tree_code ()
6947 && TREE_CODE_CLASS ((tree_code)op.code) == tcc_comparison)
6948 {
6949 tree op0 = op.ops[0];
6950 tree op1 = op.ops[1];
6951 if (op0 == lhs1 || op0 == lhs2 || op1 == lhs1 || op1 == lhs2)
6952 return NULL_TREE; /* not simple */
6953
6954 return build2 ((enum tree_code)op.code, op.type, op0, op1);
6955 }
5f487a34
LJH
6956 }
6957
6958 return NULL_TREE;
6959}
6960
e89065a1
SL
6961/* Try to simplify the AND of two comparisons, specified by
6962 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
6963 If this can be simplified to a single expression (without requiring
6964 introducing more SSA variables to hold intermediate values),
6965 return the resulting tree. Otherwise return NULL_TREE.
6966 If the result expression is non-null, it has boolean type. */
6967
6968tree
5f487a34
LJH
6969maybe_fold_and_comparisons (tree type,
6970 enum tree_code code1, tree op1a, tree op1b,
e89065a1
SL
6971 enum tree_code code2, tree op2a, tree op2b)
6972{
5f487a34 6973 if (tree t = and_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b))
e89065a1 6974 return t;
5f487a34
LJH
6975
6976 if (tree t = and_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b))
6977 return t;
6978
6979 if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_AND_EXPR, code1,
6980 op1a, op1b, code2, op2a,
6981 op2b))
6982 return t;
6983
6984 return NULL_TREE;
e89065a1
SL
6985}
6986
6987/* Helper function for or_comparisons_1: try to simplify the OR of the
6988 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
6989 If INVERT is true, invert the value of VAR before doing the OR.
6990 Return NULL_EXPR if we can't simplify this to a single expression. */
6991
6992static tree
5f487a34 6993or_var_with_comparison (tree type, tree var, bool invert,
e89065a1
SL
6994 enum tree_code code2, tree op2a, tree op2b)
6995{
6996 tree t;
355fe088 6997 gimple *stmt = SSA_NAME_DEF_STMT (var);
e89065a1
SL
6998
6999 /* We can only deal with variables whose definitions are assignments. */
7000 if (!is_gimple_assign (stmt))
7001 return NULL_TREE;
7002
7003 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
7004 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
7005 Then we only have to consider the simpler non-inverted cases. */
7006 if (invert)
5f487a34 7007 t = and_var_with_comparison_1 (type, stmt,
e89065a1
SL
7008 invert_tree_comparison (code2, false),
7009 op2a, op2b);
7010 else
5f487a34 7011 t = or_var_with_comparison_1 (type, stmt, code2, op2a, op2b);
e89065a1
SL
7012 return canonicalize_bool (t, invert);
7013}
7014
7015/* Try to simplify the OR of the ssa variable defined by the assignment
7016 STMT with the comparison specified by (OP2A CODE2 OP2B).
7017 Return NULL_EXPR if we can't simplify this to a single expression. */
7018
7019static tree
5f487a34 7020or_var_with_comparison_1 (tree type, gimple *stmt,
e89065a1
SL
7021 enum tree_code code2, tree op2a, tree op2b)
7022{
7023 tree var = gimple_assign_lhs (stmt);
7024 tree true_test_var = NULL_TREE;
7025 tree false_test_var = NULL_TREE;
7026 enum tree_code innercode = gimple_assign_rhs_code (stmt);
7027
7028 /* Check for identities like (var OR (var != 0)) => true . */
7029 if (TREE_CODE (op2a) == SSA_NAME
7030 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
7031 {
7032 if ((code2 == NE_EXPR && integer_zerop (op2b))
7033 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
7034 {
7035 true_test_var = op2a;
7036 if (var == true_test_var)
7037 return var;
7038 }
7039 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
7040 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
7041 {
7042 false_test_var = op2a;
7043 if (var == false_test_var)
7044 return boolean_true_node;
7045 }
7046 }
7047
7048 /* If the definition is a comparison, recurse on it. */
7049 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
7050 {
5f487a34 7051 tree t = or_comparisons_1 (type, innercode,
e89065a1
SL
7052 gimple_assign_rhs1 (stmt),
7053 gimple_assign_rhs2 (stmt),
7054 code2,
7055 op2a,
7056 op2b);
7057 if (t)
7058 return t;
7059 }
7060
7061 /* If the definition is an AND or OR expression, we may be able to
7062 simplify by reassociating. */
eb9820c0
KT
7063 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
7064 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
e89065a1
SL
7065 {
7066 tree inner1 = gimple_assign_rhs1 (stmt);
7067 tree inner2 = gimple_assign_rhs2 (stmt);
355fe088 7068 gimple *s;
e89065a1
SL
7069 tree t;
7070 tree partial = NULL_TREE;
eb9820c0 7071 bool is_or = (innercode == BIT_IOR_EXPR);
e89065a1
SL
7072
7073 /* Check for boolean identities that don't require recursive examination
7074 of inner1/inner2:
7075 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
7076 inner1 OR (inner1 AND inner2) => inner1
7077 !inner1 OR (inner1 OR inner2) => true
7078 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
7079 */
7080 if (inner1 == true_test_var)
7081 return (is_or ? var : inner1);
7082 else if (inner2 == true_test_var)
7083 return (is_or ? var : inner2);
7084 else if (inner1 == false_test_var)
7085 return (is_or
7086 ? boolean_true_node
5f487a34
LJH
7087 : or_var_with_comparison (type, inner2, false, code2, op2a,
7088 op2b));
e89065a1
SL
7089 else if (inner2 == false_test_var)
7090 return (is_or
7091 ? boolean_true_node
5f487a34
LJH
7092 : or_var_with_comparison (type, inner1, false, code2, op2a,
7093 op2b));
e89065a1
SL
7094
7095 /* Next, redistribute/reassociate the OR across the inner tests.
7096 Compute the first partial result, (inner1 OR (op2a code op2b)) */
7097 if (TREE_CODE (inner1) == SSA_NAME
7098 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
7099 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5f487a34 7100 && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
e89065a1
SL
7101 gimple_assign_rhs1 (s),
7102 gimple_assign_rhs2 (s),
7103 code2, op2a, op2b)))
7104 {
7105 /* Handle the OR case, where we are reassociating:
7106 (inner1 OR inner2) OR (op2a code2 op2b)
7107 => (t OR inner2)
7108 If the partial result t is a constant, we win. Otherwise
7109 continue on to try reassociating with the other inner test. */
8236c8eb 7110 if (is_or)
e89065a1
SL
7111 {
7112 if (integer_onep (t))
7113 return boolean_true_node;
7114 else if (integer_zerop (t))
7115 return inner2;
7116 }
7117
7118 /* Handle the AND case, where we are redistributing:
7119 (inner1 AND inner2) OR (op2a code2 op2b)
7120 => (t AND (inner2 OR (op2a code op2b))) */
8236c8eb
JJ
7121 else if (integer_zerop (t))
7122 return boolean_false_node;
7123
7124 /* Save partial result for later. */
7125 partial = t;
e89065a1
SL
7126 }
7127
7128 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
7129 if (TREE_CODE (inner2) == SSA_NAME
7130 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
7131 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5f487a34 7132 && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
e89065a1
SL
7133 gimple_assign_rhs1 (s),
7134 gimple_assign_rhs2 (s),
7135 code2, op2a, op2b)))
7136 {
7137 /* Handle the OR case, where we are reassociating:
7138 (inner1 OR inner2) OR (op2a code2 op2b)
8236c8eb
JJ
7139 => (inner1 OR t)
7140 => (t OR partial) */
7141 if (is_or)
e89065a1
SL
7142 {
7143 if (integer_zerop (t))
7144 return inner1;
7145 else if (integer_onep (t))
7146 return boolean_true_node;
8236c8eb
JJ
7147 /* If both are the same, we can apply the identity
7148 (x OR x) == x. */
7149 else if (partial && same_bool_result_p (t, partial))
7150 return t;
e89065a1
SL
7151 }
7152
7153 /* Handle the AND case, where we are redistributing:
7154 (inner1 AND inner2) OR (op2a code2 op2b)
7155 => (t AND (inner1 OR (op2a code2 op2b)))
7156 => (t AND partial) */
7157 else
7158 {
7159 if (integer_zerop (t))
7160 return boolean_false_node;
7161 else if (partial)
7162 {
7163 /* We already got a simplification for the other
7164 operand to the redistributed AND expression. The
7165 interesting case is when at least one is true.
7166 Or, if both are the same, we can apply the identity
8236c8eb 7167 (x AND x) == x. */
e89065a1
SL
7168 if (integer_onep (partial))
7169 return t;
7170 else if (integer_onep (t))
7171 return partial;
7172 else if (same_bool_result_p (t, partial))
8236c8eb 7173 return t;
e89065a1
SL
7174 }
7175 }
7176 }
7177 }
7178 return NULL_TREE;
7179}
7180
7181/* Try to simplify the OR of two comparisons defined by
7182 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
7183 If this can be done without constructing an intermediate value,
7184 return the resulting tree; otherwise NULL_TREE is returned.
7185 This function is deliberately asymmetric as it recurses on SSA_DEFs
7186 in the first comparison but not the second. */
7187
7188static tree
5f487a34 7189or_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
e89065a1
SL
7190 enum tree_code code2, tree op2a, tree op2b)
7191{
ae22ac3c 7192 tree truth_type = truth_type_for (TREE_TYPE (op1a));
31ed6226 7193
e89065a1
SL
7194 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
7195 if (operand_equal_p (op1a, op2a, 0)
7196 && operand_equal_p (op1b, op2b, 0))
7197 {
eb9820c0 7198 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
7199 tree t = combine_comparisons (UNKNOWN_LOCATION,
7200 TRUTH_ORIF_EXPR, code1, code2,
31ed6226 7201 truth_type, op1a, op1b);
e89065a1
SL
7202 if (t)
7203 return t;
7204 }
7205
7206 /* Likewise the swapped case of the above. */
7207 if (operand_equal_p (op1a, op2b, 0)
7208 && operand_equal_p (op1b, op2a, 0))
7209 {
eb9820c0 7210 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
7211 tree t = combine_comparisons (UNKNOWN_LOCATION,
7212 TRUTH_ORIF_EXPR, code1,
7213 swap_tree_comparison (code2),
31ed6226 7214 truth_type, op1a, op1b);
e89065a1
SL
7215 if (t)
7216 return t;
7217 }
7218
e89065a1
SL
7219 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
7220 NAME's definition is a truth value. See if there are any simplifications
7221 that can be done against the NAME's definition. */
7222 if (TREE_CODE (op1a) == SSA_NAME
7223 && (code1 == NE_EXPR || code1 == EQ_EXPR)
7224 && (integer_zerop (op1b) || integer_onep (op1b)))
7225 {
7226 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
7227 || (code1 == NE_EXPR && integer_onep (op1b)));
355fe088 7228 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
e89065a1
SL
7229 switch (gimple_code (stmt))
7230 {
7231 case GIMPLE_ASSIGN:
7232 /* Try to simplify by copy-propagating the definition. */
5f487a34
LJH
7233 return or_var_with_comparison (type, op1a, invert, code2, op2a,
7234 op2b);
e89065a1
SL
7235
7236 case GIMPLE_PHI:
7237 /* If every argument to the PHI produces the same result when
7238 ORed with the second comparison, we win.
7239 Do not do this unless the type is bool since we need a bool
7240 result here anyway. */
7241 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
7242 {
7243 tree result = NULL_TREE;
7244 unsigned i;
7245 for (i = 0; i < gimple_phi_num_args (stmt); i++)
7246 {
7247 tree arg = gimple_phi_arg_def (stmt, i);
7248
7249 /* If this PHI has itself as an argument, ignore it.
7250 If all the other args produce the same result,
7251 we're still OK. */
7252 if (arg == gimple_phi_result (stmt))
7253 continue;
7254 else if (TREE_CODE (arg) == INTEGER_CST)
7255 {
7256 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
7257 {
7258 if (!result)
7259 result = boolean_true_node;
7260 else if (!integer_onep (result))
7261 return NULL_TREE;
7262 }
7263 else if (!result)
7264 result = fold_build2 (code2, boolean_type_node,
7265 op2a, op2b);
7266 else if (!same_bool_comparison_p (result,
7267 code2, op2a, op2b))
7268 return NULL_TREE;
7269 }
0e8b84ec
JJ
7270 else if (TREE_CODE (arg) == SSA_NAME
7271 && !SSA_NAME_IS_DEFAULT_DEF (arg))
e89065a1 7272 {
6c66f733 7273 tree temp;
355fe088 7274 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6c66f733
JJ
7275 /* In simple cases we can look through PHI nodes,
7276 but we have to be careful with loops.
7277 See PR49073. */
7278 if (! dom_info_available_p (CDI_DOMINATORS)
7279 || gimple_bb (def_stmt) == gimple_bb (stmt)
7280 || dominated_by_p (CDI_DOMINATORS,
7281 gimple_bb (def_stmt),
7282 gimple_bb (stmt)))
7283 return NULL_TREE;
5f487a34 7284 temp = or_var_with_comparison (type, arg, invert, code2,
6c66f733 7285 op2a, op2b);
e89065a1
SL
7286 if (!temp)
7287 return NULL_TREE;
7288 else if (!result)
7289 result = temp;
7290 else if (!same_bool_result_p (result, temp))
7291 return NULL_TREE;
7292 }
7293 else
7294 return NULL_TREE;
7295 }
7296 return result;
7297 }
7298
7299 default:
7300 break;
7301 }
7302 }
7303 return NULL_TREE;
7304}
7305
7306/* Try to simplify the OR of two comparisons, specified by
7307 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
7308 If this can be simplified to a single expression (without requiring
7309 introducing more SSA variables to hold intermediate values),
7310 return the resulting tree. Otherwise return NULL_TREE.
7311 If the result expression is non-null, it has boolean type. */
7312
7313tree
5f487a34
LJH
7314maybe_fold_or_comparisons (tree type,
7315 enum tree_code code1, tree op1a, tree op1b,
e89065a1
SL
7316 enum tree_code code2, tree op2a, tree op2b)
7317{
5f487a34 7318 if (tree t = or_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b))
e89065a1 7319 return t;
cfef45c8 7320
5f487a34
LJH
7321 if (tree t = or_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b))
7322 return t;
7323
7324 if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_IOR_EXPR, code1,
7325 op1a, op1b, code2, op2a,
7326 op2b))
7327 return t;
7328
7329 return NULL_TREE;
7330}
cfef45c8
RG
7331
7332/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
7333
7334 Either NULL_TREE, a simplified but non-constant or a constant
7335 is returned.
7336
7337 ??? This should go into a gimple-fold-inline.h file to be eventually
7338 privatized with the single valueize function used in the various TUs
7339 to avoid the indirect function call overhead. */
7340
7341tree
355fe088 7342gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
d2a85801 7343 tree (*gvalueize) (tree))
cfef45c8 7344{
5d75ad95 7345 gimple_match_op res_op;
45cc9f96
RB
7346 /* ??? The SSA propagators do not correctly deal with following SSA use-def
7347 edges if there are intermediate VARYING defs. For this reason
7348 do not follow SSA edges here even though SCCVN can technically
7349 just deal fine with that. */
5d75ad95 7350 if (gimple_simplify (stmt, &res_op, NULL, gvalueize, valueize))
45cc9f96 7351 {
34050b6b 7352 tree res = NULL_TREE;
5d75ad95
RS
7353 if (gimple_simplified_result_is_gimple_val (&res_op))
7354 res = res_op.ops[0];
34050b6b 7355 else if (mprts_hook)
5d75ad95 7356 res = mprts_hook (&res_op);
34050b6b 7357 if (res)
45cc9f96 7358 {
34050b6b
RB
7359 if (dump_file && dump_flags & TDF_DETAILS)
7360 {
7361 fprintf (dump_file, "Match-and-simplified ");
7362 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
7363 fprintf (dump_file, " to ");
ef6cb4c7 7364 print_generic_expr (dump_file, res);
34050b6b
RB
7365 fprintf (dump_file, "\n");
7366 }
7367 return res;
45cc9f96 7368 }
45cc9f96
RB
7369 }
7370
cfef45c8
RG
7371 location_t loc = gimple_location (stmt);
7372 switch (gimple_code (stmt))
7373 {
7374 case GIMPLE_ASSIGN:
7375 {
7376 enum tree_code subcode = gimple_assign_rhs_code (stmt);
7377
7378 switch (get_gimple_rhs_class (subcode))
7379 {
7380 case GIMPLE_SINGLE_RHS:
7381 {
7382 tree rhs = gimple_assign_rhs1 (stmt);
7383 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
7384
7385 if (TREE_CODE (rhs) == SSA_NAME)
7386 {
7387 /* If the RHS is an SSA_NAME, return its known constant value,
7388 if any. */
7389 return (*valueize) (rhs);
7390 }
7391 /* Handle propagating invariant addresses into address
7392 operations. */
7393 else if (TREE_CODE (rhs) == ADDR_EXPR
7394 && !is_gimple_min_invariant (rhs))
7395 {
a90c8804 7396 poly_int64 offset = 0;
cfef45c8
RG
7397 tree base;
7398 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
7399 &offset,
7400 valueize);
7401 if (base
7402 && (CONSTANT_CLASS_P (base)
7403 || decl_address_invariant_p (base)))
7404 return build_invariant_address (TREE_TYPE (rhs),
7405 base, offset);
7406 }
7407 else if (TREE_CODE (rhs) == CONSTRUCTOR
7408 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
928686b1
RS
7409 && known_eq (CONSTRUCTOR_NELTS (rhs),
7410 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
cfef45c8 7411 {
794e3180
RS
7412 unsigned i, nelts;
7413 tree val;
cfef45c8 7414
928686b1 7415 nelts = CONSTRUCTOR_NELTS (rhs);
5ebaa477 7416 tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
cfef45c8
RG
7417 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
7418 {
7419 val = (*valueize) (val);
7420 if (TREE_CODE (val) == INTEGER_CST
7421 || TREE_CODE (val) == REAL_CST
7422 || TREE_CODE (val) == FIXED_CST)
794e3180 7423 vec.quick_push (val);
cfef45c8
RG
7424 else
7425 return NULL_TREE;
7426 }
7427
5ebaa477 7428 return vec.build ();
cfef45c8 7429 }
bdf37f7a
JH
7430 if (subcode == OBJ_TYPE_REF)
7431 {
7432 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
7433 /* If callee is constant, we can fold away the wrapper. */
7434 if (is_gimple_min_invariant (val))
7435 return val;
7436 }
cfef45c8
RG
7437
7438 if (kind == tcc_reference)
7439 {
7440 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
7441 || TREE_CODE (rhs) == REALPART_EXPR
7442 || TREE_CODE (rhs) == IMAGPART_EXPR)
7443 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
7444 {
7445 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
7446 return fold_unary_loc (EXPR_LOCATION (rhs),
7447 TREE_CODE (rhs),
7448 TREE_TYPE (rhs), val);
7449 }
7450 else if (TREE_CODE (rhs) == BIT_FIELD_REF
7451 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
7452 {
7453 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
7454 return fold_ternary_loc (EXPR_LOCATION (rhs),
7455 TREE_CODE (rhs),
7456 TREE_TYPE (rhs), val,
7457 TREE_OPERAND (rhs, 1),
7458 TREE_OPERAND (rhs, 2));
7459 }
7460 else if (TREE_CODE (rhs) == MEM_REF
7461 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
7462 {
7463 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
7464 if (TREE_CODE (val) == ADDR_EXPR
7465 && is_gimple_min_invariant (val))
7466 {
7467 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
7468 unshare_expr (val),
7469 TREE_OPERAND (rhs, 1));
7470 if (tem)
7471 rhs = tem;
7472 }
7473 }
7474 return fold_const_aggregate_ref_1 (rhs, valueize);
7475 }
7476 else if (kind == tcc_declaration)
7477 return get_symbol_constant_value (rhs);
7478 return rhs;
7479 }
7480
7481 case GIMPLE_UNARY_RHS:
f3582e54 7482 return NULL_TREE;
cfef45c8
RG
7483
7484 case GIMPLE_BINARY_RHS:
4b1b9e64
RB
7485 /* Translate &x + CST into an invariant form suitable for
7486 further propagation. */
7487 if (subcode == POINTER_PLUS_EXPR)
7488 {
4b1b9e64
RB
7489 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
7490 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
4b1b9e64
RB
7491 if (TREE_CODE (op0) == ADDR_EXPR
7492 && TREE_CODE (op1) == INTEGER_CST)
7493 {
7494 tree off = fold_convert (ptr_type_node, op1);
4e3d3e40
RB
7495 return build1_loc
7496 (loc, ADDR_EXPR, TREE_TYPE (op0),
4b1b9e64
RB
7497 fold_build2 (MEM_REF,
7498 TREE_TYPE (TREE_TYPE (op0)),
7499 unshare_expr (op0), off));
7500 }
7501 }
59c20dc7
RB
7502 /* Canonicalize bool != 0 and bool == 0 appearing after
7503 valueization. While gimple_simplify handles this
7504 it can get confused by the ~X == 1 -> X == 0 transform
7505 which we cant reduce to a SSA name or a constant
7506 (and we have no way to tell gimple_simplify to not
7507 consider those transforms in the first place). */
7508 else if (subcode == EQ_EXPR
7509 || subcode == NE_EXPR)
7510 {
7511 tree lhs = gimple_assign_lhs (stmt);
7512 tree op0 = gimple_assign_rhs1 (stmt);
7513 if (useless_type_conversion_p (TREE_TYPE (lhs),
7514 TREE_TYPE (op0)))
7515 {
7516 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
7517 op0 = (*valueize) (op0);
8861704d
RB
7518 if (TREE_CODE (op0) == INTEGER_CST)
7519 std::swap (op0, op1);
7520 if (TREE_CODE (op1) == INTEGER_CST
7521 && ((subcode == NE_EXPR && integer_zerop (op1))
7522 || (subcode == EQ_EXPR && integer_onep (op1))))
7523 return op0;
59c20dc7
RB
7524 }
7525 }
4b1b9e64 7526 return NULL_TREE;
cfef45c8
RG
7527
7528 case GIMPLE_TERNARY_RHS:
7529 {
7530 /* Handle ternary operators that can appear in GIMPLE form. */
7531 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
7532 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
7533 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
cfef45c8 7534 return fold_ternary_loc (loc, subcode,
ce777eae
RB
7535 TREE_TYPE (gimple_assign_lhs (stmt)),
7536 op0, op1, op2);
cfef45c8
RG
7537 }
7538
7539 default:
7540 gcc_unreachable ();
7541 }
7542 }
7543
7544 case GIMPLE_CALL:
7545 {
25583c4f 7546 tree fn;
538dd0b7 7547 gcall *call_stmt = as_a <gcall *> (stmt);
25583c4f
RS
7548
7549 if (gimple_call_internal_p (stmt))
31e071ae
MP
7550 {
7551 enum tree_code subcode = ERROR_MARK;
7552 switch (gimple_call_internal_fn (stmt))
7553 {
7554 case IFN_UBSAN_CHECK_ADD:
7555 subcode = PLUS_EXPR;
7556 break;
7557 case IFN_UBSAN_CHECK_SUB:
7558 subcode = MINUS_EXPR;
7559 break;
7560 case IFN_UBSAN_CHECK_MUL:
7561 subcode = MULT_EXPR;
7562 break;
68fa96d6
ML
7563 case IFN_BUILTIN_EXPECT:
7564 {
7565 tree arg0 = gimple_call_arg (stmt, 0);
7566 tree op0 = (*valueize) (arg0);
7567 if (TREE_CODE (op0) == INTEGER_CST)
7568 return op0;
7569 return NULL_TREE;
7570 }
31e071ae
MP
7571 default:
7572 return NULL_TREE;
7573 }
368b454d
JJ
7574 tree arg0 = gimple_call_arg (stmt, 0);
7575 tree arg1 = gimple_call_arg (stmt, 1);
7576 tree op0 = (*valueize) (arg0);
7577 tree op1 = (*valueize) (arg1);
31e071ae
MP
7578
7579 if (TREE_CODE (op0) != INTEGER_CST
7580 || TREE_CODE (op1) != INTEGER_CST)
368b454d
JJ
7581 {
7582 switch (subcode)
7583 {
7584 case MULT_EXPR:
7585 /* x * 0 = 0 * x = 0 without overflow. */
7586 if (integer_zerop (op0) || integer_zerop (op1))
7587 return build_zero_cst (TREE_TYPE (arg0));
7588 break;
7589 case MINUS_EXPR:
7590 /* y - y = 0 without overflow. */
7591 if (operand_equal_p (op0, op1, 0))
7592 return build_zero_cst (TREE_TYPE (arg0));
7593 break;
7594 default:
7595 break;
7596 }
7597 }
7598 tree res
7599 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
31e071ae
MP
7600 if (res
7601 && TREE_CODE (res) == INTEGER_CST
7602 && !TREE_OVERFLOW (res))
7603 return res;
7604 return NULL_TREE;
7605 }
25583c4f
RS
7606
7607 fn = (*valueize) (gimple_call_fn (stmt));
cfef45c8 7608 if (TREE_CODE (fn) == ADDR_EXPR
6773658a 7609 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
3d78e008 7610 && fndecl_built_in_p (TREE_OPERAND (fn, 0))
5c944c6c
RB
7611 && gimple_builtin_call_types_compatible_p (stmt,
7612 TREE_OPERAND (fn, 0)))
cfef45c8
RG
7613 {
7614 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
a6a0570f 7615 tree retval;
cfef45c8
RG
7616 unsigned i;
7617 for (i = 0; i < gimple_call_num_args (stmt); ++i)
7618 args[i] = (*valueize) (gimple_call_arg (stmt, i));
a6a0570f 7619 retval = fold_builtin_call_array (loc,
538dd0b7 7620 gimple_call_return_type (call_stmt),
cfef45c8 7621 fn, gimple_call_num_args (stmt), args);
cfef45c8 7622 if (retval)
5c944c6c
RB
7623 {
7624 /* fold_call_expr wraps the result inside a NOP_EXPR. */
7625 STRIP_NOPS (retval);
538dd0b7
DM
7626 retval = fold_convert (gimple_call_return_type (call_stmt),
7627 retval);
5c944c6c 7628 }
cfef45c8
RG
7629 return retval;
7630 }
7631 return NULL_TREE;
7632 }
7633
7634 default:
7635 return NULL_TREE;
7636 }
7637}
7638
7639/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
7640 Returns NULL_TREE if folding to a constant is not possible, otherwise
7641 returns a constant according to is_gimple_min_invariant. */
7642
7643tree
355fe088 7644gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
cfef45c8
RG
7645{
7646 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
7647 if (res && is_gimple_min_invariant (res))
7648 return res;
7649 return NULL_TREE;
7650}
7651
7652
7653/* The following set of functions are supposed to fold references using
7654 their constant initializers. */
7655
cfef45c8
RG
7656/* See if we can find constructor defining value of BASE.
7657 When we know the consructor with constant offset (such as
7658 base is array[40] and we do know constructor of array), then
7659 BIT_OFFSET is adjusted accordingly.
7660
7661 As a special case, return error_mark_node when constructor
7662 is not explicitly available, but it is known to be zero
7663 such as 'static const int a;'. */
7664static tree
588db50c 7665get_base_constructor (tree base, poly_int64_pod *bit_offset,
cfef45c8
RG
7666 tree (*valueize)(tree))
7667{
588db50c 7668 poly_int64 bit_offset2, size, max_size;
ee45a32d
EB
7669 bool reverse;
7670
cfef45c8
RG
7671 if (TREE_CODE (base) == MEM_REF)
7672 {
6a5aca53
ML
7673 poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
7674 if (!boff.to_shwi (bit_offset))
7675 return NULL_TREE;
cfef45c8
RG
7676
7677 if (valueize
7678 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
7679 base = valueize (TREE_OPERAND (base, 0));
7680 if (!base || TREE_CODE (base) != ADDR_EXPR)
7681 return NULL_TREE;
7682 base = TREE_OPERAND (base, 0);
7683 }
13e88953
RB
7684 else if (valueize
7685 && TREE_CODE (base) == SSA_NAME)
7686 base = valueize (base);
cfef45c8
RG
7687
7688 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
7689 DECL_INITIAL. If BASE is a nested reference into another
7690 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
7691 the inner reference. */
7692 switch (TREE_CODE (base))
7693 {
7694 case VAR_DECL:
cfef45c8 7695 case CONST_DECL:
6a6dac52
JH
7696 {
7697 tree init = ctor_for_folding (base);
7698
688010ba 7699 /* Our semantic is exact opposite of ctor_for_folding;
6a6dac52
JH
7700 NULL means unknown, while error_mark_node is 0. */
7701 if (init == error_mark_node)
7702 return NULL_TREE;
7703 if (!init)
7704 return error_mark_node;
7705 return init;
7706 }
cfef45c8 7707
13e88953
RB
7708 case VIEW_CONVERT_EXPR:
7709 return get_base_constructor (TREE_OPERAND (base, 0),
7710 bit_offset, valueize);
7711
cfef45c8
RG
7712 case ARRAY_REF:
7713 case COMPONENT_REF:
ee45a32d
EB
7714 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
7715 &reverse);
588db50c 7716 if (!known_size_p (max_size) || maybe_ne (size, max_size))
cfef45c8
RG
7717 return NULL_TREE;
7718 *bit_offset += bit_offset2;
7719 return get_base_constructor (base, bit_offset, valueize);
7720
cfef45c8
RG
7721 case CONSTRUCTOR:
7722 return base;
7723
7724 default:
13e88953
RB
7725 if (CONSTANT_CLASS_P (base))
7726 return base;
7727
cfef45c8
RG
7728 return NULL_TREE;
7729 }
7730}
7731
35b4d3a6
MS
7732/* CTOR is CONSTRUCTOR of an array type. Fold a reference of SIZE bits
7733 to the memory at bit OFFSET. When non-null, TYPE is the expected
7734 type of the reference; otherwise the type of the referenced element
7735 is used instead. When SIZE is zero, attempt to fold a reference to
7736 the entire element which OFFSET refers to. Increment *SUBOFF by
7737 the bit offset of the accessed element. */
cfef45c8
RG
7738
7739static tree
7740fold_array_ctor_reference (tree type, tree ctor,
7741 unsigned HOST_WIDE_INT offset,
c44c2088 7742 unsigned HOST_WIDE_INT size,
35b4d3a6
MS
7743 tree from_decl,
7744 unsigned HOST_WIDE_INT *suboff)
cfef45c8 7745{
807e902e
KZ
7746 offset_int low_bound;
7747 offset_int elt_size;
807e902e 7748 offset_int access_index;
6a636014 7749 tree domain_type = NULL_TREE;
cfef45c8
RG
7750 HOST_WIDE_INT inner_offset;
7751
7752 /* Compute low bound and elt size. */
eb8f1123
RG
7753 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
7754 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
cfef45c8
RG
7755 if (domain_type && TYPE_MIN_VALUE (domain_type))
7756 {
6aa238a1 7757 /* Static constructors for variably sized objects make no sense. */
9ef2eff0
RB
7758 if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
7759 return NULL_TREE;
807e902e 7760 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
cfef45c8
RG
7761 }
7762 else
807e902e 7763 low_bound = 0;
6aa238a1 7764 /* Static constructors for variably sized objects make no sense. */
9ef2eff0
RB
7765 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
7766 return NULL_TREE;
807e902e 7767 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
cfef45c8 7768
35b4d3a6 7769 /* When TYPE is non-null, verify that it specifies a constant-sized
831e688a 7770 access of a multiple of the array element size. Avoid division
6aa238a1
MS
7771 by zero below when ELT_SIZE is zero, such as with the result of
7772 an initializer for a zero-length array or an empty struct. */
7773 if (elt_size == 0
7774 || (type
7775 && (!TYPE_SIZE_UNIT (type)
831e688a 7776 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST)))
cfef45c8
RG
7777 return NULL_TREE;
7778
7779 /* Compute the array index we look for. */
807e902e
KZ
7780 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
7781 elt_size);
27bcd47c 7782 access_index += low_bound;
cfef45c8
RG
7783
7784 /* And offset within the access. */
27bcd47c 7785 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
cfef45c8 7786
3c076c96
JJ
7787 unsigned HOST_WIDE_INT elt_sz = elt_size.to_uhwi ();
7788 if (size > elt_sz * BITS_PER_UNIT)
831e688a
RB
7789 {
7790 /* native_encode_expr constraints. */
7791 if (size > MAX_BITSIZE_MODE_ANY_MODE
7792 || size % BITS_PER_UNIT != 0
3c076c96
JJ
7793 || inner_offset % BITS_PER_UNIT != 0
7794 || elt_sz > MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT)
831e688a
RB
7795 return NULL_TREE;
7796
7797 unsigned ctor_idx;
7798 tree val = get_array_ctor_element_at_index (ctor, access_index,
7799 &ctor_idx);
7800 if (!val && ctor_idx >= CONSTRUCTOR_NELTS (ctor))
7801 return build_zero_cst (type);
7802
7803 /* native-encode adjacent ctor elements. */
7804 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
7805 unsigned bufoff = 0;
7806 offset_int index = 0;
7807 offset_int max_index = access_index;
7808 constructor_elt *elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
7809 if (!val)
7810 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
7811 else if (!CONSTANT_CLASS_P (val))
7812 return NULL_TREE;
7813 if (!elt->index)
7814 ;
7815 else if (TREE_CODE (elt->index) == RANGE_EXPR)
7816 {
7817 index = wi::to_offset (TREE_OPERAND (elt->index, 0));
7818 max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
7819 }
7820 else
7821 index = max_index = wi::to_offset (elt->index);
7822 index = wi::umax (index, access_index);
7823 do
7824 {
3c076c96
JJ
7825 if (bufoff + elt_sz > sizeof (buf))
7826 elt_sz = sizeof (buf) - bufoff;
7827 int len = native_encode_expr (val, buf + bufoff, elt_sz,
831e688a 7828 inner_offset / BITS_PER_UNIT);
3c076c96 7829 if (len != (int) elt_sz - inner_offset / BITS_PER_UNIT)
831e688a
RB
7830 return NULL_TREE;
7831 inner_offset = 0;
7832 bufoff += len;
7833
7834 access_index += 1;
7835 if (wi::cmpu (access_index, index) == 0)
7836 val = elt->value;
7837 else if (wi::cmpu (access_index, max_index) > 0)
7838 {
7839 ctor_idx++;
7840 if (ctor_idx >= CONSTRUCTOR_NELTS (ctor))
7841 {
7842 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
7843 ++max_index;
7844 }
7845 else
7846 {
7847 elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
7848 index = 0;
7849 max_index = access_index;
7850 if (!elt->index)
7851 ;
7852 else if (TREE_CODE (elt->index) == RANGE_EXPR)
7853 {
7854 index = wi::to_offset (TREE_OPERAND (elt->index, 0));
7855 max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
7856 }
7857 else
7858 index = max_index = wi::to_offset (elt->index);
7859 index = wi::umax (index, access_index);
7860 if (wi::cmpu (access_index, index) == 0)
7861 val = elt->value;
7862 else
7863 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
7864 }
7865 }
7866 }
7867 while (bufoff < size / BITS_PER_UNIT);
7868 *suboff += size;
7869 return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
7870 }
7871
6a636014 7872 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
35b4d3a6
MS
7873 {
7874 if (!size && TREE_CODE (val) != CONSTRUCTOR)
7875 {
7876 /* For the final reference to the entire accessed element
7877 (SIZE is zero), reset INNER_OFFSET, disegard TYPE (which
7878 may be null) in favor of the type of the element, and set
7879 SIZE to the size of the accessed element. */
7880 inner_offset = 0;
7881 type = TREE_TYPE (val);
6e41c27b 7882 size = elt_sz * BITS_PER_UNIT;
35b4d3a6 7883 }
6e41c27b
RB
7884 else if (size && access_index < CONSTRUCTOR_NELTS (ctor) - 1
7885 && TREE_CODE (val) == CONSTRUCTOR
7886 && (elt_sz * BITS_PER_UNIT - inner_offset) < size)
7887 /* If this isn't the last element in the CTOR and a CTOR itself
7888 and it does not cover the whole object we are requesting give up
7889 since we're not set up for combining from multiple CTORs. */
7890 return NULL_TREE;
35b4d3a6 7891
6e41c27b 7892 *suboff += access_index.to_uhwi () * elt_sz * BITS_PER_UNIT;
35b4d3a6
MS
7893 return fold_ctor_reference (type, val, inner_offset, size, from_decl,
7894 suboff);
7895 }
cfef45c8 7896
35b4d3a6
MS
7897 /* Memory not explicitly mentioned in constructor is 0 (or
7898 the reference is out of range). */
7899 return type ? build_zero_cst (type) : NULL_TREE;
cfef45c8
RG
7900}
7901
35b4d3a6
MS
7902/* CTOR is CONSTRUCTOR of an aggregate or vector. Fold a reference
7903 of SIZE bits to the memory at bit OFFSET. When non-null, TYPE
7904 is the expected type of the reference; otherwise the type of
7905 the referenced member is used instead. When SIZE is zero,
7906 attempt to fold a reference to the entire member which OFFSET
7907 refers to; in this case. Increment *SUBOFF by the bit offset
7908 of the accessed member. */
cfef45c8
RG
7909
7910static tree
7911fold_nonarray_ctor_reference (tree type, tree ctor,
7912 unsigned HOST_WIDE_INT offset,
c44c2088 7913 unsigned HOST_WIDE_INT size,
35b4d3a6
MS
7914 tree from_decl,
7915 unsigned HOST_WIDE_INT *suboff)
cfef45c8
RG
7916{
7917 unsigned HOST_WIDE_INT cnt;
7918 tree cfield, cval;
7919
7920 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
7921 cval)
7922 {
7923 tree byte_offset = DECL_FIELD_OFFSET (cfield);
7924 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
7925 tree field_size = DECL_SIZE (cfield);
35b4d3a6
MS
7926
7927 if (!field_size)
7928 {
7929 /* Determine the size of the flexible array member from
7930 the size of the initializer provided for it. */
7931 field_size = TYPE_SIZE (TREE_TYPE (cval));
7932 }
cfef45c8
RG
7933
7934 /* Variable sized objects in static constructors makes no sense,
7935 but field_size can be NULL for flexible array members. */
7936 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
7937 && TREE_CODE (byte_offset) == INTEGER_CST
7938 && (field_size != NULL_TREE
7939 ? TREE_CODE (field_size) == INTEGER_CST
7940 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
7941
7942 /* Compute bit offset of the field. */
35b4d3a6
MS
7943 offset_int bitoffset
7944 = (wi::to_offset (field_offset)
7945 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
cfef45c8 7946 /* Compute bit offset where the field ends. */
35b4d3a6 7947 offset_int bitoffset_end;
cfef45c8 7948 if (field_size != NULL_TREE)
807e902e 7949 bitoffset_end = bitoffset + wi::to_offset (field_size);
cfef45c8 7950 else
807e902e 7951 bitoffset_end = 0;
cfef45c8 7952
35b4d3a6
MS
7953 /* Compute the bit offset of the end of the desired access.
7954 As a special case, if the size of the desired access is
7955 zero, assume the access is to the entire field (and let
7956 the caller make any necessary adjustments by storing
7957 the actual bounds of the field in FIELDBOUNDS). */
7958 offset_int access_end = offset_int (offset);
7959 if (size)
7960 access_end += size;
7961 else
7962 access_end = bitoffset_end;
b8b2b009 7963
35b4d3a6
MS
7964 /* Is there any overlap between the desired access at
7965 [OFFSET, OFFSET+SIZE) and the offset of the field within
7966 the object at [BITOFFSET, BITOFFSET_END)? */
807e902e 7967 if (wi::cmps (access_end, bitoffset) > 0
cfef45c8 7968 && (field_size == NULL_TREE
807e902e 7969 || wi::lts_p (offset, bitoffset_end)))
cfef45c8 7970 {
35b4d3a6
MS
7971 *suboff += bitoffset.to_uhwi ();
7972
7973 if (!size && TREE_CODE (cval) != CONSTRUCTOR)
7974 {
7975 /* For the final reference to the entire accessed member
7976 (SIZE is zero), reset OFFSET, disegard TYPE (which may
7977 be null) in favor of the type of the member, and set
7978 SIZE to the size of the accessed member. */
7979 offset = bitoffset.to_uhwi ();
7980 type = TREE_TYPE (cval);
7981 size = (bitoffset_end - bitoffset).to_uhwi ();
7982 }
7983
7984 /* We do have overlap. Now see if the field is large enough
7985 to cover the access. Give up for accesses that extend
7986 beyond the end of the object or that span multiple fields. */
807e902e 7987 if (wi::cmps (access_end, bitoffset_end) > 0)
cfef45c8 7988 return NULL_TREE;
032c80e9 7989 if (offset < bitoffset)
b8b2b009 7990 return NULL_TREE;
35b4d3a6
MS
7991
7992 offset_int inner_offset = offset_int (offset) - bitoffset;
cfef45c8 7993 return fold_ctor_reference (type, cval,
27bcd47c 7994 inner_offset.to_uhwi (), size,
35b4d3a6 7995 from_decl, suboff);
cfef45c8
RG
7996 }
7997 }
14b7950f
MS
7998
7999 if (!type)
8000 return NULL_TREE;
8001
8002 return build_zero_cst (type);
cfef45c8
RG
8003}
8004
35b4d3a6 8005/* CTOR is value initializing memory. Fold a reference of TYPE and
14b7950f 8006 bit size POLY_SIZE to the memory at bit POLY_OFFSET. When POLY_SIZE
35b4d3a6
MS
8007 is zero, attempt to fold a reference to the entire subobject
8008 which OFFSET refers to. This is used when folding accesses to
8009 string members of aggregates. When non-null, set *SUBOFF to
8010 the bit offset of the accessed subobject. */
cfef45c8 8011
8403c2cf 8012tree
35b4d3a6
MS
8013fold_ctor_reference (tree type, tree ctor, const poly_uint64 &poly_offset,
8014 const poly_uint64 &poly_size, tree from_decl,
8015 unsigned HOST_WIDE_INT *suboff /* = NULL */)
cfef45c8
RG
8016{
8017 tree ret;
8018
8019 /* We found the field with exact match. */
35b4d3a6
MS
8020 if (type
8021 && useless_type_conversion_p (type, TREE_TYPE (ctor))
30acf282 8022 && known_eq (poly_offset, 0U))
9d60be38 8023 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
cfef45c8 8024
30acf282
RS
8025 /* The remaining optimizations need a constant size and offset. */
8026 unsigned HOST_WIDE_INT size, offset;
8027 if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
8028 return NULL_TREE;
8029
cfef45c8
RG
8030 /* We are at the end of walk, see if we can view convert the
8031 result. */
8032 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
8033 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
3d8208ce
TP
8034 && !compare_tree_int (TYPE_SIZE (type), size)
8035 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
cfef45c8 8036 {
9d60be38 8037 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
cfef45c8 8038 if (ret)
672d9f8e
RB
8039 {
8040 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
8041 if (ret)
8042 STRIP_USELESS_TYPE_CONVERSION (ret);
8043 }
cfef45c8
RG
8044 return ret;
8045 }
b2505143
RB
8046 /* For constants and byte-aligned/sized reads try to go through
8047 native_encode/interpret. */
8048 if (CONSTANT_CLASS_P (ctor)
8049 && BITS_PER_UNIT == 8
8050 && offset % BITS_PER_UNIT == 0
ea69031c 8051 && offset / BITS_PER_UNIT <= INT_MAX
b2505143 8052 && size % BITS_PER_UNIT == 0
ea69031c
JJ
8053 && size <= MAX_BITSIZE_MODE_ANY_MODE
8054 && can_native_interpret_type_p (type))
b2505143
RB
8055 {
8056 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1ff0a84c
JJ
8057 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
8058 offset / BITS_PER_UNIT);
8059 if (len > 0)
8060 return native_interpret_expr (type, buf, len);
b2505143 8061 }
cfef45c8
RG
8062 if (TREE_CODE (ctor) == CONSTRUCTOR)
8063 {
35b4d3a6
MS
8064 unsigned HOST_WIDE_INT dummy = 0;
8065 if (!suboff)
8066 suboff = &dummy;
cfef45c8 8067
ea69031c 8068 tree ret;
eb8f1123
RG
8069 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
8070 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
ea69031c
JJ
8071 ret = fold_array_ctor_reference (type, ctor, offset, size,
8072 from_decl, suboff);
8073 else
8074 ret = fold_nonarray_ctor_reference (type, ctor, offset, size,
8075 from_decl, suboff);
8076
8077 /* Fall back to native_encode_initializer. Needs to be done
8078 only in the outermost fold_ctor_reference call (because it itself
8079 recurses into CONSTRUCTORs) and doesn't update suboff. */
8080 if (ret == NULL_TREE
8081 && suboff == &dummy
8082 && BITS_PER_UNIT == 8
8083 && offset % BITS_PER_UNIT == 0
8084 && offset / BITS_PER_UNIT <= INT_MAX
8085 && size % BITS_PER_UNIT == 0
8086 && size <= MAX_BITSIZE_MODE_ANY_MODE
8087 && can_native_interpret_type_p (type))
8088 {
8089 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
8090 int len = native_encode_initializer (ctor, buf, size / BITS_PER_UNIT,
8091 offset / BITS_PER_UNIT);
8092 if (len > 0)
8093 return native_interpret_expr (type, buf, len);
8094 }
35b4d3a6 8095
ea69031c 8096 return ret;
cfef45c8
RG
8097 }
8098
8099 return NULL_TREE;
8100}
8101
8102/* Return the tree representing the element referenced by T if T is an
8103 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
8104 names using VALUEIZE. Return NULL_TREE otherwise. */
8105
8106tree
8107fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
8108{
8109 tree ctor, idx, base;
588db50c 8110 poly_int64 offset, size, max_size;
cfef45c8 8111 tree tem;
ee45a32d 8112 bool reverse;
cfef45c8 8113
f8a7df45
RG
8114 if (TREE_THIS_VOLATILE (t))
8115 return NULL_TREE;
8116
3a65ee74 8117 if (DECL_P (t))
cfef45c8
RG
8118 return get_symbol_constant_value (t);
8119
8120 tem = fold_read_from_constant_string (t);
8121 if (tem)
8122 return tem;
8123
8124 switch (TREE_CODE (t))
8125 {
8126 case ARRAY_REF:
8127 case ARRAY_RANGE_REF:
8128 /* Constant indexes are handled well by get_base_constructor.
8129 Only special case variable offsets.
8130 FIXME: This code can't handle nested references with variable indexes
8131 (they will be handled only by iteration of ccp). Perhaps we can bring
8132 get_ref_base_and_extent here and make it use a valueize callback. */
8133 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
8134 && valueize
8135 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
588db50c 8136 && poly_int_tree_p (idx))
cfef45c8
RG
8137 {
8138 tree low_bound, unit_size;
8139
8140 /* If the resulting bit-offset is constant, track it. */
8141 if ((low_bound = array_ref_low_bound (t),
588db50c 8142 poly_int_tree_p (low_bound))
cfef45c8 8143 && (unit_size = array_ref_element_size (t),
807e902e 8144 tree_fits_uhwi_p (unit_size)))
cfef45c8 8145 {
588db50c
RS
8146 poly_offset_int woffset
8147 = wi::sext (wi::to_poly_offset (idx)
8148 - wi::to_poly_offset (low_bound),
e287a2a1 8149 TYPE_PRECISION (sizetype));
a9e6359a
RB
8150 woffset *= tree_to_uhwi (unit_size);
8151 woffset *= BITS_PER_UNIT;
588db50c 8152 if (woffset.to_shwi (&offset))
807e902e 8153 {
807e902e
KZ
8154 base = TREE_OPERAND (t, 0);
8155 ctor = get_base_constructor (base, &offset, valueize);
8156 /* Empty constructor. Always fold to 0. */
8157 if (ctor == error_mark_node)
8158 return build_zero_cst (TREE_TYPE (t));
8159 /* Out of bound array access. Value is undefined,
8160 but don't fold. */
588db50c 8161 if (maybe_lt (offset, 0))
807e902e 8162 return NULL_TREE;
67914693 8163 /* We cannot determine ctor. */
807e902e
KZ
8164 if (!ctor)
8165 return NULL_TREE;
8166 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
8167 tree_to_uhwi (unit_size)
8168 * BITS_PER_UNIT,
8169 base);
8170 }
cfef45c8
RG
8171 }
8172 }
8173 /* Fallthru. */
8174
8175 case COMPONENT_REF:
8176 case BIT_FIELD_REF:
8177 case TARGET_MEM_REF:
8178 case MEM_REF:
ee45a32d 8179 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
cfef45c8
RG
8180 ctor = get_base_constructor (base, &offset, valueize);
8181
8182 /* Empty constructor. Always fold to 0. */
8183 if (ctor == error_mark_node)
8184 return build_zero_cst (TREE_TYPE (t));
8185 /* We do not know precise address. */
588db50c 8186 if (!known_size_p (max_size) || maybe_ne (max_size, size))
cfef45c8 8187 return NULL_TREE;
67914693 8188 /* We cannot determine ctor. */
cfef45c8
RG
8189 if (!ctor)
8190 return NULL_TREE;
8191
8192 /* Out of bound array access. Value is undefined, but don't fold. */
588db50c 8193 if (maybe_lt (offset, 0))
cfef45c8
RG
8194 return NULL_TREE;
8195
e4f1cbc3
JJ
8196 tem = fold_ctor_reference (TREE_TYPE (t), ctor, offset, size, base);
8197 if (tem)
8198 return tem;
8199
8200 /* For bit field reads try to read the representative and
8201 adjust. */
8202 if (TREE_CODE (t) == COMPONENT_REF
8203 && DECL_BIT_FIELD (TREE_OPERAND (t, 1))
8204 && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)))
8205 {
8206 HOST_WIDE_INT csize, coffset;
8207 tree field = TREE_OPERAND (t, 1);
8208 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
8209 if (INTEGRAL_TYPE_P (TREE_TYPE (repr))
8210 && size.is_constant (&csize)
8211 && offset.is_constant (&coffset)
8212 && (coffset % BITS_PER_UNIT != 0
8213 || csize % BITS_PER_UNIT != 0)
8214 && !reverse
8215 && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN)
8216 {
8217 poly_int64 bitoffset;
8218 poly_uint64 field_offset, repr_offset;
8219 if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
8220 && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
8221 bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
8222 else
8223 bitoffset = 0;
8224 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
8225 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
8226 HOST_WIDE_INT bitoff;
8227 int diff = (TYPE_PRECISION (TREE_TYPE (repr))
8228 - TYPE_PRECISION (TREE_TYPE (field)));
8229 if (bitoffset.is_constant (&bitoff)
8230 && bitoff >= 0
8231 && bitoff <= diff)
8232 {
8233 offset -= bitoff;
8234 size = tree_to_uhwi (DECL_SIZE (repr));
8235
8236 tem = fold_ctor_reference (TREE_TYPE (repr), ctor, offset,
8237 size, base);
8238 if (tem && TREE_CODE (tem) == INTEGER_CST)
8239 {
8240 if (!BYTES_BIG_ENDIAN)
8241 tem = wide_int_to_tree (TREE_TYPE (field),
8242 wi::lrshift (wi::to_wide (tem),
8243 bitoff));
8244 else
8245 tem = wide_int_to_tree (TREE_TYPE (field),
8246 wi::lrshift (wi::to_wide (tem),
8247 diff - bitoff));
8248 return tem;
8249 }
8250 }
8251 }
8252 }
8253 break;
cfef45c8
RG
8254
8255 case REALPART_EXPR:
8256 case IMAGPART_EXPR:
8257 {
8258 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
8259 if (c && TREE_CODE (c) == COMPLEX_CST)
8260 return fold_build1_loc (EXPR_LOCATION (t),
ea69031c 8261 TREE_CODE (t), TREE_TYPE (t), c);
cfef45c8
RG
8262 break;
8263 }
8264
8265 default:
8266 break;
8267 }
8268
8269 return NULL_TREE;
8270}
8271
8272tree
8273fold_const_aggregate_ref (tree t)
8274{
8275 return fold_const_aggregate_ref_1 (t, NULL);
8276}
06bc3ec7 8277
85942f45 8278/* Lookup virtual method with index TOKEN in a virtual table V
ec77d61f
JH
8279 at OFFSET.
8280 Set CAN_REFER if non-NULL to false if method
8281 is not referable or if the virtual table is ill-formed (such as rewriten
8282 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
81fa35bd
MJ
8283
8284tree
85942f45
JH
8285gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
8286 tree v,
ec77d61f
JH
8287 unsigned HOST_WIDE_INT offset,
8288 bool *can_refer)
81fa35bd 8289{
85942f45
JH
8290 tree vtable = v, init, fn;
8291 unsigned HOST_WIDE_INT size;
8c311b50
JH
8292 unsigned HOST_WIDE_INT elt_size, access_index;
8293 tree domain_type;
81fa35bd 8294
ec77d61f
JH
8295 if (can_refer)
8296 *can_refer = true;
8297
9de2f554 8298 /* First of all double check we have virtual table. */
8813a647 8299 if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
ec77d61f 8300 {
ec77d61f
JH
8301 /* Pass down that we lost track of the target. */
8302 if (can_refer)
8303 *can_refer = false;
8304 return NULL_TREE;
8305 }
9de2f554 8306
2aa3da06
JH
8307 init = ctor_for_folding (v);
8308
9de2f554 8309 /* The virtual tables should always be born with constructors
2aa3da06
JH
8310 and we always should assume that they are avaialble for
8311 folding. At the moment we do not stream them in all cases,
8312 but it should never happen that ctor seem unreachable. */
8313 gcc_assert (init);
8314 if (init == error_mark_node)
8315 {
ec77d61f
JH
8316 /* Pass down that we lost track of the target. */
8317 if (can_refer)
8318 *can_refer = false;
2aa3da06
JH
8319 return NULL_TREE;
8320 }
81fa35bd 8321 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
ae7e9ddd 8322 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
85942f45 8323 offset *= BITS_PER_UNIT;
81fa35bd 8324 offset += token * size;
9de2f554 8325
8c311b50
JH
8326 /* Lookup the value in the constructor that is assumed to be array.
8327 This is equivalent to
8328 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
8329 offset, size, NULL);
8330 but in a constant time. We expect that frontend produced a simple
8331 array without indexed initializers. */
8332
8333 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
8334 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
8335 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
8336 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
8337
8338 access_index = offset / BITS_PER_UNIT / elt_size;
8339 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
8340
bf8d8309
MP
8341 /* The C++ FE can now produce indexed fields, and we check if the indexes
8342 match. */
8c311b50
JH
8343 if (access_index < CONSTRUCTOR_NELTS (init))
8344 {
8345 fn = CONSTRUCTOR_ELT (init, access_index)->value;
bf8d8309
MP
8346 tree idx = CONSTRUCTOR_ELT (init, access_index)->index;
8347 gcc_checking_assert (!idx || tree_to_uhwi (idx) == access_index);
8c311b50
JH
8348 STRIP_NOPS (fn);
8349 }
8350 else
8351 fn = NULL;
9de2f554
JH
8352
8353 /* For type inconsistent program we may end up looking up virtual method
8354 in virtual table that does not contain TOKEN entries. We may overrun
8355 the virtual table and pick up a constant or RTTI info pointer.
8356 In any case the call is undefined. */
8357 if (!fn
8358 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
8359 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
8360 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
8361 else
8362 {
8363 fn = TREE_OPERAND (fn, 0);
8364
8365 /* When cgraph node is missing and function is not public, we cannot
8366 devirtualize. This can happen in WHOPR when the actual method
8367 ends up in other partition, because we found devirtualization
8368 possibility too late. */
8369 if (!can_refer_decl_in_current_unit_p (fn, vtable))
ec77d61f
JH
8370 {
8371 if (can_refer)
8372 {
8373 *can_refer = false;
8374 return fn;
8375 }
8376 return NULL_TREE;
8377 }
9de2f554 8378 }
81fa35bd 8379
7501ca28
RG
8380 /* Make sure we create a cgraph node for functions we'll reference.
8381 They can be non-existent if the reference comes from an entry
8382 of an external vtable for example. */
d52f5295 8383 cgraph_node::get_create (fn);
7501ca28 8384
81fa35bd
MJ
8385 return fn;
8386}
8387
85942f45
JH
8388/* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
8389 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
8390 KNOWN_BINFO carries the binfo describing the true type of
ec77d61f
JH
8391 OBJ_TYPE_REF_OBJECT(REF).
8392 Set CAN_REFER if non-NULL to false if method
8393 is not referable or if the virtual table is ill-formed (such as rewriten
8394 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
85942f45
JH
8395
8396tree
ec77d61f
JH
8397gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
8398 bool *can_refer)
85942f45
JH
8399{
8400 unsigned HOST_WIDE_INT offset;
8401 tree v;
8402
8403 v = BINFO_VTABLE (known_binfo);
8404 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
8405 if (!v)
8406 return NULL_TREE;
8407
8408 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
ec77d61f
JH
8409 {
8410 if (can_refer)
8411 *can_refer = false;
8412 return NULL_TREE;
8413 }
8414 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
85942f45
JH
8415}
8416
737f500a
RB
8417/* Given a pointer value T, return a simplified version of an
8418 indirection through T, or NULL_TREE if no simplification is
b184c8f1
AM
8419 possible. Note that the resulting type may be different from
8420 the type pointed to in the sense that it is still compatible
8421 from the langhooks point of view. */
8422
8423tree
8424gimple_fold_indirect_ref (tree t)
8425{
8426 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
8427 tree sub = t;
8428 tree subtype;
8429
8430 STRIP_NOPS (sub);
8431 subtype = TREE_TYPE (sub);
737f500a
RB
8432 if (!POINTER_TYPE_P (subtype)
8433 || TYPE_REF_CAN_ALIAS_ALL (ptype))
b184c8f1
AM
8434 return NULL_TREE;
8435
8436 if (TREE_CODE (sub) == ADDR_EXPR)
8437 {
8438 tree op = TREE_OPERAND (sub, 0);
8439 tree optype = TREE_TYPE (op);
8440 /* *&p => p */
8441 if (useless_type_conversion_p (type, optype))
8442 return op;
8443
8444 /* *(foo *)&fooarray => fooarray[0] */
8445 if (TREE_CODE (optype) == ARRAY_TYPE
8446 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
8447 && useless_type_conversion_p (type, TREE_TYPE (optype)))
8448 {
8449 tree type_domain = TYPE_DOMAIN (optype);
8450 tree min_val = size_zero_node;
8451 if (type_domain && TYPE_MIN_VALUE (type_domain))
8452 min_val = TYPE_MIN_VALUE (type_domain);
8453 if (TREE_CODE (min_val) == INTEGER_CST)
8454 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
8455 }
8456 /* *(foo *)&complexfoo => __real__ complexfoo */
8457 else if (TREE_CODE (optype) == COMPLEX_TYPE
8458 && useless_type_conversion_p (type, TREE_TYPE (optype)))
8459 return fold_build1 (REALPART_EXPR, type, op);
8460 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
8461 else if (TREE_CODE (optype) == VECTOR_TYPE
8462 && useless_type_conversion_p (type, TREE_TYPE (optype)))
8463 {
8464 tree part_width = TYPE_SIZE (type);
8465 tree index = bitsize_int (0);
8466 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
8467 }
8468 }
8469
8470 /* *(p + CST) -> ... */
8471 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
8472 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
8473 {
8474 tree addr = TREE_OPERAND (sub, 0);
8475 tree off = TREE_OPERAND (sub, 1);
8476 tree addrtype;
8477
8478 STRIP_NOPS (addr);
8479 addrtype = TREE_TYPE (addr);
8480
8481 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
8482 if (TREE_CODE (addr) == ADDR_EXPR
8483 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
8484 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
cc269bb6 8485 && tree_fits_uhwi_p (off))
b184c8f1 8486 {
ae7e9ddd 8487 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
b184c8f1
AM
8488 tree part_width = TYPE_SIZE (type);
8489 unsigned HOST_WIDE_INT part_widthi
9439e9a1 8490 = tree_to_shwi (part_width) / BITS_PER_UNIT;
b184c8f1
AM
8491 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
8492 tree index = bitsize_int (indexi);
928686b1
RS
8493 if (known_lt (offset / part_widthi,
8494 TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
b184c8f1
AM
8495 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
8496 part_width, index);
8497 }
8498
8499 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
8500 if (TREE_CODE (addr) == ADDR_EXPR
8501 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
8502 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
8503 {
8504 tree size = TYPE_SIZE_UNIT (type);
8505 if (tree_int_cst_equal (size, off))
8506 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
8507 }
8508
8509 /* *(p + CST) -> MEM_REF <p, CST>. */
8510 if (TREE_CODE (addr) != ADDR_EXPR
8511 || DECL_P (TREE_OPERAND (addr, 0)))
8512 return fold_build2 (MEM_REF, type,
8513 addr,
8e6cdc90 8514 wide_int_to_tree (ptype, wi::to_wide (off)));
b184c8f1
AM
8515 }
8516
8517 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
8518 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
8519 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
8520 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
8521 {
8522 tree type_domain;
8523 tree min_val = size_zero_node;
8524 tree osub = sub;
8525 sub = gimple_fold_indirect_ref (sub);
8526 if (! sub)
8527 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
8528 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
8529 if (type_domain && TYPE_MIN_VALUE (type_domain))
8530 min_val = TYPE_MIN_VALUE (type_domain);
8531 if (TREE_CODE (min_val) == INTEGER_CST)
8532 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
8533 }
8534
8535 return NULL_TREE;
8536}
19e51b40
JJ
8537
8538/* Return true if CODE is an operation that when operating on signed
8539 integer types involves undefined behavior on overflow and the
8540 operation can be expressed with unsigned arithmetic. */
8541
8542bool
8543arith_code_with_undefined_signed_overflow (tree_code code)
8544{
8545 switch (code)
8546 {
8e2c037d 8547 case ABS_EXPR:
19e51b40
JJ
8548 case PLUS_EXPR:
8549 case MINUS_EXPR:
8550 case MULT_EXPR:
8551 case NEGATE_EXPR:
8552 case POINTER_PLUS_EXPR:
8553 return true;
8554 default:
8555 return false;
8556 }
8557}
8558
8559/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
8560 operation that can be transformed to unsigned arithmetic by converting
8561 its operand, carrying out the operation in the corresponding unsigned
8562 type and converting the result back to the original type.
8563
8564 Returns a sequence of statements that replace STMT and also contain
8565 a modified form of STMT itself. */
8566
8567gimple_seq
355fe088 8568rewrite_to_defined_overflow (gimple *stmt)
19e51b40
JJ
8569{
8570 if (dump_file && (dump_flags & TDF_DETAILS))
8571 {
8572 fprintf (dump_file, "rewriting stmt with undefined signed "
8573 "overflow ");
8574 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
8575 }
8576
8577 tree lhs = gimple_assign_lhs (stmt);
8578 tree type = unsigned_type_for (TREE_TYPE (lhs));
8579 gimple_seq stmts = NULL;
8e2c037d
RB
8580 if (gimple_assign_rhs_code (stmt) == ABS_EXPR)
8581 gimple_assign_set_rhs_code (stmt, ABSU_EXPR);
8582 else
8583 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
8584 {
8585 tree op = gimple_op (stmt, i);
8586 op = gimple_convert (&stmts, type, op);
8587 gimple_set_op (stmt, i, op);
8588 }
19e51b40
JJ
8589 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
8590 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
8591 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
f74c4b2c 8592 gimple_set_modified (stmt, true);
19e51b40 8593 gimple_seq_add_stmt (&stmts, stmt);
355fe088 8594 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
19e51b40
JJ
8595 gimple_seq_add_stmt (&stmts, cvt);
8596
8597 return stmts;
8598}
d4f5cd5e 8599
3d2cf79f 8600
c26de36d
RB
8601/* The valueization hook we use for the gimple_build API simplification.
8602 This makes us match fold_buildN behavior by only combining with
8603 statements in the sequence(s) we are currently building. */
8604
8605static tree
8606gimple_build_valueize (tree op)
8607{
8608 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
8609 return op;
8610 return NULL_TREE;
8611}
8612
3d2cf79f 8613/* Build the expression CODE OP0 of type TYPE with location LOC,
c26de36d 8614 simplifying it first if possible. Returns the built
3d2cf79f
RB
8615 expression value and appends statements possibly defining it
8616 to SEQ. */
8617
8618tree
8619gimple_build (gimple_seq *seq, location_t loc,
c26de36d 8620 enum tree_code code, tree type, tree op0)
3d2cf79f 8621{
c26de36d 8622 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
3d2cf79f
RB
8623 if (!res)
8624 {
a15ebbcd 8625 res = create_tmp_reg_or_ssa_name (type);
355fe088 8626 gimple *stmt;
3d2cf79f
RB
8627 if (code == REALPART_EXPR
8628 || code == IMAGPART_EXPR
8629 || code == VIEW_CONVERT_EXPR)
0d0e4a03 8630 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
3d2cf79f 8631 else
0d0e4a03 8632 stmt = gimple_build_assign (res, code, op0);
3d2cf79f
RB
8633 gimple_set_location (stmt, loc);
8634 gimple_seq_add_stmt_without_update (seq, stmt);
8635 }
8636 return res;
8637}
8638
8639/* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
c26de36d 8640 simplifying it first if possible. Returns the built
3d2cf79f
RB
8641 expression value and appends statements possibly defining it
8642 to SEQ. */
8643
8644tree
8645gimple_build (gimple_seq *seq, location_t loc,
c26de36d 8646 enum tree_code code, tree type, tree op0, tree op1)
3d2cf79f 8647{
c26de36d 8648 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
3d2cf79f
RB
8649 if (!res)
8650 {
a15ebbcd 8651 res = create_tmp_reg_or_ssa_name (type);
355fe088 8652 gimple *stmt = gimple_build_assign (res, code, op0, op1);
3d2cf79f
RB
8653 gimple_set_location (stmt, loc);
8654 gimple_seq_add_stmt_without_update (seq, stmt);
8655 }
8656 return res;
8657}
8658
8659/* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
c26de36d 8660 simplifying it first if possible. Returns the built
3d2cf79f
RB
8661 expression value and appends statements possibly defining it
8662 to SEQ. */
8663
8664tree
8665gimple_build (gimple_seq *seq, location_t loc,
c26de36d 8666 enum tree_code code, tree type, tree op0, tree op1, tree op2)
3d2cf79f
RB
8667{
8668 tree res = gimple_simplify (code, type, op0, op1, op2,
c26de36d 8669 seq, gimple_build_valueize);
3d2cf79f
RB
8670 if (!res)
8671 {
a15ebbcd 8672 res = create_tmp_reg_or_ssa_name (type);
355fe088 8673 gimple *stmt;
3d2cf79f 8674 if (code == BIT_FIELD_REF)
0d0e4a03
JJ
8675 stmt = gimple_build_assign (res, code,
8676 build3 (code, type, op0, op1, op2));
3d2cf79f 8677 else
0d0e4a03 8678 stmt = gimple_build_assign (res, code, op0, op1, op2);
3d2cf79f
RB
8679 gimple_set_location (stmt, loc);
8680 gimple_seq_add_stmt_without_update (seq, stmt);
8681 }
8682 return res;
8683}
8684
93a73251
MM
8685/* Build the call FN () with a result of type TYPE (or no result if TYPE is
8686 void) with a location LOC. Returns the built expression value (or NULL_TREE
8687 if TYPE is void) and appends statements possibly defining it to SEQ. */
8688
8689tree
8690gimple_build (gimple_seq *seq, location_t loc, combined_fn fn, tree type)
8691{
8692 tree res = NULL_TREE;
8693 gcall *stmt;
8694 if (internal_fn_p (fn))
8695 stmt = gimple_build_call_internal (as_internal_fn (fn), 0);
8696 else
8697 {
8698 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
8699 stmt = gimple_build_call (decl, 0);
8700 }
8701 if (!VOID_TYPE_P (type))
8702 {
8703 res = create_tmp_reg_or_ssa_name (type);
8704 gimple_call_set_lhs (stmt, res);
8705 }
8706 gimple_set_location (stmt, loc);
8707 gimple_seq_add_stmt_without_update (seq, stmt);
8708 return res;
8709}
8710
3d2cf79f
RB
8711/* Build the call FN (ARG0) with a result of type TYPE
8712 (or no result if TYPE is void) with location LOC,
c26de36d 8713 simplifying it first if possible. Returns the built
3d2cf79f
RB
8714 expression value (or NULL_TREE if TYPE is void) and appends
8715 statements possibly defining it to SEQ. */
8716
8717tree
eb69361d
RS
8718gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
8719 tree type, tree arg0)
3d2cf79f 8720{
c26de36d 8721 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
3d2cf79f
RB
8722 if (!res)
8723 {
eb69361d
RS
8724 gcall *stmt;
8725 if (internal_fn_p (fn))
8726 stmt = gimple_build_call_internal (as_internal_fn (fn), 1, arg0);
8727 else
8728 {
8729 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
8730 stmt = gimple_build_call (decl, 1, arg0);
8731 }
3d2cf79f
RB
8732 if (!VOID_TYPE_P (type))
8733 {
a15ebbcd 8734 res = create_tmp_reg_or_ssa_name (type);
3d2cf79f
RB
8735 gimple_call_set_lhs (stmt, res);
8736 }
8737 gimple_set_location (stmt, loc);
8738 gimple_seq_add_stmt_without_update (seq, stmt);
8739 }
8740 return res;
8741}
8742
8743/* Build the call FN (ARG0, ARG1) with a result of type TYPE
8744 (or no result if TYPE is void) with location LOC,
c26de36d 8745 simplifying it first if possible. Returns the built
3d2cf79f
RB
8746 expression value (or NULL_TREE if TYPE is void) and appends
8747 statements possibly defining it to SEQ. */
8748
8749tree
eb69361d
RS
8750gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
8751 tree type, tree arg0, tree arg1)
3d2cf79f 8752{
c26de36d 8753 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
3d2cf79f
RB
8754 if (!res)
8755 {
eb69361d
RS
8756 gcall *stmt;
8757 if (internal_fn_p (fn))
8758 stmt = gimple_build_call_internal (as_internal_fn (fn), 2, arg0, arg1);
8759 else
8760 {
8761 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
8762 stmt = gimple_build_call (decl, 2, arg0, arg1);
8763 }
3d2cf79f
RB
8764 if (!VOID_TYPE_P (type))
8765 {
a15ebbcd 8766 res = create_tmp_reg_or_ssa_name (type);
3d2cf79f
RB
8767 gimple_call_set_lhs (stmt, res);
8768 }
8769 gimple_set_location (stmt, loc);
8770 gimple_seq_add_stmt_without_update (seq, stmt);
8771 }
8772 return res;
8773}
8774
8775/* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
8776 (or no result if TYPE is void) with location LOC,
c26de36d 8777 simplifying it first if possible. Returns the built
3d2cf79f
RB
8778 expression value (or NULL_TREE if TYPE is void) and appends
8779 statements possibly defining it to SEQ. */
8780
8781tree
eb69361d
RS
8782gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
8783 tree type, tree arg0, tree arg1, tree arg2)
3d2cf79f 8784{
c26de36d
RB
8785 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
8786 seq, gimple_build_valueize);
3d2cf79f
RB
8787 if (!res)
8788 {
eb69361d
RS
8789 gcall *stmt;
8790 if (internal_fn_p (fn))
8791 stmt = gimple_build_call_internal (as_internal_fn (fn),
8792 3, arg0, arg1, arg2);
8793 else
8794 {
8795 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
8796 stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
8797 }
3d2cf79f
RB
8798 if (!VOID_TYPE_P (type))
8799 {
a15ebbcd 8800 res = create_tmp_reg_or_ssa_name (type);
3d2cf79f
RB
8801 gimple_call_set_lhs (stmt, res);
8802 }
8803 gimple_set_location (stmt, loc);
8804 gimple_seq_add_stmt_without_update (seq, stmt);
8805 }
8806 return res;
8807}
8808
8809/* Build the conversion (TYPE) OP with a result of type TYPE
8810 with location LOC if such conversion is neccesary in GIMPLE,
8811 simplifying it first.
8812 Returns the built expression value and appends
8813 statements possibly defining it to SEQ. */
d4f5cd5e
RB
8814
8815tree
8816gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
8817{
8818 if (useless_type_conversion_p (type, TREE_TYPE (op)))
8819 return op;
3d2cf79f 8820 return gimple_build (seq, loc, NOP_EXPR, type, op);
d4f5cd5e 8821}
68e57f04 8822
74e3c262
RB
8823/* Build the conversion (ptrofftype) OP with a result of a type
8824 compatible with ptrofftype with location LOC if such conversion
8825 is neccesary in GIMPLE, simplifying it first.
8826 Returns the built expression value and appends
8827 statements possibly defining it to SEQ. */
8828
8829tree
8830gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
8831{
8832 if (ptrofftype_p (TREE_TYPE (op)))
8833 return op;
8834 return gimple_convert (seq, loc, sizetype, op);
8835}
8836
e7c45b66
RS
8837/* Build a vector of type TYPE in which each element has the value OP.
8838 Return a gimple value for the result, appending any new statements
8839 to SEQ. */
8840
8841tree
8842gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
8843 tree op)
8844{
928686b1
RS
8845 if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
8846 && !CONSTANT_CLASS_P (op))
8847 return gimple_build (seq, loc, VEC_DUPLICATE_EXPR, type, op);
8848
e7c45b66
RS
8849 tree res, vec = build_vector_from_val (type, op);
8850 if (is_gimple_val (vec))
8851 return vec;
8852 if (gimple_in_ssa_p (cfun))
8853 res = make_ssa_name (type);
8854 else
8855 res = create_tmp_reg (type);
8856 gimple *stmt = gimple_build_assign (res, vec);
8857 gimple_set_location (stmt, loc);
8858 gimple_seq_add_stmt_without_update (seq, stmt);
8859 return res;
8860}
8861
abe73c3d
RS
8862/* Build a vector from BUILDER, handling the case in which some elements
8863 are non-constant. Return a gimple value for the result, appending any
8864 new instructions to SEQ.
8865
8866 BUILDER must not have a stepped encoding on entry. This is because
8867 the function is not geared up to handle the arithmetic that would
8868 be needed in the variable case, and any code building a vector that
8869 is known to be constant should use BUILDER->build () directly. */
e7c45b66
RS
8870
8871tree
abe73c3d
RS
8872gimple_build_vector (gimple_seq *seq, location_t loc,
8873 tree_vector_builder *builder)
e7c45b66 8874{
abe73c3d
RS
8875 gcc_assert (builder->nelts_per_pattern () <= 2);
8876 unsigned int encoded_nelts = builder->encoded_nelts ();
8877 for (unsigned int i = 0; i < encoded_nelts; ++i)
8ebedfcd 8878 if (!CONSTANT_CLASS_P ((*builder)[i]))
e7c45b66 8879 {
abe73c3d 8880 tree type = builder->type ();
928686b1 8881 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
e7c45b66
RS
8882 vec<constructor_elt, va_gc> *v;
8883 vec_alloc (v, nelts);
8884 for (i = 0; i < nelts; ++i)
abe73c3d 8885 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));
e7c45b66
RS
8886
8887 tree res;
8888 if (gimple_in_ssa_p (cfun))
8889 res = make_ssa_name (type);
8890 else
8891 res = create_tmp_reg (type);
8892 gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
8893 gimple_set_location (stmt, loc);
8894 gimple_seq_add_stmt_without_update (seq, stmt);
8895 return res;
8896 }
abe73c3d 8897 return builder->build ();
e7c45b66
RS
8898}
8899
93a73251
MM
8900/* Emit gimple statements into &stmts that take a value given in OLD_SIZE
8901 and generate a value guaranteed to be rounded upwards to ALIGN.
8902
8903 Return the tree node representing this size, it is of TREE_TYPE TYPE. */
8904
8905tree
8906gimple_build_round_up (gimple_seq *seq, location_t loc, tree type,
8907 tree old_size, unsigned HOST_WIDE_INT align)
8908{
8909 unsigned HOST_WIDE_INT tg_mask = align - 1;
8910 /* tree new_size = (old_size + tg_mask) & ~tg_mask; */
8911 gcc_assert (INTEGRAL_TYPE_P (type));
8912 tree tree_mask = build_int_cst (type, tg_mask);
8913 tree oversize = gimple_build (seq, loc, PLUS_EXPR, type, old_size,
8914 tree_mask);
8915
8916 tree mask = build_int_cst (type, -align);
8917 return gimple_build (seq, loc, BIT_AND_EXPR, type, oversize, mask);
8918}
8919
68e57f04
RS
8920/* Return true if the result of assignment STMT is known to be non-negative.
8921 If the return value is based on the assumption that signed overflow is
8922 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
8923 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
8924
8925static bool
8926gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
8927 int depth)
8928{
8929 enum tree_code code = gimple_assign_rhs_code (stmt);
ce777eae 8930 tree type = TREE_TYPE (gimple_assign_lhs (stmt));
68e57f04
RS
8931 switch (get_gimple_rhs_class (code))
8932 {
8933 case GIMPLE_UNARY_RHS:
8934 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
ce777eae 8935 type,
68e57f04
RS
8936 gimple_assign_rhs1 (stmt),
8937 strict_overflow_p, depth);
8938 case GIMPLE_BINARY_RHS:
8939 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
ce777eae 8940 type,
68e57f04
RS
8941 gimple_assign_rhs1 (stmt),
8942 gimple_assign_rhs2 (stmt),
8943 strict_overflow_p, depth);
8944 case GIMPLE_TERNARY_RHS:
8945 return false;
8946 case GIMPLE_SINGLE_RHS:
8947 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
8948 strict_overflow_p, depth);
8949 case GIMPLE_INVALID_RHS:
8950 break;
8951 }
8952 gcc_unreachable ();
8953}
8954
8955/* Return true if return value of call STMT is known to be non-negative.
8956 If the return value is based on the assumption that signed overflow is
8957 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
8958 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
8959
8960static bool
8961gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
8962 int depth)
8963{
8964 tree arg0 = gimple_call_num_args (stmt) > 0 ?
8965 gimple_call_arg (stmt, 0) : NULL_TREE;
8966 tree arg1 = gimple_call_num_args (stmt) > 1 ?
8967 gimple_call_arg (stmt, 1) : NULL_TREE;
ce777eae
RB
8968 tree lhs = gimple_call_lhs (stmt);
8969 return (lhs
8970 && tree_call_nonnegative_warnv_p (TREE_TYPE (lhs),
8971 gimple_call_combined_fn (stmt),
8972 arg0, arg1,
8973 strict_overflow_p, depth));
68e57f04
RS
8974}
8975
4534c203
RB
8976/* Return true if return value of call STMT is known to be non-negative.
8977 If the return value is based on the assumption that signed overflow is
8978 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
8979 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
8980
8981static bool
8982gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
8983 int depth)
8984{
8985 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
8986 {
8987 tree arg = gimple_phi_arg_def (stmt, i);
8988 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
8989 return false;
8990 }
8991 return true;
8992}
8993
68e57f04
RS
8994/* Return true if STMT is known to compute a non-negative value.
8995 If the return value is based on the assumption that signed overflow is
8996 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
8997 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
8998
8999bool
9000gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
9001 int depth)
9002{
9003 switch (gimple_code (stmt))
9004 {
9005 case GIMPLE_ASSIGN:
9006 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
9007 depth);
9008 case GIMPLE_CALL:
9009 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
9010 depth);
4534c203
RB
9011 case GIMPLE_PHI:
9012 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
9013 depth);
68e57f04
RS
9014 default:
9015 return false;
9016 }
9017}
67dbe582
RS
9018
9019/* Return true if the floating-point value computed by assignment STMT
9020 is known to have an integer value. We also allow +Inf, -Inf and NaN
5a00b0aa 9021 to be considered integer values. Return false for signaling NaN.
67dbe582
RS
9022
9023 DEPTH is the current nesting depth of the query. */
9024
9025static bool
9026gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
9027{
9028 enum tree_code code = gimple_assign_rhs_code (stmt);
9029 switch (get_gimple_rhs_class (code))
9030 {
9031 case GIMPLE_UNARY_RHS:
9032 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
9033 gimple_assign_rhs1 (stmt), depth);
9034 case GIMPLE_BINARY_RHS:
9035 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
9036 gimple_assign_rhs1 (stmt),
9037 gimple_assign_rhs2 (stmt), depth);
9038 case GIMPLE_TERNARY_RHS:
9039 return false;
9040 case GIMPLE_SINGLE_RHS:
9041 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
9042 case GIMPLE_INVALID_RHS:
9043 break;
9044 }
9045 gcc_unreachable ();
9046}
9047
9048/* Return true if the floating-point value computed by call STMT is known
9049 to have an integer value. We also allow +Inf, -Inf and NaN to be
5a00b0aa 9050 considered integer values. Return false for signaling NaN.
67dbe582
RS
9051
9052 DEPTH is the current nesting depth of the query. */
9053
9054static bool
9055gimple_call_integer_valued_real_p (gimple *stmt, int depth)
9056{
9057 tree arg0 = (gimple_call_num_args (stmt) > 0
9058 ? gimple_call_arg (stmt, 0)
9059 : NULL_TREE);
9060 tree arg1 = (gimple_call_num_args (stmt) > 1
9061 ? gimple_call_arg (stmt, 1)
9062 : NULL_TREE);
1d9da71f 9063 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
67dbe582
RS
9064 arg0, arg1, depth);
9065}
9066
9067/* Return true if the floating-point result of phi STMT is known to have
9068 an integer value. We also allow +Inf, -Inf and NaN to be considered
5a00b0aa 9069 integer values. Return false for signaling NaN.
67dbe582
RS
9070
9071 DEPTH is the current nesting depth of the query. */
9072
9073static bool
9074gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
9075{
9076 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
9077 {
9078 tree arg = gimple_phi_arg_def (stmt, i);
9079 if (!integer_valued_real_single_p (arg, depth + 1))
9080 return false;
9081 }
9082 return true;
9083}
9084
9085/* Return true if the floating-point value computed by STMT is known
9086 to have an integer value. We also allow +Inf, -Inf and NaN to be
5a00b0aa 9087 considered integer values. Return false for signaling NaN.
67dbe582
RS
9088
9089 DEPTH is the current nesting depth of the query. */
9090
9091bool
9092gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
9093{
9094 switch (gimple_code (stmt))
9095 {
9096 case GIMPLE_ASSIGN:
9097 return gimple_assign_integer_valued_real_p (stmt, depth);
9098 case GIMPLE_CALL:
9099 return gimple_call_integer_valued_real_p (stmt, depth);
9100 case GIMPLE_PHI:
9101 return gimple_phi_integer_valued_real_p (stmt, depth);
9102 default:
9103 return false;
9104 }
9105}