]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/gimple-fold.c
c++: consteval-defarg1.C test variant for templates
[thirdparty/gcc.git] / gcc / gimple-fold.c
CommitLineData
cbdd87d4 1/* Statement simplification on GIMPLE.
8d9254fc 2 Copyright (C) 2010-2020 Free Software Foundation, Inc.
cbdd87d4
RG
3 Split out from tree-ssa-ccp.c.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it
8under the terms of the GNU General Public License as published by the
9Free Software Foundation; either version 3, or (at your option) any
10later version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT
13ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
c7131fb2 24#include "backend.h"
957060b5
AM
25#include "target.h"
26#include "rtl.h"
cbdd87d4 27#include "tree.h"
c7131fb2 28#include "gimple.h"
957060b5 29#include "predict.h"
c7131fb2 30#include "ssa.h"
957060b5
AM
31#include "cgraph.h"
32#include "gimple-pretty-print.h"
cc8bea0a 33#include "gimple-ssa-warn-restrict.h"
c7131fb2 34#include "fold-const.h"
36566b39
PK
35#include "stmt.h"
36#include "expr.h"
37#include "stor-layout.h"
7ee2468b 38#include "dumpfile.h"
2fb9a547 39#include "gimple-fold.h"
45b0be94 40#include "gimplify.h"
5be5c238 41#include "gimple-iterator.h"
442b4905
AM
42#include "tree-into-ssa.h"
43#include "tree-dfa.h"
025d57f0 44#include "tree-object-size.h"
7a300452 45#include "tree-ssa.h"
cbdd87d4 46#include "tree-ssa-propagate.h"
450ad0cd 47#include "ipa-utils.h"
4484a35a 48#include "tree-ssa-address.h"
862d0b35 49#include "langhooks.h"
19e51b40 50#include "gimplify-me.h"
2b5f0895 51#include "dbgcnt.h"
9b2b7279 52#include "builtins.h"
e0ee10ed
RB
53#include "tree-eh.h"
54#include "gimple-match.h"
48126138 55#include "gomp-constants.h"
f869c12f 56#include "optabs-query.h"
629b3d75 57#include "omp-general.h"
abd3a68c 58#include "tree-cfg.h"
a918bfbf 59#include "fold-const-call.h"
314e6352
ML
60#include "stringpool.h"
61#include "attribs.h"
45b2222a 62#include "asan.h"
025d57f0
MS
63#include "diagnostic-core.h"
64#include "intl.h"
6a33d0ff 65#include "calls.h"
5ebaa477 66#include "tree-vector-builder.h"
5d0d5d68 67#include "tree-ssa-strlen.h"
e7868dc6 68#include "varasm.h"
cbdd87d4 69
/* The kind of string length (or value) computation that
   get_range_strlen and its callers should perform.  */

enum strlen_range_kind {
  /* Compute the exact constant string length.  */
  SRK_STRLEN,
  /* Compute the maximum constant string length.  */
  SRK_STRLENMAX,
  /* Compute a range of string lengths bounded by object sizes.  When
     the length of a string cannot be determined, consider as the upper
     bound the size of the enclosing object the string may be a member
     or element of.  Also determine the size of the largest character
     array the string may refer to.  */
  SRK_LENRANGE,
  /* Determine the integer value of the argument (not string length).  */
  SRK_INT_VALUE
};
/* Forward declaration; see the definition below for details.  */
static bool
get_range_strlen (tree, bitmap *, strlen_range_kind, c_strlen_data *, unsigned);
/* Return true when DECL can be referenced from current unit.
   FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
	In this case we have static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred by external vtables that
	we devirtualize only during final compilation stage.
	At this time we already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || !VAR_OR_FUNCTION_DECL_P (decl))
    return true;

  /* Static objects can be referred only if they are defined and not optimized
     out yet.  */
  if (!TREE_PUBLIC (decl))
    {
      if (DECL_EXTERNAL (decl))
	return false;
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      /* An inlined-to body no longer exists on its own; we cannot refer
	 to it directly.  */
      return !node || !node->inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var or var that has been optimized out.  */
  if (!from_decl
      || !VAR_P (from_decl)
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When function is public, we always can introduce new reference.
     Exception are the COMDAT functions where introducing a direct
     reference imply need to include function body in the current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have COMDAT.  We are going to check if we still have definition
     or if the definition is going to be output in other partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when corresponding vtable is output.
     This is however not possible - ABI specify that COMDATs are output in
     units where they are used and when the other unit was compiled with LTO
     it is possible that vtable was kept public while the function itself
     was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->inlined_to;
}

a15ebbcd
ML
191/* Create a temporary for TYPE for a statement STMT. If the current function
192 is in SSA form, a SSA name is created. Otherwise a temporary register
193 is made. */
194
edc19e03
WS
195tree
196create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
a15ebbcd
ML
197{
198 if (gimple_in_ssa_p (cfun))
199 return make_ssa_name (type, stmt);
200 else
201 return create_tmp_reg (type);
202}
203
/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it into
   acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL.
   Return the canonicalized value, NULL_TREE if the referenced decl cannot
   be used from the current unit, or the original value when nothing could
   be done.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  if (CONSTANT_CLASS_P (cval))
    return cval;

  tree orig_cval = cval;
  STRIP_NOPS (cval);
  /* Turn &p + CST into &MEM[p, CST] so it becomes a single invariant
     address.  */
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      /* Taking the address is only valid if the symbol is referable
	 from this unit; see can_refer_decl_in_current_unit_p.  */
      if (VAR_OR_FUNCTION_DECL_P (base)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_TYPE (base) == error_mark_node)
	return NULL_TREE;
      if (VAR_P (base))
	TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  /* In CONSTRUCTORs we may see unfolded constants like (int (*) ()) 0.  */
  if (TREE_CODE (cval) == INTEGER_CST)
    {
      if (TREE_OVERFLOW_P (cval))
	cval = drop_tree_overflow (cval);
      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  return orig_cval;
}

/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  tree val = ctor_for_folding (sym);
  /* error_mark_node means the initializer must not be used for folding.  */
  if (val != error_mark_node)
    {
      if (val)
	{
	  val = canonicalize_constructor_val (unshare_expr (val), sym);
	  if (val && is_gimple_min_invariant (val))
	    return val;
	  else
	    return NULL_TREE;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && is_gimple_reg_type (TREE_TYPE (sym)))
	return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}
303
304
cbdd87d4
RG
305
306/* Subroutine of fold_stmt. We perform several simplifications of the
307 memory reference tree EXPR and make sure to re-gimplify them properly
308 after propagation of constant addresses. IS_LHS is true if the
309 reference is supposed to be an lvalue. */
310
311static tree
312maybe_fold_reference (tree expr, bool is_lhs)
313{
17f39a39 314 tree result;
cbdd87d4 315
f0eddb90
RG
316 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
317 || TREE_CODE (expr) == REALPART_EXPR
318 || TREE_CODE (expr) == IMAGPART_EXPR)
319 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
320 return fold_unary_loc (EXPR_LOCATION (expr),
321 TREE_CODE (expr),
322 TREE_TYPE (expr),
323 TREE_OPERAND (expr, 0));
324 else if (TREE_CODE (expr) == BIT_FIELD_REF
325 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
326 return fold_ternary_loc (EXPR_LOCATION (expr),
327 TREE_CODE (expr),
328 TREE_TYPE (expr),
329 TREE_OPERAND (expr, 0),
330 TREE_OPERAND (expr, 1),
331 TREE_OPERAND (expr, 2));
332
f0eddb90
RG
333 if (!is_lhs
334 && (result = fold_const_aggregate_ref (expr))
335 && is_gimple_min_invariant (result))
336 return result;
cbdd87d4 337
cbdd87d4
RG
338 return NULL_TREE;
339}


/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	/* Clobbers are not foldable.  */
	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		/* Try to resolve the virtual call to a unique target.  */
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We cannot use __builtin_unreachable here because it
			 cannot have address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }
	  }

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));

	    if (result)
	      {
		/* Strip away useless type conversions.  Both the
		   NON_LVALUE_EXPR that may have been added by fold, and
		   "useless" type conversions that might now be apparent
		   due to propagation.  */
		STRIP_USELESS_TYPE_CONVERSION (result);

		if (result != rhs && valid_gimple_rhs_p (result))
		  return result;
	      }
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (! CONSTANT_CLASS_P (val))
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return get_symbol_constant_value (rhs);
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
				 TREE_TYPE (gimple_assign_lhs (stmt)),
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}


/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  /* The last store in the sequence reuses the VDEF of the
	     replaced statement; earlier stores get fresh SSA names.  */
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}

/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), false);
	  return;
	}
    }
  else
    {
      /* Gimplify EXPR and append an assignment of its value to LHS.  */
      tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  gsi_replace_with_seq_vops (si_p, stmts);
}
cbdd87d4 609
fef5a0d9
RB
610
611/* Replace the call at *GSI with the gimple value VAL. */
612
e3174bdf 613void
fef5a0d9
RB
614replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
615{
355fe088 616 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9 617 tree lhs = gimple_call_lhs (stmt);
355fe088 618 gimple *repl;
fef5a0d9 619 if (lhs)
e256dfce 620 {
fef5a0d9
RB
621 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
622 val = fold_convert (TREE_TYPE (lhs), val);
623 repl = gimple_build_assign (lhs, val);
624 }
625 else
626 repl = gimple_build_nop ();
627 tree vdef = gimple_vdef (stmt);
628 if (vdef && TREE_CODE (vdef) == SSA_NAME)
629 {
630 unlink_stmt_vdef (stmt);
631 release_ssa_name (vdef);
632 }
f6b4dc28 633 gsi_replace (gsi, repl, false);
fef5a0d9
RB
634}

/* Replace the call at *GSI with the new call REPL and fold that
   again.  */

static void
replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
{
  gimple *stmt = gsi_stmt (*gsi);
  /* Preserve the original call's LHS, location and virtual operands
     on the replacement before swapping them.  */
  gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
  gimple_set_location (repl, gimple_location (stmt));
  gimple_move_vops (repl, stmt);
  gsi_replace (gsi, repl, false);
  fold_stmt (gsi);
}
649
650/* Return true if VAR is a VAR_DECL or a component thereof. */
651
652static bool
653var_decl_component_p (tree var)
654{
655 tree inner = var;
656 while (handled_component_p (inner))
657 inner = TREE_OPERAND (inner, 0);
47cac108
RB
658 return (DECL_P (inner)
659 || (TREE_CODE (inner) == MEM_REF
660 && TREE_CODE (TREE_OPERAND (inner, 0)) == ADDR_EXPR));
fef5a0d9
RB
661}

/* Return TRUE if the SIZE argument, representing the size of an
   object, is in a range of values of which exactly zero is valid.  */

static bool
size_must_be_zero_p (tree size)
{
  if (integer_zerop (size))
    return true;

  if (TREE_CODE (size) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (size)))
    return false;

  tree type = TREE_TYPE (size);
  int prec = TYPE_PRECISION (type);

  /* Compute the value of SSIZE_MAX, the largest positive value that
     can be stored in ssize_t, the signed counterpart of size_t.  */
  wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
  /* Sizes outside [0, SSIZE_MAX] are invalid; if after intersecting
     SIZE's known range with the valid range only zero remains, the
     only valid size is zero.  */
  value_range valid_range (build_int_cst (type, 0),
			   wide_int_to_tree (type, ssize_max));
  value_range vr;
  get_range_info (size, vr);
  vr.intersect (&valid_range);
  return vr.zero_p ();
}
688
cc8bea0a
MS
689/* Fold function call to builtin mem{{,p}cpy,move}. Try to detect and
690 diagnose (otherwise undefined) overlapping copies without preventing
691 folding. When folded, GCC guarantees that overlapping memcpy has
692 the same semantics as memmove. Call to the library memcpy need not
693 provide the same guarantee. Return false if no simplification can
694 be made. */
fef5a0d9
RB
695
696static bool
697gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
0d67a510 698 tree dest, tree src, enum built_in_function code)
fef5a0d9 699{
355fe088 700 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9
RB
701 tree lhs = gimple_call_lhs (stmt);
702 tree len = gimple_call_arg (stmt, 2);
fef5a0d9
RB
703 location_t loc = gimple_location (stmt);
704
6512c0f1
MS
705 /* If the LEN parameter is a constant zero or in range where
706 the only valid value is zero, return DEST. */
707 if (size_must_be_zero_p (len))
fef5a0d9 708 {
355fe088 709 gimple *repl;
fef5a0d9
RB
710 if (gimple_call_lhs (stmt))
711 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
712 else
713 repl = gimple_build_nop ();
714 tree vdef = gimple_vdef (stmt);
715 if (vdef && TREE_CODE (vdef) == SSA_NAME)
e256dfce 716 {
fef5a0d9
RB
717 unlink_stmt_vdef (stmt);
718 release_ssa_name (vdef);
719 }
f6b4dc28 720 gsi_replace (gsi, repl, false);
fef5a0d9
RB
721 return true;
722 }
723
724 /* If SRC and DEST are the same (and not volatile), return
725 DEST{,+LEN,+LEN-1}. */
726 if (operand_equal_p (src, dest, 0))
727 {
cc8bea0a
MS
728 /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
729 It's safe and may even be emitted by GCC itself (see bug
e9b9fa4c 730 32667). */
fef5a0d9
RB
731 unlink_stmt_vdef (stmt);
732 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
733 release_ssa_name (gimple_vdef (stmt));
734 if (!lhs)
735 {
f6b4dc28 736 gsi_replace (gsi, gimple_build_nop (), false);
fef5a0d9
RB
737 return true;
738 }
739 goto done;
740 }
741 else
742 {
b541b871
EB
743 /* We cannot (easily) change the type of the copy if it is a storage
744 order barrier, i.e. is equivalent to a VIEW_CONVERT_EXPR that can
745 modify the storage order of objects (see storage_order_barrier_p). */
746 tree srctype
747 = POINTER_TYPE_P (TREE_TYPE (src))
748 ? TREE_TYPE (TREE_TYPE (src)) : NULL_TREE;
749 tree desttype
750 = POINTER_TYPE_P (TREE_TYPE (dest))
751 ? TREE_TYPE (TREE_TYPE (dest)) : NULL_TREE;
752 tree destvar, srcvar, srcoff;
fef5a0d9 753 unsigned int src_align, dest_align;
d01b568a 754 unsigned HOST_WIDE_INT tmp_len;
b541b871 755 const char *tmp_str;
fef5a0d9
RB
756
757 /* Build accesses at offset zero with a ref-all character type. */
b541b871
EB
758 tree off0
759 = build_int_cst (build_pointer_type_for_mode (char_type_node,
760 ptr_mode, true), 0);
fef5a0d9
RB
761
762 /* If we can perform the copy efficiently with first doing all loads
763 and then all stores inline it that way. Currently efficiently
764 means that we can load all the memory into a single integer
765 register which is what MOVE_MAX gives us. */
766 src_align = get_pointer_alignment (src);
767 dest_align = get_pointer_alignment (dest);
768 if (tree_fits_uhwi_p (len)
769 && compare_tree_int (len, MOVE_MAX) <= 0
14b7950f
MS
770 /* FIXME: Don't transform copies from strings with known length.
771 Until GCC 9 this prevented a case in gcc.dg/strlenopt-8.c
772 from being handled, and the case was XFAILed for that reason.
773 Now that it is handled and the XFAIL removed, as soon as other
774 strlenopt tests that rely on it for passing are adjusted, this
775 hack can be removed. */
776 && !c_strlen (src, 1)
866626ef 777 && !((tmp_str = getbyterep (src, &tmp_len)) != NULL
b541b871
EB
778 && memchr (tmp_str, 0, tmp_len) == NULL)
779 && !(srctype
780 && AGGREGATE_TYPE_P (srctype)
781 && TYPE_REVERSE_STORAGE_ORDER (srctype))
782 && !(desttype
783 && AGGREGATE_TYPE_P (desttype)
784 && TYPE_REVERSE_STORAGE_ORDER (desttype)))
fef5a0d9
RB
785 {
786 unsigned ilen = tree_to_uhwi (len);
146ec50f 787 if (pow2p_hwi (ilen))
fef5a0d9 788 {
213694e5
MS
789 /* Detect out-of-bounds accesses without issuing warnings.
790 Avoid folding out-of-bounds copies but to avoid false
791 positives for unreachable code defer warning until after
792 DCE has worked its magic.
793 -Wrestrict is still diagnosed. */
794 if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
795 dest, src, len, len,
796 false, false))
797 if (warning != OPT_Wrestrict)
798 return false;
cc8bea0a 799
64ab8765 800 scalar_int_mode mode;
fef5a0d9
RB
801 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
802 if (type
64ab8765
RS
803 && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
804 && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
fef5a0d9
RB
805 /* If the destination pointer is not aligned we must be able
806 to emit an unaligned store. */
64ab8765 807 && (dest_align >= GET_MODE_ALIGNMENT (mode)
e0bd6c9f 808 || !targetm.slow_unaligned_access (mode, dest_align)
64ab8765 809 || (optab_handler (movmisalign_optab, mode)
f869c12f 810 != CODE_FOR_nothing)))
fef5a0d9
RB
811 {
812 tree srctype = type;
813 tree desttype = type;
64ab8765 814 if (src_align < GET_MODE_ALIGNMENT (mode))
fef5a0d9
RB
815 srctype = build_aligned_type (type, src_align);
816 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
817 tree tem = fold_const_aggregate_ref (srcmem);
818 if (tem)
819 srcmem = tem;
64ab8765 820 else if (src_align < GET_MODE_ALIGNMENT (mode)
e0bd6c9f 821 && targetm.slow_unaligned_access (mode, src_align)
64ab8765 822 && (optab_handler (movmisalign_optab, mode)
f869c12f 823 == CODE_FOR_nothing))
fef5a0d9
RB
824 srcmem = NULL_TREE;
825 if (srcmem)
826 {
355fe088 827 gimple *new_stmt;
fef5a0d9
RB
828 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
829 {
830 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
a15ebbcd
ML
831 srcmem
832 = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
833 new_stmt);
fef5a0d9
RB
834 gimple_assign_set_lhs (new_stmt, srcmem);
835 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
836 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
837 }
64ab8765 838 if (dest_align < GET_MODE_ALIGNMENT (mode))
fef5a0d9
RB
839 desttype = build_aligned_type (type, dest_align);
840 new_stmt
841 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
842 dest, off0),
843 srcmem);
779724a5 844 gimple_move_vops (new_stmt, stmt);
fef5a0d9
RB
845 if (!lhs)
846 {
f6b4dc28 847 gsi_replace (gsi, new_stmt, false);
fef5a0d9
RB
848 return true;
849 }
850 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
851 goto done;
852 }
853 }
854 }
855 }
856
0d67a510 857 if (code == BUILT_IN_MEMMOVE)
fef5a0d9
RB
858 {
859 /* Both DEST and SRC must be pointer types.
860 ??? This is what old code did. Is the testing for pointer types
861 really mandatory?
862
863 If either SRC is readonly or length is 1, we can use memcpy. */
864 if (!dest_align || !src_align)
865 return false;
866 if (readonly_data_expr (src)
867 || (tree_fits_uhwi_p (len)
868 && (MIN (src_align, dest_align) / BITS_PER_UNIT
869 >= tree_to_uhwi (len))))
870 {
871 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
872 if (!fn)
873 return false;
874 gimple_call_set_fndecl (stmt, fn);
875 gimple_call_set_arg (stmt, 0, dest);
876 gimple_call_set_arg (stmt, 1, src);
877 fold_stmt (gsi);
878 return true;
879 }
880
881 /* If *src and *dest can't overlap, optimize into memcpy as well. */
882 if (TREE_CODE (src) == ADDR_EXPR
883 && TREE_CODE (dest) == ADDR_EXPR)
884 {
885 tree src_base, dest_base, fn;
a90c8804
RS
886 poly_int64 src_offset = 0, dest_offset = 0;
887 poly_uint64 maxsize;
fef5a0d9
RB
888
889 srcvar = TREE_OPERAND (src, 0);
4fda19ef
JJ
890 src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
891 if (src_base == NULL)
892 src_base = srcvar;
fef5a0d9 893 destvar = TREE_OPERAND (dest, 0);
4fda19ef
JJ
894 dest_base = get_addr_base_and_unit_offset (destvar,
895 &dest_offset);
896 if (dest_base == NULL)
897 dest_base = destvar;
a90c8804 898 if (!poly_int_tree_p (len, &maxsize))
fef5a0d9 899 maxsize = -1;
fef5a0d9
RB
900 if (SSA_VAR_P (src_base)
901 && SSA_VAR_P (dest_base))
902 {
903 if (operand_equal_p (src_base, dest_base, 0)
a90c8804
RS
904 && ranges_maybe_overlap_p (src_offset, maxsize,
905 dest_offset, maxsize))
fef5a0d9
RB
906 return false;
907 }
908 else if (TREE_CODE (src_base) == MEM_REF
909 && TREE_CODE (dest_base) == MEM_REF)
910 {
911 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
912 TREE_OPERAND (dest_base, 0), 0))
913 return false;
a90c8804
RS
914 poly_offset_int full_src_offset
915 = mem_ref_offset (src_base) + src_offset;
916 poly_offset_int full_dest_offset
917 = mem_ref_offset (dest_base) + dest_offset;
918 if (ranges_maybe_overlap_p (full_src_offset, maxsize,
919 full_dest_offset, maxsize))
fef5a0d9
RB
920 return false;
921 }
922 else
923 return false;
924
925 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
926 if (!fn)
927 return false;
928 gimple_call_set_fndecl (stmt, fn);
929 gimple_call_set_arg (stmt, 0, dest);
930 gimple_call_set_arg (stmt, 1, src);
931 fold_stmt (gsi);
932 return true;
933 }
934
935 /* If the destination and source do not alias optimize into
936 memcpy as well. */
937 if ((is_gimple_min_invariant (dest)
938 || TREE_CODE (dest) == SSA_NAME)
939 && (is_gimple_min_invariant (src)
940 || TREE_CODE (src) == SSA_NAME))
941 {
942 ao_ref destr, srcr;
943 ao_ref_init_from_ptr_and_size (&destr, dest, len);
944 ao_ref_init_from_ptr_and_size (&srcr, src, len);
945 if (!refs_may_alias_p_1 (&destr, &srcr, false))
946 {
947 tree fn;
948 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
949 if (!fn)
950 return false;
951 gimple_call_set_fndecl (stmt, fn);
952 gimple_call_set_arg (stmt, 0, dest);
953 gimple_call_set_arg (stmt, 1, src);
954 fold_stmt (gsi);
955 return true;
956 }
957 }
958
959 return false;
960 }
961
962 if (!tree_fits_shwi_p (len))
963 return false;
b541b871
EB
964 if (!srctype
965 || (AGGREGATE_TYPE_P (srctype)
966 && TYPE_REVERSE_STORAGE_ORDER (srctype)))
967 return false;
968 if (!desttype
969 || (AGGREGATE_TYPE_P (desttype)
970 && TYPE_REVERSE_STORAGE_ORDER (desttype)))
fef5a0d9
RB
971 return false;
972 /* In the following try to find a type that is most natural to be
973 used for the memcpy source and destination and that allows
974 the most optimization when memcpy is turned into a plain assignment
975 using that type. In theory we could always use a char[len] type
976 but that only gains us that the destination and source possibly
977 no longer will have their address taken. */
fef5a0d9
RB
978 if (TREE_CODE (srctype) == ARRAY_TYPE
979 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
42f74245 980 srctype = TREE_TYPE (srctype);
fef5a0d9
RB
981 if (TREE_CODE (desttype) == ARRAY_TYPE
982 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
42f74245 983 desttype = TREE_TYPE (desttype);
fef5a0d9
RB
984 if (TREE_ADDRESSABLE (srctype)
985 || TREE_ADDRESSABLE (desttype))
986 return false;
987
988 /* Make sure we are not copying using a floating-point mode or
989 a type whose size possibly does not match its precision. */
990 if (FLOAT_MODE_P (TYPE_MODE (desttype))
991 || TREE_CODE (desttype) == BOOLEAN_TYPE
992 || TREE_CODE (desttype) == ENUMERAL_TYPE)
993 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
994 if (FLOAT_MODE_P (TYPE_MODE (srctype))
995 || TREE_CODE (srctype) == BOOLEAN_TYPE
996 || TREE_CODE (srctype) == ENUMERAL_TYPE)
997 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
998 if (!srctype)
999 srctype = desttype;
1000 if (!desttype)
1001 desttype = srctype;
1002 if (!srctype)
1003 return false;
1004
1005 src_align = get_pointer_alignment (src);
1006 dest_align = get_pointer_alignment (dest);
fef5a0d9 1007
5105b576
RB
1008 /* Choose between src and destination type for the access based
1009 on alignment, whether the access constitutes a register access
1010 and whether it may actually expose a declaration for SSA rewrite
e362a897
EB
1011 or SRA decomposition. Also try to expose a string constant, we
1012 might be able to concatenate several of them later into a single
1013 string store. */
42f74245 1014 destvar = NULL_TREE;
5105b576 1015 srcvar = NULL_TREE;
42f74245
RB
1016 if (TREE_CODE (dest) == ADDR_EXPR
1017 && var_decl_component_p (TREE_OPERAND (dest, 0))
5105b576
RB
1018 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len)
1019 && dest_align >= TYPE_ALIGN (desttype)
1020 && (is_gimple_reg_type (desttype)
1021 || src_align >= TYPE_ALIGN (desttype)))
42f74245 1022 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
5105b576
RB
1023 else if (TREE_CODE (src) == ADDR_EXPR
1024 && var_decl_component_p (TREE_OPERAND (src, 0))
1025 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
1026 && src_align >= TYPE_ALIGN (srctype)
1027 && (is_gimple_reg_type (srctype)
1028 || dest_align >= TYPE_ALIGN (srctype)))
1029 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
e362a897
EB
1030 /* FIXME: Don't transform copies from strings with known original length.
1031 As soon as strlenopt tests that rely on it for passing are adjusted,
1032 this hack can be removed. */
1033 else if (gimple_call_alloca_for_var_p (stmt)
1034 && (srcvar = string_constant (src, &srcoff, NULL, NULL))
1035 && integer_zerop (srcoff)
1036 && tree_int_cst_equal (TYPE_SIZE_UNIT (TREE_TYPE (srcvar)), len)
1037 && dest_align >= TYPE_ALIGN (TREE_TYPE (srcvar)))
1038 srctype = TREE_TYPE (srcvar);
1039 else
fef5a0d9
RB
1040 return false;
1041
5105b576
RB
1042 /* Now that we chose an access type express the other side in
1043 terms of it if the target allows that with respect to alignment
1044 constraints. */
fef5a0d9
RB
1045 if (srcvar == NULL_TREE)
1046 {
fef5a0d9
RB
1047 if (src_align >= TYPE_ALIGN (desttype))
1048 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1049 else
1050 {
1051 if (STRICT_ALIGNMENT)
1052 return false;
1053 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1054 src_align);
1055 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1056 }
1057 }
1058 else if (destvar == NULL_TREE)
1059 {
fef5a0d9
RB
1060 if (dest_align >= TYPE_ALIGN (srctype))
1061 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1062 else
1063 {
1064 if (STRICT_ALIGNMENT)
1065 return false;
1066 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1067 dest_align);
1068 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1069 }
1070 }
1071
213694e5
MS
1072 /* Same as above, detect out-of-bounds accesses without issuing
1073 warnings. Avoid folding out-of-bounds copies but to avoid
1074 false positives for unreachable code defer warning until
1075 after DCE has worked its magic.
1076 -Wrestrict is still diagnosed. */
1077 if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
1078 dest, src, len, len,
1079 false, false))
1080 if (warning != OPT_Wrestrict)
1081 return false;
cc8bea0a 1082
355fe088 1083 gimple *new_stmt;
fef5a0d9
RB
1084 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1085 {
921b13d0
RB
1086 tree tem = fold_const_aggregate_ref (srcvar);
1087 if (tem)
1088 srcvar = tem;
1089 if (! is_gimple_min_invariant (srcvar))
1090 {
1091 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
a15ebbcd
ML
1092 srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
1093 new_stmt);
921b13d0
RB
1094 gimple_assign_set_lhs (new_stmt, srcvar);
1095 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1096 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1097 }
d7257171
RB
1098 new_stmt = gimple_build_assign (destvar, srcvar);
1099 goto set_vop_and_replace;
fef5a0d9 1100 }
d7257171 1101
e362a897
EB
1102 /* We get an aggregate copy. If the source is a STRING_CST, then
1103 directly use its type to perform the copy. */
1104 if (TREE_CODE (srcvar) == STRING_CST)
1105 desttype = srctype;
1106
1107 /* Or else, use an unsigned char[] type to perform the copy in order
1108 to preserve padding and to avoid any issues with TREE_ADDRESSABLE
1109 types or float modes behavior on copying. */
1110 else
1111 {
1112 desttype = build_array_type_nelts (unsigned_char_type_node,
1113 tree_to_uhwi (len));
1114 srctype = desttype;
1115 if (src_align > TYPE_ALIGN (srctype))
1116 srctype = build_aligned_type (srctype, src_align);
1117 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1118 }
1119
d7257171
RB
1120 if (dest_align > TYPE_ALIGN (desttype))
1121 desttype = build_aligned_type (desttype, dest_align);
e362a897
EB
1122 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1123 new_stmt = gimple_build_assign (destvar, srcvar);
1124
d7257171 1125set_vop_and_replace:
779724a5 1126 gimple_move_vops (new_stmt, stmt);
fef5a0d9
RB
1127 if (!lhs)
1128 {
f6b4dc28 1129 gsi_replace (gsi, new_stmt, false);
fef5a0d9
RB
1130 return true;
1131 }
1132 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1133 }
1134
1135done:
74e3c262 1136 gimple_seq stmts = NULL;
0d67a510 1137 if (code == BUILT_IN_MEMCPY || code == BUILT_IN_MEMMOVE)
fef5a0d9 1138 len = NULL_TREE;
0d67a510 1139 else if (code == BUILT_IN_MEMPCPY)
74e3c262
RB
1140 {
1141 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1142 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1143 TREE_TYPE (dest), dest, len);
1144 }
0d67a510
ML
1145 else
1146 gcc_unreachable ();
fef5a0d9 1147
74e3c262 1148 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
355fe088 1149 gimple *repl = gimple_build_assign (lhs, dest);
f6b4dc28 1150 gsi_replace (gsi, repl, false);
fef5a0d9
RB
1151 return true;
1152}
1153
b3d8d88e
MS
1154/* Transform a call to built-in bcmp(a, b, len) at *GSI into one
1155 to built-in memcmp (a, b, len). */
1156
1157static bool
1158gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
1159{
1160 tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);
1161
1162 if (!fn)
1163 return false;
1164
1165 /* Transform bcmp (a, b, len) into memcmp (a, b, len). */
1166
1167 gimple *stmt = gsi_stmt (*gsi);
1168 tree a = gimple_call_arg (stmt, 0);
1169 tree b = gimple_call_arg (stmt, 1);
1170 tree len = gimple_call_arg (stmt, 2);
1171
1172 gimple *repl = gimple_build_call (fn, 3, a, b, len);
1173 replace_call_with_call_and_fold (gsi, repl);
1174
1175 return true;
1176}
1177
1178/* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
1179 to built-in memmove (dest, src, len). */
1180
1181static bool
1182gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
1183{
1184 tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);
1185
1186 if (!fn)
1187 return false;
1188
1189 /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
1190 it's quivalent to memmove (not memcpy). Transform bcopy (src, dest,
1191 len) into memmove (dest, src, len). */
1192
1193 gimple *stmt = gsi_stmt (*gsi);
1194 tree src = gimple_call_arg (stmt, 0);
1195 tree dest = gimple_call_arg (stmt, 1);
1196 tree len = gimple_call_arg (stmt, 2);
1197
1198 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1199 gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
1200 replace_call_with_call_and_fold (gsi, repl);
1201
1202 return true;
1203}
1204
1205/* Transform a call to built-in bzero (dest, len) at *GSI into one
1206 to built-in memset (dest, 0, len). */
1207
1208static bool
1209gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
1210{
1211 tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
1212
1213 if (!fn)
1214 return false;
1215
1216 /* Transform bzero (dest, len) into memset (dest, 0, len). */
1217
1218 gimple *stmt = gsi_stmt (*gsi);
1219 tree dest = gimple_call_arg (stmt, 0);
1220 tree len = gimple_call_arg (stmt, 1);
1221
1222 gimple_seq seq = NULL;
1223 gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
1224 gimple_seq_add_stmt_without_update (&seq, repl);
1225 gsi_replace_with_seq_vops (gsi, seq);
1226 fold_stmt (gsi);
1227
1228 return true;
1229}
1230
fef5a0d9
RB
1231/* Fold function call to builtin memset or bzero at *GSI setting the
1232 memory of size LEN to VAL. Return whether a simplification was made. */
1233
1234static bool
1235gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1236{
355fe088 1237 gimple *stmt = gsi_stmt (*gsi);
fef5a0d9
RB
1238 tree etype;
1239 unsigned HOST_WIDE_INT length, cval;
1240
1241 /* If the LEN parameter is zero, return DEST. */
1242 if (integer_zerop (len))
1243 {
1244 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1245 return true;
1246 }
1247
1248 if (! tree_fits_uhwi_p (len))
1249 return false;
1250
1251 if (TREE_CODE (c) != INTEGER_CST)
1252 return false;
1253
1254 tree dest = gimple_call_arg (stmt, 0);
1255 tree var = dest;
1256 if (TREE_CODE (var) != ADDR_EXPR)
1257 return false;
1258
1259 var = TREE_OPERAND (var, 0);
1260 if (TREE_THIS_VOLATILE (var))
1261 return false;
1262
1263 etype = TREE_TYPE (var);
1264 if (TREE_CODE (etype) == ARRAY_TYPE)
1265 etype = TREE_TYPE (etype);
1266
1267 if (!INTEGRAL_TYPE_P (etype)
1268 && !POINTER_TYPE_P (etype))
1269 return NULL_TREE;
1270
1271 if (! var_decl_component_p (var))
1272 return NULL_TREE;
1273
1274 length = tree_to_uhwi (len);
7a504f33 1275 if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
1ba9acb1
RB
1276 || (GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (etype))
1277 != GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (etype)))
fef5a0d9
RB
1278 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1279 return NULL_TREE;
1280
1281 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1282 return NULL_TREE;
1283
1ba9acb1
RB
1284 if (!type_has_mode_precision_p (etype))
1285 etype = lang_hooks.types.type_for_mode (SCALAR_INT_TYPE_MODE (etype),
1286 TYPE_UNSIGNED (etype));
1287
fef5a0d9
RB
1288 if (integer_zerop (c))
1289 cval = 0;
1290 else
1291 {
1292 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1293 return NULL_TREE;
1294
1295 cval = TREE_INT_CST_LOW (c);
1296 cval &= 0xff;
1297 cval |= cval << 8;
1298 cval |= cval << 16;
1299 cval |= (cval << 31) << 1;
1300 }
1301
1302 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
355fe088 1303 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
779724a5 1304 gimple_move_vops (store, stmt);
fef5a0d9
RB
1305 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1306 if (gimple_call_lhs (stmt))
1307 {
355fe088 1308 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
f6b4dc28 1309 gsi_replace (gsi, asgn, false);
fef5a0d9
RB
1310 }
1311 else
1312 {
1313 gimple_stmt_iterator gsi2 = *gsi;
1314 gsi_prev (gsi);
1315 gsi_remove (&gsi2, true);
1316 }
1317
1318 return true;
1319}
1320
/* Helper of get_range_strlen for ARG that is not an SSA_NAME.  Computes
   the length (or, for RKIND == SRK_INT_VALUE, the maximum constant
   value) that ARG may represent and folds it into *PDATA.  VISITED,
   RKIND, PDATA and ELTSIZE have the same meaning as in the SSA_NAME
   overload of get_range_strlen below.  Returns true on success.  */

static bool
get_range_strlen_tree (tree arg, bitmap *visited, strlen_range_kind rkind,
		       c_strlen_data *pdata, unsigned eltsize)
{
  gcc_assert (TREE_CODE (arg) != SSA_NAME);

  /* The length computed by this invocation of the function.  */
  tree val = NULL_TREE;

  /* True if VAL is an optimistic (tight) bound determined from
     the size of the character array in which the string may be
     stored.  In that case, the computed VAL is used to set
     PDATA->MAXBOUND.  */
  bool tight_bound = false;

  /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
    {
      tree op = TREE_OPERAND (arg, 0);
      if (integer_zerop (TREE_OPERAND (op, 1)))
	{
	  /* &a[0]: strip the array reference and recurse on the base
	     when it is a dereference of an SSA pointer.  */
	  tree aop0 = TREE_OPERAND (op, 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_range_strlen (TREE_OPERAND (aop0, 0), visited, rkind,
				     pdata, eltsize);
	}
      else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF
	       && rkind == SRK_LENRANGE)
	{
	  /* Fail if an array is the last member of a struct object
	     since it could be treated as a (fake) flexible array
	     member.  */
	  tree idx = TREE_OPERAND (op, 1);

	  arg = TREE_OPERAND (op, 0);
	  tree optype = TREE_TYPE (arg);
	  if (tree dom = TYPE_DOMAIN (optype))
	    if (tree bound = TYPE_MAX_VALUE (dom))
	      if (TREE_CODE (bound) == INTEGER_CST
		  && TREE_CODE (idx) == INTEGER_CST
		  && tree_int_cst_lt (bound, idx))
		return false;
	}
    }

  if (rkind == SRK_INT_VALUE)
    {
      /* We are computing the maximum value (not string length).  */
      val = arg;
      if (TREE_CODE (val) != INTEGER_CST
	  || tree_int_cst_sgn (val) < 0)
	return false;
    }
  else
    {
      c_strlen_data lendata = { };
      val = c_strlen (arg, 1, &lendata, eltsize);

      if (!val && lendata.decl)
	{
	  /* ARG refers to an unterminated const character array.
	     DATA.DECL with size DATA.LEN.  */
	  val = lendata.minlen;
	  pdata->decl = lendata.decl;
	}
    }

  /* Set if VAL represents the maximum length based on array size (set
     when exact length cannot be determined).  */
  bool maxbound = false;

  if (!val && rkind == SRK_LENRANGE)
    {
      if (TREE_CODE (arg) == ADDR_EXPR)
	return get_range_strlen (TREE_OPERAND (arg, 0), visited, rkind,
				 pdata, eltsize);

      if (TREE_CODE (arg) == ARRAY_REF)
	{
	  tree optype = TREE_TYPE (TREE_OPERAND (arg, 0));

	  /* Determine the "innermost" array type.  */
	  while (TREE_CODE (optype) == ARRAY_TYPE
		 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
	    optype = TREE_TYPE (optype);

	  /* Avoid arrays of pointers.  */
	  tree eltype = TREE_TYPE (optype);
	  if (TREE_CODE (optype) != ARRAY_TYPE
	      || !INTEGRAL_TYPE_P (eltype))
	    return false;

	  /* Fail when the array bound is unknown or zero.  */
	  val = TYPE_SIZE_UNIT (optype);
	  if (!val
	      || TREE_CODE (val) != INTEGER_CST
	      || integer_zerop (val))
	    return false;

	  /* The longest string the array can hold is one shorter than
	     the array itself (room for the terminating nul).  */
	  val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
			     integer_one_node);

	  /* Set the minimum size to zero since the string in
	     the array could have zero length.  */
	  pdata->minlen = ssize_int (0);

	  tight_bound = true;
	}
      else if (TREE_CODE (arg) == COMPONENT_REF
	       && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
		   == ARRAY_TYPE))
	{
	  /* Use the type of the member array to determine the upper
	     bound on the length of the array.  This may be overly
	     optimistic if the array itself isn't NUL-terminated and
	     the caller relies on the subsequent member to contain
	     the NUL but that would only be considered valid if
	     the array were the last member of a struct.  */

	  tree fld = TREE_OPERAND (arg, 1);

	  tree optype = TREE_TYPE (fld);

	  /* Determine the "innermost" array type.  */
	  while (TREE_CODE (optype) == ARRAY_TYPE
		 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
	    optype = TREE_TYPE (optype);

	  /* Fail when the array bound is unknown or zero.  */
	  val = TYPE_SIZE_UNIT (optype);
	  if (!val
	      || TREE_CODE (val) != INTEGER_CST
	      || integer_zerop (val))
	    return false;
	  val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
			     integer_one_node);

	  /* Set the minimum size to zero since the string in
	     the array could have zero length.  */
	  pdata->minlen = ssize_int (0);

	  /* The array size determined above is an optimistic bound
	     on the length.  If the array isn't nul-terminated the
	     length computed by the library function would be greater.
	     Even though using strlen to cross the subobject boundary
	     is undefined, avoid drawing conclusions from the member
	     type about the length here.  */
	  tight_bound = true;
	}
      else if (TREE_CODE (arg) == MEM_REF
	       && TREE_CODE (TREE_TYPE (arg)) == ARRAY_TYPE
	       && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == INTEGER_TYPE
	       && TREE_CODE (TREE_OPERAND (arg, 0)) == ADDR_EXPR)
	{
	  /* Handle a MEM_REF into a DECL accessing an array of integers,
	     being conservative about references to extern structures with
	     flexible array members that can be initialized to arbitrary
	     numbers of elements as an extension (static structs are okay).
	     FIXME: Make this less conservative -- see
	     component_ref_size in tree.c.  */
	  tree ref = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if ((TREE_CODE (ref) == PARM_DECL || VAR_P (ref))
	      && (decl_binds_to_current_def_p (ref)
		  || !array_at_struct_end_p (arg)))
	    {
	      /* Fail if the offset is out of bounds.  Such accesses
		 should be diagnosed at some point.  */
	      val = DECL_SIZE_UNIT (ref);
	      if (!val
		  || TREE_CODE (val) != INTEGER_CST
		  || integer_zerop (val))
		return false;

	      poly_offset_int psiz = wi::to_offset (val);
	      poly_offset_int poff = mem_ref_offset (arg);
	      if (known_le (psiz, poff))
		return false;

	      pdata->minlen = ssize_int (0);

	      /* Subtract the offset and one for the terminating nul.  */
	      psiz -= poff;
	      psiz -= 1;
	      val = wide_int_to_tree (TREE_TYPE (val), psiz);
	      /* Since VAL reflects the size of a declared object
		 rather the type of the access it is not a tight bound.  */
	    }
	}
      else if (TREE_CODE (arg) == PARM_DECL || VAR_P (arg))
	{
	  /* Avoid handling pointers to arrays.  GCC might misuse
	     a pointer to an array of one bound to point to an array
	     object of a greater bound.  */
	  tree argtype = TREE_TYPE (arg);
	  if (TREE_CODE (argtype) == ARRAY_TYPE)
	    {
	      val = TYPE_SIZE_UNIT (argtype);
	      if (!val
		  || TREE_CODE (val) != INTEGER_CST
		  || integer_zerop (val))
		return false;
	      val = wide_int_to_tree (TREE_TYPE (val),
				      wi::sub (wi::to_wide (val), 1));

	      /* Set the minimum size to zero since the string in
		 the array could have zero length.  */
	      pdata->minlen = ssize_int (0);
	    }
	}
      maxbound = true;
    }

  if (!val)
    return false;

  /* Adjust the lower bound on the string length as necessary.  */
  if (!pdata->minlen
      || (rkind != SRK_STRLEN
	  && TREE_CODE (pdata->minlen) == INTEGER_CST
	  && TREE_CODE (val) == INTEGER_CST
	  && tree_int_cst_lt (val, pdata->minlen)))
    pdata->minlen = val;

  if (pdata->maxbound && TREE_CODE (pdata->maxbound) == INTEGER_CST)
    {
      /* Adjust the tighter (more optimistic) string length bound
	 if necessary and proceed to adjust the more conservative
	 bound.  */
      if (TREE_CODE (val) == INTEGER_CST)
	{
	  if (tree_int_cst_lt (pdata->maxbound, val))
	    pdata->maxbound = val;
	}
      else
	pdata->maxbound = val;
    }
  else if (pdata->maxbound || maxbound)
    /* Set PDATA->MAXBOUND only if it either isn't INTEGER_CST or
       if VAL corresponds to the maximum length determined based
       on the type of the object.  */
    pdata->maxbound = val;

  if (tight_bound)
    {
      /* VAL computed above represents an optimistically tight bound
	 on the length of the string based on the referenced object's
	 or subobject's type.  Determine the conservative upper bound
	 based on the enclosing object's size if possible.  */
      if (rkind == SRK_LENRANGE)
	{
	  poly_int64 offset;
	  tree base = get_addr_base_and_unit_offset (arg, &offset);
	  if (!base)
	    {
	      /* When the call above fails due to a non-constant offset
		 assume the offset is zero and use the size of the whole
		 enclosing object instead.  */
	      base = get_base_address (arg);
	      offset = 0;
	    }
	  /* If the base object is a pointer no upper bound on the length
	     can be determined.  Otherwise the maximum length is equal to
	     the size of the enclosing object minus the offset of
	     the referenced subobject minus 1 (for the terminating nul).  */
	  tree type = TREE_TYPE (base);
	  if (TREE_CODE (type) == POINTER_TYPE
	      || (TREE_CODE (base) != PARM_DECL && !VAR_P (base))
	      || !(val = DECL_SIZE_UNIT (base)))
	    val = build_all_ones_cst (size_type_node);
	  else
	    {
	      val = DECL_SIZE_UNIT (base);
	      val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
				 size_int (offset + 1));
	    }
	}
      else
	return false;
    }

  if (pdata->maxlen)
    {
      /* Adjust the more conservative bound if possible/necessary
	 and fail otherwise.  */
      if (rkind != SRK_STRLEN)
	{
	  if (TREE_CODE (pdata->maxlen) != INTEGER_CST
	      || TREE_CODE (val) != INTEGER_CST)
	    return false;

	  if (tree_int_cst_lt (pdata->maxlen, val))
	    pdata->maxlen = val;
	  return true;
	}
      else if (simple_cst_equal (val, pdata->maxlen) != 1)
	{
	  /* Fail if the length of this ARG is different from that
	     previously determined from another ARG.  */
	  return false;
	}
    }

  pdata->maxlen = val;
  return rkind == SRK_LENRANGE || !integer_all_onesp (val);
}
1630
5d6655eb
MS
/* For an ARG referencing one or more strings, try to obtain the range
   of their lengths, or the size of the largest array ARG referes to if
   the range of lengths cannot be determined, and store all in *PDATA.
   For an integer ARG (when RKIND == SRK_INT_VALUE), try to determine
   the maximum constant value.
   If ARG is an SSA_NAME, follow its use-def chains.  When RKIND ==
   SRK_STRLEN, then if PDATA->MAXLEN is not equal to the determined
   length or if we are unable to determine the length, return false.
   VISITED is a bitmap of visited variables.
   RKIND determines the kind of value or range to obtain (see
   strlen_range_kind).
   Set PDATA->DECL if ARG refers to an unterminated constant array.
   On input, set ELTSIZE to 1 for normal single byte character strings,
   and either 2 or 4 for wide characer strings (the size of wchar_t).
   Return true if *PDATA was successfully populated and false otherwise.  */

static bool
get_range_strlen (tree arg, bitmap *visited,
		  strlen_range_kind rkind,
		  c_strlen_data *pdata, unsigned eltsize)
{

  /* Non-SSA trees are handled by the non-recursive helper.  */
  if (TREE_CODE (arg) != SSA_NAME)
    return get_range_strlen_tree (arg, visited, rkind, pdata, eltsize);

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  tree var = arg;
  gimple *def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
	 constant length or come from another SSA_NAME with a constant
	 length.  */
      if (gimple_assign_single_p (def_stmt)
	  || gimple_assign_unary_nop_p (def_stmt))
	{
	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  return get_range_strlen (rhs, visited, rkind, pdata, eltsize);
	}
      else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	{
	  /* A COND_EXPR contributes both of its arms; recurse into
	     each one.  */
	  tree ops[2] = { gimple_assign_rhs2 (def_stmt),
			  gimple_assign_rhs3 (def_stmt) };

	  for (unsigned int i = 0; i < 2; i++)
	    if (!get_range_strlen (ops[i], visited, rkind, pdata, eltsize))
	      {
		if (rkind != SRK_LENRANGE)
		  return false;
		/* Set the upper bound to the maximum to prevent
		   it from being adjusted in the next iteration but
		   leave MINLEN and the more conservative MAXBOUND
		   determined so far alone (or leave them null if
		   they haven't been set yet).  That the MINLEN is
		   in fact zero can be determined from MAXLEN being
		   unbounded but the discovered minimum is used for
		   diagnostics.  */
		pdata->maxlen = build_all_ones_cst (size_type_node);
	      }
	  return true;
	}
      return false;

    case GIMPLE_PHI:
      /* Unless RKIND == SRK_LENRANGE, all arguments of the PHI node
	 must have a constant length.  */
      for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
	{
	  tree arg = gimple_phi_arg (def_stmt, i)->def;

	  /* If this PHI has itself as an argument, we cannot
	     determine the string length of this argument.  However,
	     if we can find a constant string length for the other
	     PHI args then we can still be sure that this is a
	     constant string length.  So be optimistic and just
	     continue with the next argument.  */
	  if (arg == gimple_phi_result (def_stmt))
	    continue;

	  if (!get_range_strlen (arg, visited, rkind, pdata, eltsize))
	    {
	      if (rkind != SRK_LENRANGE)
		return false;
	      /* Set the upper bound to the maximum to prevent
		 it from being adjusted in the next iteration but
		 leave MINLEN and the more conservative MAXBOUND
		 determined so far alone (or leave them null if
		 they haven't been set yet).  That the MINLEN is
		 in fact zero can be determined from MAXLEN being
		 unbounded but the discovered minimum is used for
		 diagnostics.  */
	      pdata->maxlen = build_all_ones_cst (size_type_node);
	    }
	}
      return true;

    default:
      return false;
    }
}
5d6655eb 1743
97623b52
MS
1744/* Try to obtain the range of the lengths of the string(s) referenced
1745 by ARG, or the size of the largest array ARG refers to if the range
a7160771
MS
1746 of lengths cannot be determined, and store all in *PDATA which must
1747 be zero-initialized on input except PDATA->MAXBOUND may be set to
1748 a non-null tree node other than INTEGER_CST to request to have it
1749 set to the length of the longest string in a PHI. ELTSIZE is
1750 the expected size of the string element in bytes: 1 for char and
97623b52
MS
1751 some power of 2 for wide characters.
1752 Return true if the range [PDATA->MINLEN, PDATA->MAXLEN] is suitable
1753 for optimization. Returning false means that a nonzero PDATA->MINLEN
1754 doesn't reflect the true lower bound of the range when PDATA->MAXLEN
1755 is -1 (in that case, the actual range is indeterminate, i.e.,
1756 [0, PTRDIFF_MAX - 2]. */
88d0c3f0 1757
bool
get_range_strlen (tree arg, c_strlen_data *pdata, unsigned eltsize)
{
  bitmap visited = NULL;
  /* Remember the caller-provided MAXBOUND so we can tell below whether
     the recursive walk replaced it with a discovered bound.  */
  tree maxbound = pdata->maxbound;

  if (!get_range_strlen (arg, &visited, SRK_LENRANGE, pdata, eltsize))
    {
      /* On failure extend the length range to an impossible maximum
	 (a valid MAXLEN must be less than PTRDIFF_MAX - 1).  Other
	 members can stay unchanged regardless.  */
      pdata->minlen = ssize_int (0);
      pdata->maxlen = build_all_ones_cst (size_type_node);
    }
  else if (!pdata->minlen)
    pdata->minlen = ssize_int (0);

  /* If it's unchanged from its initial non-null value, set the conservative
     MAXBOUND to SIZE_MAX.  Otherwise leave it null (if it is null).  */
  if (maxbound && pdata->maxbound == maxbound)
    pdata->maxbound = build_all_ones_cst (size_type_node);

  if (visited)
    BITMAP_FREE (visited);

  /* A MAXLEN of all ones is the sentinel for "length unknown", so the
     range is only usable when MAXLEN is anything else.  */
  return !integer_all_onesp (pdata->maxlen);
}
1785
5d6655eb
MS
1786/* Return the maximum value for ARG given RKIND (see strlen_range_kind).
1787 For ARG of pointer types, NONSTR indicates if the caller is prepared
1788 to handle unterminated strings. For integer ARG and when RKIND ==
1789 SRK_INT_VALUE, NONSTR must be null.
e08341bb 1790
5d6655eb
MS
1791 If an unterminated array is discovered and our caller handles
1792 unterminated arrays, then bubble up the offending DECL and
e08341bb
MS
1793 return the maximum size. Otherwise return NULL. */
1794
598f7235
MS
static tree
get_maxval_strlen (tree arg, strlen_range_kind rkind, tree *nonstr = NULL)
{
  /* A non-null NONSTR is meaningless when determining the maximum
     value of an integer ARG.  */
  gcc_assert (rkind != SRK_INT_VALUE || nonstr == NULL);
  /* ARG must have an integral type when RKIND says so.  */
  gcc_assert (rkind != SRK_INT_VALUE || INTEGRAL_TYPE_P (TREE_TYPE (arg)));

  bitmap visited = NULL;

  /* Reset DATA.MAXLEN if the call fails or when DATA.MAXLEN
     is unbounded.  */
  c_strlen_data lendata = { };
  if (!get_range_strlen (arg, &visited, rkind, &lendata, /* eltsize = */1))
    lendata.maxlen = NULL_TREE;
  else if (lendata.maxlen && integer_all_onesp (lendata.maxlen))
    lendata.maxlen = NULL_TREE;

  if (visited)
    BITMAP_FREE (visited);

  if (nonstr)
    {
      /* For callers prepared to handle unterminated arrays set
	 *NONSTR to point to the declaration of the array and return
	 the maximum length/size.  */
      *nonstr = lendata.decl;
      return lendata.maxlen;
    }

  /* Fail if the constant array isn't nul-terminated.  */
  return lendata.decl ? NULL_TREE : lendata.maxlen;
}
1829
fef5a0d9
RB
1830
1831/* Fold function call to builtin strcpy with arguments DEST and SRC.
1832 If LEN is not NULL, it represents the length of the string to be
1833 copied. Return NULL_TREE if no simplification can be made. */
1834
static bool
gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
			    tree dest, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    {
      /* Issue -Wrestrict unless the pointers are null (those do
	 not point to objects and so do not indicate an overlap;
	 such calls could be the result of sanitization and jump
	 threading).  */
      if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
	{
	  tree func = gimple_call_fndecl (stmt);

	  warning_at (loc, OPT_Wrestrict,
		      "%qD source argument is the same as destination",
		      func);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  /* The strlen+memcpy expansion below trades size for speed; skip it
     when optimizing for size.  */
  if (optimize_function_for_size_p (cfun))
    return false;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  /* Set to non-null if ARG refers to an unterminated array.  */
  tree nonstr = NULL;
  tree len = get_maxval_strlen (src, SRK_STRLEN, &nonstr);

  if (nonstr)
    {
      /* Avoid folding calls with unterminated arrays.  */
      if (!gimple_no_warning_p (stmt))
	warn_string_no_nul (loc, NULL_TREE, "strcpy", src, nonstr);
      gimple_set_no_warning (stmt, true);
      return false;
    }

  if (!len)
    return false;

  /* Transform strcpy (DEST, SRC) into memcpy (DEST, SRC, LEN + 1),
     where the +1 accounts for the terminating nul.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1894
1895/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1896 If SLEN is not NULL, it represents the length of the source string.
1897 Return NULL_TREE if no simplification can be made. */
1898
static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
			     tree dest, tree src, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Avoid warning if the destination refers to an array/pointer
	 decorated with attribute nonstring.  */
      if (!nonstring)
	{
	  tree fndecl = gimple_call_fndecl (stmt);

	  /* Warn about the lack of nul termination: the result is not
	     a (nul-terminated) string.  */
	  tree slen = get_maxval_strlen (src, SRK_STRLEN);
	  if (slen && !integer_zerop (slen))
	    warning_at (loc, OPT_Wstringop_truncation,
			"%G%qD destination unchanged after copying no bytes "
			"from a string of length %E",
			stmt, fndecl, slen);
	  else
	    warning_at (loc, OPT_Wstringop_truncation,
			"%G%qD destination unchanged after copying no bytes",
			stmt, fndecl);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, SRK_STRLEN);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  /* The size of the source string including the terminating nul.  */
  tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (ssize, len))
    return false;

  /* Diagnose truncation that leaves the copy unterminated.  */
  maybe_diag_stxncpy_trunc (*gsi, src, len);

  /* OK transform into builtin memcpy.  */
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}
1969
71dea1dd
WD
1970/* Fold function call to builtin strchr or strrchr.
1971 If both arguments are constant, evaluate and fold the result,
1972 otherwise simplify str(r)chr (str, 0) into str + strlen (str).
912d9ec3
WD
1973 In general strlen is significantly faster than strchr
1974 due to being a simpler operation. */
static bool
gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree str = gimple_call_arg (stmt, 0);
  tree c = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  const char *p;
  char ch;

  /* Every replacement below assigns to the call's LHS; without one
     there is nothing useful to fold.  */
  if (!gimple_call_lhs (stmt))
    return false;

  /* Avoid folding if the first argument is not a nul-terminated array.
     Defer warning until later.  */
  if (!check_nul_terminated_array (NULL_TREE, str))
    return false;

  /* With a constant string and a constant character, evaluate the call
     at compile time.  */
  if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
    {
      const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);

      if (p1 == NULL)
	{
	  replace_call_with_value (gsi, integer_zero_node);
	  return true;
	}

      tree len = build_int_cst (size_type_node, p1 - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
					      POINTER_PLUS_EXPR, str, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* The strlen-based transform below only applies when searching
     for the terminating nul.  */
  if (!integer_zerop (c))
    return false;

  /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size.  */
  if (is_strrchr && optimize_function_for_size_p (cfun))
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);

      if (strchr_fn)
	{
	  gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}

      return false;
    }

  /* Otherwise simplify str(r)chr (str, 0) into str + strlen (str).  */
  tree len;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);

  if (!strlen_fn)
    return false;

  /* Create newstr = strlen (str).  */
  gimple_seq stmts = NULL;
  gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
  gimple_set_location (new_stmt, loc);
  len = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (new_stmt, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);

  /* Create (str p+ strlen (str)).  */
  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  POINTER_PLUS_EXPR, str, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);
  gsi_replace_with_seq_vops (gsi, stmts);
  /* gsi now points at the assignment to the lhs, get a
     stmt iterator to the strlen.
     ???  We can't use gsi_for_stmt as that doesn't work when the
     CFG isn't built yet.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
2058
c8952930
JJ
2059/* Fold function call to builtin strstr.
2060 If both arguments are constant, evaluate and fold the result,
2061 additionally fold strstr (x, "") into x and strstr (x, "c")
2062 into strchr (x, 'c'). */
static bool
gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  /* Every replacement below assigns to the call's LHS; without one
     there is nothing useful to fold.  */
  if (!gimple_call_lhs (stmt))
    return false;

  tree haystack = gimple_call_arg (stmt, 0);
  tree needle = gimple_call_arg (stmt, 1);

  /* Avoid folding if either argument is not a nul-terminated array.
     Defer warning until later.  */
  if (!check_nul_terminated_array (NULL_TREE, haystack)
      || !check_nul_terminated_array (NULL_TREE, needle))
    return false;

  /* All transforms below require a constant needle.  */
  const char *q = c_getstr (needle);
  if (q == NULL)
    return false;

  /* With both strings constant, evaluate the call at compile time.  */
  if (const char *p = c_getstr (haystack))
    {
      const char *r = strstr (p, q);

      if (r == NULL)
	{
	  replace_call_with_value (gsi, integer_zero_node);
	  return true;
	}

      tree len = build_int_cst (size_type_node, r - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt
	= gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
			       haystack, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* For strstr (x, "") return x.  */
  if (q[0] == '\0')
    {
      replace_call_with_value (gsi, haystack);
      return true;
    }

  /* Transform strstr (x, "c") into strchr (x, 'c').  */
  if (q[1] == '\0')
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (strchr_fn)
	{
	  tree c = build_int_cst (integer_type_node, q[0]);
	  gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
2125
fef5a0d9
RB
2126/* Simplify a call to the strcat builtin. DST and SRC are the arguments
2127 to the call.
2128
2129 Return NULL_TREE if no simplification was possible, otherwise return the
2130 simplified form of the call as a tree.
2131
2132 The simplified form may be a constant or other expression which
2133 computes the same value, but in a more efficient manner (including
2134 calls to other builtin functions).
2135
2136 The call may contain arguments which need to be evaluated, but
2137 which are not useful to determine the result of the call. In
2138 this case we return a chain of COMPOUND_EXPRs. The LHS of each
2139 COMPOUND_EXPR will be an argument which must be evaluated.
2140 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
2141 COMPOUND_EXPR in the chain will contain the tree for the simplified
2142 form of the builtin function call. */
2143
static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* The strlen+memcpy expansion below trades size for speed.  */
  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, SRK_STRLEN);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple *repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  newdst = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  /* Copy LEN + 1 bytes to include the terminating nul.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      /* strcat returns its first argument; materialize that for
	 callers that use the result.  */
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
	 stmt iterator to the memcpy call.
	 ???  We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
2217
07f1cf56
RB
2218/* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
2219 are the arguments to the call. */
2220
2221static bool
2222gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
2223{
355fe088 2224 gimple *stmt = gsi_stmt (*gsi);
07f1cf56
RB
2225 tree dest = gimple_call_arg (stmt, 0);
2226 tree src = gimple_call_arg (stmt, 1);
2227 tree size = gimple_call_arg (stmt, 2);
2228 tree fn;
2229 const char *p;
2230
2231
2232 p = c_getstr (src);
2233 /* If the SRC parameter is "", return DEST. */
2234 if (p && *p == '\0')
2235 {
2236 replace_call_with_value (gsi, dest);
2237 return true;
2238 }
2239
2240 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
2241 return false;
2242
2243 /* If __builtin_strcat_chk is used, assume strcat is available. */
2244 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
2245 if (!fn)
2246 return false;
2247
355fe088 2248 gimple *repl = gimple_build_call (fn, 2, dest, src);
07f1cf56
RB
2249 replace_call_with_call_and_fold (gsi, repl);
2250 return true;
2251}
2252
ad03a744
RB
2253/* Simplify a call to the strncat builtin. */
2254
static bool
gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dst = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  const char *p = c_getstr (src);

  /* If the requested length is zero, or the src parameter string
     length is zero, return the dst parameter.  */
  if (integer_zerop (len) || (p && *p == '\0'))
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* The transform to strcat below needs both a constant bound and
     a constant source string.  */
  if (TREE_CODE (len) != INTEGER_CST || !p)
    return false;

  unsigned srclen = strlen (p);

  int cmpsrc = compare_tree_int (len, srclen);

  /* Return early if the requested len is less than the string length.
     Warnings will be issued elsewhere later.  */
  if (cmpsrc < 0)
    return false;

  unsigned HOST_WIDE_INT dstsize;

  bool nowarn = gimple_no_warning_p (stmt);

  if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
    {
      int cmpdst = compare_tree_int (len, dstsize);

      if (cmpdst >= 0)
	{
	  tree fndecl = gimple_call_fndecl (stmt);

	  /* Strncat copies (at most) LEN bytes and always appends
	     the terminating NUL so the specified bound should never
	     be equal to (or greater than) the size of the destination.
	     If it is, the copy could overflow.  */
	  location_t loc = gimple_location (stmt);
	  nowarn = warning_at (loc, OPT_Wstringop_overflow_,
			       cmpdst == 0
			       ? G_("%G%qD specified bound %E equals "
				    "destination size")
			       : G_("%G%qD specified bound %E exceeds "
				    "destination size %wu"),
			       stmt, fndecl, len, dstsize);
	  if (nowarn)
	    gimple_set_no_warning (stmt, true);
	}
    }

  if (!nowarn && cmpsrc == 0)
    {
      tree fndecl = gimple_call_fndecl (stmt);
      location_t loc = gimple_location (stmt);

      /* To avoid possible overflow the specified bound should also
	 not be equal to the length of the source, even when the size
	 of the destination is unknown (it's not an uncommon mistake
	 to specify as the bound to strncpy the length of the source).  */
      if (warning_at (loc, OPT_Wstringop_overflow_,
		      "%G%qD specified bound %E equals source length",
		      stmt, fndecl, len))
	gimple_set_no_warning (stmt, true);
    }

  tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);

  /* If the replacement _DECL isn't initialized, don't do the
     transformation.  */
  if (!fn)
    return false;

  /* Otherwise, emit a call to strcat.  This is valid because
     LEN >= strlen (SRC) was established above, so the bound cannot
     truncate the copy.  */
  gcall *repl = gimple_build_call (fn, 2, dst, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2341
745583f9
RB
2342/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2343 LEN, and SIZE. */
2344
2345static bool
2346gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
2347{
355fe088 2348 gimple *stmt = gsi_stmt (*gsi);
745583f9
RB
2349 tree dest = gimple_call_arg (stmt, 0);
2350 tree src = gimple_call_arg (stmt, 1);
2351 tree len = gimple_call_arg (stmt, 2);
2352 tree size = gimple_call_arg (stmt, 3);
2353 tree fn;
2354 const char *p;
2355
2356 p = c_getstr (src);
2357 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2358 if ((p && *p == '\0')
2359 || integer_zerop (len))
2360 {
2361 replace_call_with_value (gsi, dest);
2362 return true;
2363 }
2364
2365 if (! tree_fits_uhwi_p (size))
2366 return false;
2367
2368 if (! integer_all_onesp (size))
2369 {
2370 tree src_len = c_strlen (src, 1);
2371 if (src_len
2372 && tree_fits_uhwi_p (src_len)
2373 && tree_fits_uhwi_p (len)
2374 && ! tree_int_cst_lt (len, src_len))
2375 {
2376 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2377 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
2378 if (!fn)
2379 return false;
2380
355fe088 2381 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
745583f9
RB
2382 replace_call_with_call_and_fold (gsi, repl);
2383 return true;
2384 }
2385 return false;
2386 }
2387
2388 /* If __builtin_strncat_chk is used, assume strncat is available. */
2389 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2390 if (!fn)
2391 return false;
2392
355fe088 2393 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
745583f9
RB
2394 replace_call_with_call_and_fold (gsi, repl);
2395 return true;
2396}
2397
a918bfbf
ML
2398/* Build and append gimple statements to STMTS that would load a first
2399 character of a memory location identified by STR. LOC is location
2400 of the statement. */
2401
2402static tree
2403gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2404{
2405 tree var;
2406
2407 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2408 tree cst_uchar_ptr_node
2409 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2410 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2411
2412 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2413 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2414 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2415
2416 gimple_assign_set_lhs (stmt, var);
2417 gimple_seq_add_stmt_without_update (stmts, stmt);
2418
2419 return var;
2420}
2421
d2f8402a 2422/* Fold a call to the str{n}{case}cmp builtin pointed by GSI iterator. */
a918bfbf
ML
2423
static bool
gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree callee = gimple_call_fndecl (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);

  tree type = integer_type_node;
  tree str1 = gimple_call_arg (stmt, 0);
  tree str2 = gimple_call_arg (stmt, 1);
  tree lhs = gimple_call_lhs (stmt);

  tree bound_node = NULL_TREE;
  /* BOUND stays at -1 (all ones) when there is no bound or it isn't
     a known constant.  */
  unsigned HOST_WIDE_INT bound = HOST_WIDE_INT_M1U;

  /* Handle strncmp and strncasecmp functions.  */
  if (gimple_call_num_args (stmt) == 3)
    {
      bound_node = gimple_call_arg (stmt, 2);
      if (tree_fits_uhwi_p (bound_node))
	bound = tree_to_uhwi (bound_node);
    }

  /* If the BOUND parameter is zero, return zero.  */
  if (bound == 0)
    {
      replace_call_with_value (gsi, integer_zero_node);
      return true;
    }

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (str1, str2, 0))
    {
      replace_call_with_value (gsi, integer_zero_node);
      return true;
    }

  /* Initially set to the number of characters, including the terminating
     nul if each array has one.  LENx == strnlen (Sx, LENx) implies that
     the array Sx is not terminated by a nul.
     For nul-terminated strings then adjusted to their length so that
     LENx == NULPOSx holds.  */
  unsigned HOST_WIDE_INT len1 = HOST_WIDE_INT_MAX, len2 = len1;
  const char *p1 = getbyterep (str1, &len1);
  const char *p2 = getbyterep (str2, &len2);

  /* The position of the terminating nul character if one exists, otherwise
     a value greater than LENx.  */
  unsigned HOST_WIDE_INT nulpos1 = HOST_WIDE_INT_MAX, nulpos2 = nulpos1;

  if (p1)
    {
      size_t n = strnlen (p1, len1);
      if (n < len1)
	len1 = nulpos1 = n;
    }

  if (p2)
    {
      size_t n = strnlen (p2, len2);
      if (n < len2)
	len2 = nulpos2 = n;
    }

  /* For known strings, return an immediate value.  */
  if (p1 && p2)
    {
      int r = 0;
      bool known_result = false;

      switch (fcode)
	{
	case BUILT_IN_STRCMP:
	case BUILT_IN_STRCMP_EQ:
	  /* Both arrays must be nul-terminated for strcmp to be
	     evaluated at compile time.  */
	  if (len1 != nulpos1 || len2 != nulpos2)
	    break;

	  r = strcmp (p1, p2);
	  known_result = true;
	  break;

	case BUILT_IN_STRNCMP:
	case BUILT_IN_STRNCMP_EQ:
	  {
	    if (bound == HOST_WIDE_INT_M1U)
	      break;

	    /* Reduce the bound to be no more than the length
	       of the shorter of the two strings, or the sizes
	       of the unterminated arrays.  */
	    unsigned HOST_WIDE_INT n = bound;

	    if (len1 == nulpos1 && len1 < n)
	      n = len1 + 1;
	    if (len2 == nulpos2 && len2 < n)
	      n = len2 + 1;

	    /* If the reduced bound would read past the end of an
	       unterminated array, punt.  */
	    if (MIN (nulpos1, nulpos2) + 1 < n)
	      break;

	    r = strncmp (p1, p2, n);
	    known_result = true;
	    break;
	  }
	/* Only handleable situation is where the strings are equal (result 0),
	   which is already handled by operand_equal_p case.  */
	case BUILT_IN_STRCASECMP:
	  break;
	case BUILT_IN_STRNCASECMP:
	  {
	    if (bound == HOST_WIDE_INT_M1U)
	      break;
	    /* strncmp equality implies strncasecmp equality; any other
	       result is unusable since case differences may matter.  */
	    r = strncmp (p1, p2, bound);
	    if (r == 0)
	      known_result = true;
	    break;
	  }
	default:
	  gcc_unreachable ();
	}

      if (known_result)
	{
	  replace_call_with_value (gsi, build_cmp_result (type, r));
	  return true;
	}
    }

  bool nonzero_bound = (bound >= 1 && bound < HOST_WIDE_INT_M1U)
    || fcode == BUILT_IN_STRCMP
    || fcode == BUILT_IN_STRCMP_EQ
    || fcode == BUILT_IN_STRCASECMP;

  location_t loc = gimple_location (stmt);

  /* If the second arg is "", return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0' && nonzero_bound)
    {
      gimple_seq stmts = NULL;
      tree var = gimple_load_first_char (loc, str1, &stmts);
      if (lhs)
	{
	  stmt = gimple_build_assign (lhs, NOP_EXPR, var);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If the first arg is "", return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0' && nonzero_bound)
    {
      gimple_seq stmts = NULL;
      tree var = gimple_load_first_char (loc, str2, &stmts);

      if (lhs)
	{
	  tree c = create_tmp_reg_or_ssa_name (integer_type_node);
	  stmt = gimple_build_assign (c, NOP_EXPR, var);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);

	  stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If BOUND is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
  if (fcode == BUILT_IN_STRNCMP && bound == 1)
    {
      gimple_seq stmts = NULL;
      tree temp1 = gimple_load_first_char (loc, str1, &stmts);
      tree temp2 = gimple_load_first_char (loc, str2, &stmts);

      if (lhs)
	{
	  tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
	  gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
	  gimple_seq_add_stmt_without_update (&stmts, convert1);

	  tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
	  gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
	  gimple_seq_add_stmt_without_update (&stmts, convert2);

	  stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If BOUND is greater than the length of one constant string,
     and the other argument is also a nul-terminated string, replace
     strncmp with strcmp.  */
  if (fcode == BUILT_IN_STRNCMP
      && bound > 0 && bound < HOST_WIDE_INT_M1U
      && ((p2 && len2 < bound && len2 == nulpos2)
	  || (p1 && len1 < bound && len1 == nulpos1)))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
      if (!fn)
	return false;
      gimple *repl = gimple_build_call (fn, 2, str1, str2);
      replace_call_with_call_and_fold (gsi, repl);
      return true;
    }

  return false;
}
2638
488c6247
ML
2639/* Fold a call to the memchr pointed by GSI iterator. */
2640
static bool
gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree arg1 = gimple_call_arg (stmt, 0);
  tree arg2 = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
      return true;
    }

  /* The compile-time evaluation below needs a constant character and
     a constant length.  */
  char c;
  if (TREE_CODE (arg2) != INTEGER_CST
      || !tree_fits_uhwi_p (len)
      || !target_char_cst_p (arg2, &c))
    return false;

  unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
  unsigned HOST_WIDE_INT string_length;
  const char *p1 = getbyterep (arg1, &string_length);

  if (p1)
    {
      /* Search only the bytes actually known; never read past the
	 smaller of LEN and the representation size.  */
      const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
      if (r == NULL)
	{
	  tree mem_size, offset_node;
	  byte_representation (arg1, &offset_node, &mem_size, NULL);
	  unsigned HOST_WIDE_INT offset = (offset_node == NULL_TREE)
					  ? 0 : tree_to_uhwi (offset_node);
	  /* MEM_SIZE is the size of the array the string literal
	     is stored in.  */
	  unsigned HOST_WIDE_INT string_size = tree_to_uhwi (mem_size) - offset;
	  gcc_checking_assert (string_length <= string_size);
	  /* The character was not found within the known bytes; only
	     fold to null when LEN doesn't reach past the underlying
	     array, since bytes beyond it are unknown.  */
	  if (length <= string_size)
	    {
	      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
	      return true;
	    }
	}
      else
	{
	  /* Found: fold to ARG1 + OFFSET of the match.  */
	  unsigned HOST_WIDE_INT offset = r - p1;
	  gimple_seq stmts = NULL;
	  if (lhs != NULL_TREE)
	    {
	      tree offset_cst = build_int_cst (sizetype, offset);
	      gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
						   arg1, offset_cst);
	      gimple_seq_add_stmt_without_update (&stmts, stmt);
	    }
	  else
	    gimple_seq_add_stmt_without_update (&stmts,
						gimple_build_nop ());

	  gsi_replace_with_seq_vops (gsi, stmts);
	  return true;
	}
    }

  return false;
}
/* Fold a call to the fputs builtin.  ARG0 (the string) and ARG1 (the
   stream) are the arguments to the call.  UNLOCKED is true if this is
   actually a call to fputs_unlocked.  The transformation is applied
   only when the return value of the call is unused.  Return false if
   no simplification was possible.  */

static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
			   tree arg0, tree arg1,
			   bool unlocked)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
			 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
			 : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
			  ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
			  : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation
     (fputc/fwrite do not return what fputs would).  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, SRK_STRLEN);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely .  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
	const char *p = c_getstr (arg0);
	if (p != NULL)
	  {
	    if (!fn_fputc)
	      return false;

	    gimple *repl = gimple_build_call (fn_fputc, 2,
					      build_int_cst
					      (integer_type_node, p[0]), arg1);
	    replace_call_with_call_and_fold (gsi, repl);
	    return true;
	  }
      }
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
	/* If optimizing for size keep fputs.  */
	if (optimize_function_for_size_p (cfun))
	  return false;
	/* New argument list transforming fputs(string, stream) to
	   fwrite(string, 1, len, stream).  */
	if (!fn_fwrite)
	  return false;

	gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
					  size_one_node, len, arg1);
	replace_call_with_call_and_fold (gsi, repl);
	return true;
      }
    default:
      gcc_unreachable ();
    }
  return false;
}
2785
/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call.
   FCODE is the BUILT_IN_* code of the builtin.  The call is downgraded
   to the unchecked variant only when the copy provably fits in SIZE
   bytes (or when SIZE is unknown, i.e. all-ones).  Return true if the
   call was simplified.  */

static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
				tree dest, tree src, tree len, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	{
	  replace_call_with_value (gsi, dest);
	  return true;
	}
      else
	{
	  /* __mempcpy_chk returns DEST + LEN; materialize that sum.  */
	  gimple_seq stmts = NULL;
	  len = gimple_convert_to_ptrofftype (&stmts, loc, len);
	  tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
				    TREE_TYPE (dest), dest, len);
	  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	  replace_call_with_value (gsi, temp);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
  /* SIZE of all-ones means the object size is unknown; no check
     against it is needed then.  */
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	      return false;
	    }
	}
      else
	maxlen = len;

      /* Punt when the copy might overflow the destination.  */
      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2886
/* Fold a call to the __st[rp]cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call.  FCODE is the
   BUILT_IN_* code of the builtin.  The checked call is downgraded to
   st[rp]cpy (or __memcpy_chk) only when the copied string provably
   fits in SIZE bytes.  Return true if the call was simplified.  */

static bool
gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
				tree dest,
				tree src, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree len, fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    {
      /* Issue -Wrestrict unless the pointers are null (those do
	 not point to objects and so do not indicate an overlap;
	 such calls could be the result of sanitization and jump
	 threading).  */
      if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
	{
	  tree func = gimple_call_fndecl (stmt);

	  warning_at (loc, OPT_Wrestrict,
		      "%qD source argument is the same as destination",
		      func);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (src, SRK_STRLENMAX);
  /* SIZE of all-ones means the destination size is unknown.  */
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return false;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 3, dest, src, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return false;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return false;

	      /* Copy LEN + 1 bytes to include the terminating nul.  */
	      gimple_seq stmts = NULL;
	      len = force_gimple_operand (len, &stmts, true, NULL_TREE);
	      len = gimple_convert (&stmts, loc, size_type_node, len);
	      len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
				  build_int_cst (size_type_node, 1));
	      gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	      gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	maxlen = len;

      /* Note the strict inequality: MAXLEN is the string length, and
	 MAXLEN + 1 bytes (with the nul) must fit in SIZE.  */
      if (! tree_int_cst_lt (maxlen, size))
	return false;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2991
/* Fold a call to the __st{r,p}ncpy_chk builtin.  DEST, SRC, LEN, and SIZE
   are the arguments to the call.  FCODE is the BUILT_IN_* code of the
   builtin.  The checked call is downgraded to st{r,p}ncpy only when at
   most SIZE bytes are provably written.  Return true if the call was
   simplified.  */

static bool
gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
				 tree dest, tree src,
				 tree len, tree size,
				 enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
    {
      /* If return value of __stpncpy_chk is ignored,
	 optimize into __strncpy_chk.  */
      fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
      if (fn)
	{
	  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
  /* SIZE of all-ones means the destination size is unknown; no check
     against it is needed then.  */
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      /* Punt when more than SIZE bytes might be written.  */
      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
			      ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
3051
/* Fold function call to builtin stpcpy with arguments DEST and SRC.
   If the result is unused the call becomes strcpy; otherwise, when the
   source length is a known constant, it becomes
   memcpy (DEST, SRC, LEN + 1) followed by LHS = DEST + LEN.
   Return false if no simplification can be made.  */

static bool
gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  location_t loc = gimple_location (stmt);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree fn, lenp1;

  /* If the result is unused, replace stpcpy with strcpy.  */
  if (gimple_call_lhs (stmt) == NULL_TREE)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;
      gimple_call_set_fndecl (stmt, fn);
      fold_stmt (gsi);
      return true;
    }

  /* Set to non-null if ARG refers to an unterminated array.  */
  c_strlen_data data = { };
  /* The size of the unterminated array if SRC refers to one.  */
  tree size;
  /* True if the size is exact/constant, false if it's the lower bound
     of a range.  */
  bool exact;
  tree len = c_strlen (src, 1, &data, 1);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    {
      data.decl = unterminated_array (src, &size, &exact);
      if (!data.decl)
	return false;
    }

  if (data.decl)
    {
      /* Avoid folding calls with unterminated arrays.  */
      if (!gimple_no_warning_p (stmt))
	warn_string_no_nul (loc, NULL_TREE, "stpcpy", src, data.decl, size,
			    exact);
      /* Suppress repeated diagnostics for the same statement.  */
      gimple_set_no_warning (stmt, true);
      return false;
    }

  if (optimize_function_for_size_p (cfun)
      /* If length is zero it's small enough.  */
      && !integer_zerop (len))
    return false;

  /* If the source has a known length replace stpcpy with memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  /* Build LEN + 1 (the nul is copied too).  */
  gimple_seq stmts = NULL;
  tree tem = gimple_convert (&stmts, loc, size_type_node, len);
  lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
			tem, build_int_cst (size_type_node, 1));
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
  /* The memcpy inherits the original call's virtual operands.  */
  gimple_move_vops (repl, stmt);
  gsi_insert_before (gsi, repl, GSI_SAME_STMT);
  /* Replace the result with dest + len.  */
  stmts = NULL;
  tem = gimple_convert (&stmts, loc, sizetype, len);
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
				      POINTER_PLUS_EXPR, dest, tem);
  gsi_replace (gsi, ret, false);
  /* Finally fold the memcpy call.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
3132
/* Fold a call to __{,v}snprintf_chk into {,v}snprintf when the output
   provably fits and the format is safe.  FCODE is either
   BUILT_IN_SNPRINTF_CHK or BUILT_IN_VSNPRINTF_CHK.  The call statement
   is rewritten in place, dropping the FLAG and SIZE arguments while
   retaining trailing varargs.  Return true if folded.  */

static bool
gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
				  enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (gimple_call_num_args (stmt) < 5)
    return false;

  dest = gimple_call_arg (stmt, 0);
  len = gimple_call_arg (stmt, 1);
  flag = gimple_call_arg (stmt, 2);
  size = gimple_call_arg (stmt, 3);
  fmt = gimple_call_arg (stmt, 4);

  if (! tree_fits_uhwi_p (size))
    return false;

  /* SIZE of all-ones means the destination size is unknown.  */
  if (! integer_all_onesp (size))
    {
      tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  if (!init_target_chars ())
    return false;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
			      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 5 argument by 3 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, len);
  gimple_call_set_arg (stmt, 2, fmt);
  /* Shift the trailing varargs left by two positions.  */
  for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
cbdd87d4 3213
/* Fold a call to __{,v}sprintf_chk into {,v}sprintf when the output
   size is known to fit and the format is safe.  FCODE is either
   BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.  The call statement
   is rewritten in place, dropping the FLAG and SIZE arguments while
   retaining trailing varargs.  Return true if folded.  */

static bool
gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
				 enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  unsigned nargs = gimple_call_num_args (stmt);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return false;
  dest = gimple_call_arg (stmt, 0);
  flag = gimple_call_arg (stmt, 1);
  size = gimple_call_arg (stmt, 2);
  fmt = gimple_call_arg (stmt, 3);

  if (! tree_fits_uhwi_p (size))
    return false;

  len = NULL_TREE;

  if (!init_target_chars ())
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = gimple_call_arg (stmt, 4);
	      if (POINTER_TYPE_P (TREE_TYPE (arg)))
		{
		  len = c_strlen (arg, 1);
		  if (! len || ! tree_fits_uhwi_p (len))
		    len = NULL_TREE;
		}
	    }
	}
    }

  /* SIZE of all-ones means the destination size is unknown.  */
  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
	return false;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
			      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 4 argument by 2 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, fmt);
  /* Shift the trailing varargs left by two positions.  */
  for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
3309
/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
   ORIG may be null if this is a 2-argument call.  We don't attempt to
   simplify calls with more than 3 arguments.

   Return true if simplification was possible, otherwise false.  */

bool
gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree fmt = gimple_call_arg (stmt, 1);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (gimple_call_num_args (stmt) > 3)
    return false;

  if (gimple_call_num_args (stmt) == 3)
    orig = gimple_call_arg (stmt, 2);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return false;

      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
	return false;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
	 'format' is known to contain no % formats.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);

      /* Propagate the NO_WARNING bit to avoid issuing the same
	 warning more than once.  */
      if (gimple_no_warning_p (stmt))
	gimple_set_no_warning (repl, true);

      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  /* sprintf returns the number of characters written, which
	     here is simply strlen (fmt).  */
	  repl = gimple_build_assign (lhs, build_int_cst (TREE_TYPE (lhs),
							  strlen (fmt_str)));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ??? We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn;
      fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return false;

      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
	return false;

      /* When the result is used we also need the length of ORIG to
	 materialize the return value.  */
      tree orig_len = NULL_TREE;
      if (gimple_call_lhs (stmt))
	{
	  orig_len = get_maxval_strlen (orig, SRK_STRLEN);
	  if (!orig_len)
	    return false;
	}

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);

      /* Propagate the NO_WARNING bit to avoid issuing the same
	 warning more than once.  */
      if (gimple_no_warning_p (stmt))
	gimple_set_no_warning (repl, true);

      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (TREE_TYPE (lhs),
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
	  repl = gimple_build_assign (lhs, orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ??? We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
3443
/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
   FMT, and ORIG.  ORIG may be null if this is a 3-argument call.  We don't
   attempt to simplify calls with more than 4 arguments.

   Return true if simplification was possible, otherwise false.  */

bool
gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest = gimple_call_arg (stmt, 0);
  tree destsize = gimple_call_arg (stmt, 1);
  tree fmt = gimple_call_arg (stmt, 2);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  if (gimple_call_num_args (stmt) > 4)
    return false;

  if (gimple_call_num_args (stmt) == 4)
    orig = gimple_call_arg (stmt, 3);

  /* The destination size must be a compile-time constant so we can
     prove there is no truncation.  */
  if (!tree_fits_uhwi_p (destsize))
    return false;
  unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't optimize snprintf (buf, 4, "abc", ptr++).  */
      if (orig)
	return false;

      /* We could expand this as
	 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
	 or to
	 memcpy (str, fmt_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      size_t len = strlen (fmt_str);
      if (len >= destlen)
	return false;

      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  /* snprintf returns the number of characters that would have
	     been written, here exactly LEN since it fits.  */
	  repl = gimple_build_assign (lhs,
				      build_int_cst (TREE_TYPE (lhs), len));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ??? We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't crash on snprintf (str1, cst, "%s").  */
      if (!orig)
	return false;

      tree orig_len = get_maxval_strlen (orig, SRK_STRLEN);
      if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
	return false;

      /* We could expand this as
	 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
	 or to
	 memcpy (str1, str2_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      if (compare_tree_int (orig_len, destlen) >= 0)
	return false;

      /* Convert snprintf (str1, cst, "%s", str2) into
	 strcpy (str1, str2) if strlen (str2) < cst.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (TREE_TYPE (lhs),
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
	  repl = gimple_build_assign (lhs, orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ??? We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
35770bb2 3580
edd7ae68
RB
/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
   FP, FMT, and ARG are the arguments to the call.  We don't fold calls with
   more than 3 arguments, and ARG may be null in the 2-argument case.

   Return true if the call in *GSI was simplified (it is then replaced
   with a call to fputs/fputc or removed entirely), false otherwise.
   FCODE is the BUILT_IN_* code of the function to be simplified.  */

static bool
gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
			     tree fp, tree fmt, tree arg,
			     enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_fputc, fn_fputs;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
      fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
    }
  else
    {
      fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
      fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
    }

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      /* Verbatim formats with a trailing argument only make sense for
	 the va_list variants.  */
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
	  && arg)
	return false;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* When "string" doesn't contain %, replace all cases of
	 fprintf (fp, string) with fputs (string, fp).  The fputs
	 builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return false;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg
	  || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
	return false;
      if (fn_fputc)
	{
	  gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
3681
ad03a744
RB
/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
   FMT and ARG are the arguments to the call; we don't fold cases with
   more than 2 arguments, and ARG may be null if this is a 1-argument case.

   Return true if the call in *GSI was simplified (it is then replaced
   with a call to putchar/puts or removed entirely), false otherwise.
   FCODE is the BUILT_IN_* code of the function to be simplified.  */

static bool
gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
			    tree arg, enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_putchar, fn_puts, newarg;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
      fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
    }
  else
    {
      fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
      fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
    }

  if (!init_target_chars ())
    return false;

  /* Handle printf ("%s", arg) with constant ARG, and formats without
     any '%', uniformly: both reduce to printing a known string STR.  */
  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
	{
	  if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
	    return false;

	  if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	    return false;

	  str = c_getstr (arg);
	  if (str == NULL)
	    return false;
	}
      else
	{
	  /* The format specifier doesn't contain any '%' characters.  */
	  if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
	      && arg)
	    return false;
	  str = fmt_str;
	}

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
	{
	  /* Given printf("c"), (where c is any one character,)
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
	  newarg = build_int_cst (integer_type_node, str[0]);
	  if (fn_putchar)
	    {
	      gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	{
	  /* If the string was "string\n", call puts("string").  */
	  size_t len = strlen (str);
	  if ((unsigned char)str[len - 1] == target_newline
	      && (size_t) (int) len == len
	      && (int) len > 0)
	    {
	      char *newstr;

	      /* Create a NUL-terminated string that's one char shorter
		 than the original, stripping off the trailing '\n'.  */
	      newstr = xstrdup (str);
	      newstr[len - 1] = '\0';
	      newarg = build_string_literal (len, newstr);
	      free (newstr);
	      if (fn_puts)
		{
		  gcall *repl = gimple_build_call (fn_puts, 1, newarg);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	    }
	  else
	    /* We'd like to arrange to call fputs(string,stdout) here,
	       but we need stdout and don't have a way to get it yet.  */
	    return false;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return false;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_puts)
	{
	  gcall *repl = gimple_build_call (fn_puts, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || ! useless_type_conversion_p (integer_type_node,
					       TREE_TYPE (arg)))
	return false;
      if (fn_putchar)
	{
	  gcall *repl = gimple_build_call (fn_putchar, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
3833
edd7ae68 3834
fef5a0d9
RB
3835
/* Fold a call to __builtin_strlen.  If the length of the argument can
   be determined exactly, replace the call with the constant length and
   return true.  Otherwise, record the computed [minlen, maxlen] range
   on the call's LHS (if any) and return false.  */

static bool
gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree arg = gimple_call_arg (stmt, 0);

  wide_int minlen;
  wide_int maxlen;

  c_strlen_data lendata = { };
  /* Only trust the range when it does not depend on an unterminated
     declaration (lendata.decl) and both bounds are constants.  */
  if (get_range_strlen (arg, &lendata, /* eltsize = */ 1)
      && !lendata.decl
      && lendata.minlen && TREE_CODE (lendata.minlen) == INTEGER_CST
      && lendata.maxlen && TREE_CODE (lendata.maxlen) == INTEGER_CST)
    {
      /* The range of lengths refers to either a single constant
	 string or to the longest and shortest constant string
	 referenced by the argument of the strlen() call, or to
	 the strings that can possibly be stored in the arrays
	 the argument refers to.  */
      minlen = wi::to_wide (lendata.minlen);
      maxlen = wi::to_wide (lendata.maxlen);
    }
  else
    {
      /* Fall back to the most conservative range: [0, largest
	 representable object size minus 2].  */
      unsigned prec = TYPE_PRECISION (sizetype);

      minlen = wi::shwi (0, prec);
      maxlen = wi::to_wide (max_object_size (), prec) - 2;
    }

  if (minlen == maxlen)
    {
      /* Fold the strlen call to a constant.  (This branch is only
	 reachable when the constant-range case above succeeded, so
	 lendata.minlen is non-null here.)  */
      tree type = TREE_TYPE (lendata.minlen);
      tree len = force_gimple_operand_gsi (gsi,
					   wide_int_to_tree (type, minlen),
					   true, NULL, true, GSI_SAME_STMT);
      replace_call_with_value (gsi, len);
      return true;
    }

  /* Set the strlen() range to [0, MAXLEN].  */
  if (tree lhs = gimple_call_lhs (stmt))
    set_strlen_range (lhs, minlen, maxlen);

  return false;
}
3886
48126138
NS
3887/* Fold a call to __builtin_acc_on_device. */
3888
3889static bool
3890gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
3891{
3892 /* Defer folding until we know which compiler we're in. */
3893 if (symtab->state != EXPANSION)
3894 return false;
3895
3896 unsigned val_host = GOMP_DEVICE_HOST;
3897 unsigned val_dev = GOMP_DEVICE_NONE;
3898
3899#ifdef ACCEL_COMPILER
3900 val_host = GOMP_DEVICE_NOT_HOST;
3901 val_dev = ACCEL_COMPILER_acc_device;
3902#endif
3903
3904 location_t loc = gimple_location (gsi_stmt (*gsi));
3905
3906 tree host_eq = make_ssa_name (boolean_type_node);
3907 gimple *host_ass = gimple_build_assign
3908 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
3909 gimple_set_location (host_ass, loc);
3910 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
3911
3912 tree dev_eq = make_ssa_name (boolean_type_node);
3913 gimple *dev_ass = gimple_build_assign
3914 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
3915 gimple_set_location (dev_ass, loc);
3916 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
3917
3918 tree result = make_ssa_name (boolean_type_node);
3919 gimple *result_ass = gimple_build_assign
3920 (result, BIT_IOR_EXPR, host_eq, dev_eq);
3921 gimple_set_location (result_ass, loc);
3922 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
3923
3924 replace_call_with_value (gsi, result);
3925
3926 return true;
3927}
cbdd87d4 3928
fe75f732
PK
3929/* Fold realloc (0, n) -> malloc (n). */
3930
3931static bool
3932gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
3933{
3934 gimple *stmt = gsi_stmt (*gsi);
3935 tree arg = gimple_call_arg (stmt, 0);
3936 tree size = gimple_call_arg (stmt, 1);
3937
3938 if (operand_equal_p (arg, null_pointer_node, 0))
3939 {
3940 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
3941 if (fn_malloc)
3942 {
3943 gcall *repl = gimple_build_call (fn_malloc, 1, size);
3944 replace_call_with_call_and_fold (gsi, repl);
3945 return true;
3946 }
3947 }
3948 return false;
3949}
3950
1bea0d0a
JJ
/* Number of bytes into which any type but aggregate or vector types
   should fit.  */
static constexpr size_t clear_padding_unit
  = MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT;
/* Buffer size on which __builtin_clear_padding folding code works.  */
static const size_t clear_padding_buf_size = 32 * clear_padding_unit;

/* Data passed through __builtin_clear_padding folding.  */
struct clear_padding_struct {
  /* Location used for all statements and diagnostics emitted while
     folding.  */
  location_t loc;
  /* Pointer to the start of the object whose padding is being cleared;
     used as the base of the emitted MEM_REF stores.  */
  tree base;
  /* Type used for the MEM_REF offset operand (carries aliasing info).  */
  tree alias_type;
  /* Iterator before which the generated statements are inserted.  */
  gimple_stmt_iterator *gsi;
  /* Alignment of buf->base + 0.  */
  unsigned align;
  /* Offset from buf->base.  Should be always a multiple of UNITS_PER_WORD.  */
  HOST_WIDE_INT off;
  /* Number of padding bytes before buf->off that don't have padding clear
     code emitted yet.  */
  HOST_WIDE_INT padding_bytes;
  /* The size of the whole object.  Never emit code to touch
     buf->base + buf->sz or following bytes.  */
  HOST_WIDE_INT sz;
  /* Number of bytes recorded in buf->buf.  */
  size_t size;
  /* When inside union, instead of emitting code we and bits inside of
     the union_ptr array.  */
  unsigned char *union_ptr;
  /* Set bits mean padding bits that need to be cleared by the builtin.  */
  unsigned char buf[clear_padding_buf_size + clear_padding_unit];
};
3982
/* Emit code to clear padding requested in BUF->buf - set bits
   in there stand for padding that should be cleared.  FULL is true
   if everything from the buffer should be flushed, otherwise
   it can leave up to 2 * clear_padding_unit bytes for further
   processing.  */

static void
clear_padding_flush (clear_padding_struct *buf, bool full)
{
  gcc_assert ((clear_padding_unit % UNITS_PER_WORD) == 0);
  if (!full && buf->size < 2 * clear_padding_unit)
    return;
  gcc_assert ((buf->off % UNITS_PER_WORD) == 0);
  size_t end = buf->size;
  if (!full)
    /* For a partial flush, process only whole clear_padding_unit
       chunks and keep the tail for the next call.  */
    end = ((end - clear_padding_unit - 1) / clear_padding_unit
	   * clear_padding_unit);
  size_t padding_bytes = buf->padding_bytes;
  if (buf->union_ptr)
    {
      /* Inside of a union, instead of emitting any code, instead
	 clear all bits in the union_ptr buffer that are clear
	 in buf.  Whole padding bytes don't clear anything.  */
      for (size_t i = 0; i < end; i++)
	{
	  if (buf->buf[i] == (unsigned char) ~0)
	    padding_bytes++;
	  else
	    {
	      padding_bytes = 0;
	      buf->union_ptr[buf->off + i] &= buf->buf[i];
	    }
	}
      if (full)
	{
	  buf->off = 0;
	  buf->size = 0;
	  buf->padding_bytes = 0;
	}
      else
	{
	  memmove (buf->buf, buf->buf + end, buf->size - end);
	  buf->off += end;
	  buf->size -= end;
	  buf->padding_bytes = padding_bytes;
	}
      return;
    }
  /* Walk the buffer a word at a time, classifying each word's bytes
     so the most compact store (memset-like array store, byte stores,
     or a read-modify-write of a single integer) can be emitted.  */
  size_t wordsize = UNITS_PER_WORD;
  for (size_t i = 0; i < end; i += wordsize)
    {
      size_t nonzero_first = wordsize;
      size_t nonzero_last = 0;
      size_t zero_first = wordsize;
      size_t zero_last = 0;
      bool all_ones = true, bytes_only = true;
      if ((unsigned HOST_WIDE_INT) (buf->off + i + wordsize)
	  > (unsigned HOST_WIDE_INT) buf->sz)
	{
	  /* Never touch bytes at or after buf->base + buf->sz; retry
	     this position with a smaller word.  */
	  gcc_assert (wordsize > 1);
	  wordsize /= 2;
	  i -= wordsize;
	  continue;
	}
      for (size_t j = i; j < i + wordsize && j < end; j++)
	{
	  if (buf->buf[j])
	    {
	      if (nonzero_first == wordsize)
		{
		  nonzero_first = j - i;
		  nonzero_last = j - i;
		}
	      if (nonzero_last != j - i)
		all_ones = false;
	      nonzero_last = j + 1 - i;
	    }
	  else
	    {
	      if (zero_first == wordsize)
		zero_first = j - i;
	      zero_last = j + 1 - i;
	    }
	  if (buf->buf[j] != 0 && buf->buf[j] != (unsigned char) ~0)
	    {
	      /* A partially set byte means a bit-field straddles it.  */
	      all_ones = false;
	      bytes_only = false;
	    }
	}
      size_t padding_end = i;
      if (padding_bytes)
	{
	  /* There is a run of pending padding bytes from earlier words;
	     either extend it or emit one store covering all of it.  */
	  if (nonzero_first == 0
	      && nonzero_last == wordsize
	      && all_ones)
	    {
	      /* All bits are padding and we had some padding
		 before too.  Just extend it.  */
	      padding_bytes += wordsize;
	      continue;
	    }
	  if (all_ones && nonzero_first == 0)
	    {
	      padding_bytes += nonzero_last;
	      padding_end += nonzero_last;
	      nonzero_first = wordsize;
	      nonzero_last = 0;
	    }
	  else if (bytes_only && nonzero_first == 0)
	    {
	      gcc_assert (zero_first && zero_first != wordsize);
	      padding_bytes += zero_first;
	      padding_end += zero_first;
	    }
	  tree atype, src;
	  if (padding_bytes == 1)
	    {
	      atype = char_type_node;
	      src = build_zero_cst (char_type_node);
	    }
	  else
	    {
	      atype = build_array_type_nelts (char_type_node, padding_bytes);
	      src = build_constructor (atype, NULL);
	    }
	  tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
				 build_int_cst (buf->alias_type,
						buf->off + padding_end
						- padding_bytes));
	  gimple *g = gimple_build_assign (dst, src);
	  gimple_set_location (g, buf->loc);
	  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	  padding_bytes = 0;
	  buf->padding_bytes = 0;
	}
      if (nonzero_first == wordsize)
	/* All bits in a word are 0, there are no padding bits.  */
	continue;
      if (all_ones && nonzero_last == wordsize)
	{
	  /* All bits between nonzero_first and end of word are padding
	     bits, start counting padding_bytes.  */
	  padding_bytes = nonzero_last - nonzero_first;
	  continue;
	}
      if (bytes_only)
	{
	  /* If bitfields aren't involved in this word, prefer storing
	     individual bytes or groups of them over performing a RMW
	     operation on the whole word.  */
	  gcc_assert (i + zero_last <= end);
	  for (size_t j = padding_end; j < i + zero_last; j++)
	    {
	      if (buf->buf[j])
		{
		  /* Find the run [j, k) of consecutive padding bytes
		     and clear it with a single store.  */
		  size_t k;
		  for (k = j; k < i + zero_last; k++)
		    if (buf->buf[k] == 0)
		      break;
		  HOST_WIDE_INT off = buf->off + j;
		  tree atype, src;
		  if (k - j == 1)
		    {
		      atype = char_type_node;
		      src = build_zero_cst (char_type_node);
		    }
		  else
		    {
		      atype = build_array_type_nelts (char_type_node, k - j);
		      src = build_constructor (atype, NULL);
		    }
		  tree dst = build2_loc (buf->loc, MEM_REF, atype,
					 buf->base,
					 build_int_cst (buf->alias_type, off));
		  gimple *g = gimple_build_assign (dst, src);
		  gimple_set_location (g, buf->loc);
		  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
		  j = k;
		}
	    }
	  if (nonzero_last == wordsize)
	    /* The trailing padding bytes carry over into the next word.  */
	    padding_bytes = nonzero_last - zero_last;
	  continue;
	}
      /* Bit-fields are involved: find the smallest power-of-two-sized
	 naturally aligned element covering all padding bits and emit a
	 (possibly read-modify-write) store of that element.  */
      for (size_t eltsz = 1; eltsz <= wordsize; eltsz <<= 1)
	{
	  if (nonzero_last - nonzero_first <= eltsz
	      && ((nonzero_first & ~(eltsz - 1))
		  == ((nonzero_last - 1) & ~(eltsz - 1))))
	    {
	      tree type;
	      if (eltsz == 1)
		type = char_type_node;
	      else
		type = lang_hooks.types.type_for_size (eltsz * BITS_PER_UNIT,
						       0);
	      size_t start = nonzero_first & ~(eltsz - 1);
	      HOST_WIDE_INT off = buf->off + i + start;
	      tree atype = type;
	      if (eltsz > 1 && buf->align < TYPE_ALIGN (type))
		atype = build_aligned_type (type, buf->align);
	      tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
				     build_int_cst (buf->alias_type, off));
	      tree src;
	      gimple *g;
	      if (all_ones
		  && nonzero_first == start
		  && nonzero_last == start + eltsz)
		/* The whole element is padding - store plain zero.  */
		src = build_zero_cst (type);
	      else
		{
		  /* Otherwise load the element, mask off the padding
		     bits and store it back.  */
		  src = make_ssa_name (type);
		  g = gimple_build_assign (src, unshare_expr (dst));
		  gimple_set_location (g, buf->loc);
		  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
		  tree mask = native_interpret_expr (type,
						     buf->buf + i + start,
						     eltsz);
		  gcc_assert (mask && TREE_CODE (mask) == INTEGER_CST);
		  mask = fold_build1 (BIT_NOT_EXPR, type, mask);
		  tree src_masked = make_ssa_name (type);
		  g = gimple_build_assign (src_masked, BIT_AND_EXPR,
					   src, mask);
		  gimple_set_location (g, buf->loc);
		  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
		  src = src_masked;
		}
	      g = gimple_build_assign (dst, src);
	      gimple_set_location (g, buf->loc);
	      gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	      break;
	    }
	}
    }
  if (full)
    {
      /* Flush any remaining pending padding, then reset the buffer to
	 the word boundary at END.  */
      if (padding_bytes)
	{
	  tree atype, src;
	  if (padding_bytes == 1)
	    {
	      atype = char_type_node;
	      src = build_zero_cst (char_type_node);
	    }
	  else
	    {
	      atype = build_array_type_nelts (char_type_node, padding_bytes);
	      src = build_constructor (atype, NULL);
	    }
	  tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
				 build_int_cst (buf->alias_type,
						buf->off + end
						- padding_bytes));
	  gimple *g = gimple_build_assign (dst, src);
	  gimple_set_location (g, buf->loc);
	  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	}
      size_t end_rem = end % UNITS_PER_WORD;
      buf->off += end - end_rem;
      buf->size = end_rem;
      memset (buf->buf, 0, buf->size);
      buf->padding_bytes = 0;
    }
  else
    {
      memmove (buf->buf, buf->buf + end, buf->size - end);
      buf->off += end;
      buf->size -= end;
      buf->padding_bytes = padding_bytes;
    }
}
4254
/* Append PADDING_BYTES padding bytes (bytes whose bits are all set,
   i.e. fully padding) to BUF, flushing as needed so the buffer never
   overflows clear_padding_buf_size.  */

static void
clear_padding_add_padding (clear_padding_struct *buf,
			   HOST_WIDE_INT padding_bytes)
{
  if (padding_bytes == 0)
    return;
  if ((unsigned HOST_WIDE_INT) padding_bytes + buf->size
      > (unsigned HOST_WIDE_INT) clear_padding_buf_size)
    clear_padding_flush (buf, false);
  /* Re-check: a partial flush may still not have made enough room.  */
  if ((unsigned HOST_WIDE_INT) padding_bytes + buf->size
      > (unsigned HOST_WIDE_INT) clear_padding_buf_size)
    {
      /* Fill the buffer to capacity with padding, flush it, and account
	 for the rest without materializing it byte by byte.  */
      memset (buf->buf + buf->size, ~0, clear_padding_buf_size - buf->size);
      padding_bytes -= clear_padding_buf_size - buf->size;
      buf->size = clear_padding_buf_size;
      clear_padding_flush (buf, false);
      gcc_assert (buf->padding_bytes);
      /* At this point buf->buf[0] through buf->buf[buf->size - 1]
	 is guaranteed to be all ones.  */
      padding_bytes += buf->size;
      buf->size = padding_bytes % UNITS_PER_WORD;
      memset (buf->buf, ~0, buf->size);
      buf->off += padding_bytes - buf->size;
      buf->padding_bytes += padding_bytes - buf->size;
    }
  else
    {
      memset (buf->buf + buf->size, ~0, padding_bytes);
      buf->size += padding_bytes;
    }
}
4288
static void clear_padding_type (clear_padding_struct *, tree, HOST_WIDE_INT);

/* Clear padding bits of union type TYPE of size SZ bytes.  A union's
   padding is the intersection of every member's padding, so each member
   is processed into a scratch bitmask (union_ptr) that is AND-ed
   together rather than emitting code per member.  */

static void
clear_padding_union (clear_padding_struct *buf, tree type, HOST_WIDE_INT sz)
{
  clear_padding_struct *union_buf;
  HOST_WIDE_INT start_off = 0, next_off = 0;
  size_t start_size = 0;
  if (buf->union_ptr)
    {
      /* Nested union: reuse BUF itself, aligned back to a word
	 boundary, and restore its position afterwards.  */
      start_off = buf->off + buf->size;
      next_off = start_off + sz;
      start_size = start_off % UNITS_PER_WORD;
      start_off -= start_size;
      clear_padding_flush (buf, true);
      union_buf = buf;
    }
  else
    {
      /* Outermost union: set up a scratch clear_padding_struct whose
	 union_ptr collects the intersection of member padding.  */
      if (sz + buf->size > clear_padding_buf_size)
	clear_padding_flush (buf, false);
      union_buf = XALLOCA (clear_padding_struct);
      union_buf->loc = buf->loc;
      union_buf->base = NULL_TREE;
      union_buf->alias_type = NULL_TREE;
      union_buf->gsi = NULL;
      union_buf->align = 0;
      union_buf->off = 0;
      union_buf->padding_bytes = 0;
      union_buf->sz = sz;
      union_buf->size = 0;
      if (sz + buf->size <= clear_padding_buf_size)
	union_buf->union_ptr = buf->buf + buf->size;
      else
	union_buf->union_ptr = XNEWVEC (unsigned char, sz);
      memset (union_buf->union_ptr, ~0, sz);
    }

  for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL && !DECL_PADDING_P (field))
      {
	if (DECL_SIZE_UNIT (field) == NULL_TREE)
	  {
	    /* Members without a constant size can only be flexible
	       array members (or erroneous fields).  */
	    if (TREE_TYPE (field) == error_mark_node)
	      continue;
	    gcc_assert (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
			&& !COMPLETE_TYPE_P (TREE_TYPE (field)));
	    error_at (buf->loc, "flexible array member %qD does not have "
				"well defined padding bits for %qs",
		      field, "__builtin_clear_padding");
	    continue;
	  }
	HOST_WIDE_INT fldsz = tree_to_shwi (DECL_SIZE_UNIT (field));
	gcc_assert (union_buf->size == 0);
	union_buf->off = start_off;
	union_buf->size = start_size;
	memset (union_buf->buf, ~0, start_size);
	clear_padding_type (union_buf, TREE_TYPE (field), fldsz);
	/* Bytes past this member's end are padding from its view.  */
	clear_padding_add_padding (union_buf, sz - fldsz);
	clear_padding_flush (union_buf, true);
      }

  if (buf == union_buf)
    {
      /* Nested union: restore BUF's position just past the union.  */
      buf->off = next_off;
      buf->size = next_off % UNITS_PER_WORD;
      buf->off -= buf->size;
      memset (buf->buf, ~0, buf->size);
    }
  else if (sz + buf->size <= clear_padding_buf_size)
    /* The intersection was accumulated directly into buf->buf.  */
    buf->size += sz;
  else
    {
      /* Copy the heap-allocated intersection mask back into BUF in
	 buffer-sized chunks, then release it.  */
      unsigned char *union_ptr = union_buf->union_ptr;
      while (sz)
	{
	  clear_padding_flush (buf, false);
	  HOST_WIDE_INT this_sz
	    = MIN ((unsigned HOST_WIDE_INT) sz,
		   clear_padding_buf_size - buf->size);
	  memcpy (buf->buf + buf->size, union_ptr, this_sz);
	  buf->size += this_sz;
	  union_ptr += this_sz;
	  sz -= this_sz;
	}
      XDELETE (union_buf->union_ptr);
    }
}
4379
4380/* The only known floating point formats with padding bits are the
4381 IEEE extended ones. */
4382
4383static bool
4384clear_padding_real_needs_padding_p (tree type)
4385{
4386 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
4387 return (fmt->b == 2
4388 && fmt->signbit_ro == fmt->signbit_rw
4389 && (fmt->signbit_ro == 79 || fmt->signbit_ro == 95));
4390}
4391
4392/* Return true if TYPE might contain any padding bits. */
4393
4394static bool
4395clear_padding_type_may_have_padding_p (tree type)
4396{
4397 switch (TREE_CODE (type))
4398 {
4399 case RECORD_TYPE:
4400 case UNION_TYPE:
4401 return true;
4402 case ARRAY_TYPE:
4403 case COMPLEX_TYPE:
4404 case VECTOR_TYPE:
4405 return clear_padding_type_may_have_padding_p (TREE_TYPE (type));
4406 case REAL_TYPE:
4407 return clear_padding_real_needs_padding_p (type);
4408 default:
4409 return false;
4410 }
4411}
4412
4413/* Emit a runtime loop:
4414 for (; buf.base != end; buf.base += sz)
4415 __builtin_clear_padding (buf.base); */
4416
4417static void
4418clear_padding_emit_loop (clear_padding_struct *buf, tree type, tree end)
4419{
4420 tree l1 = create_artificial_label (buf->loc);
4421 tree l2 = create_artificial_label (buf->loc);
4422 tree l3 = create_artificial_label (buf->loc);
4423 gimple *g = gimple_build_goto (l2);
4424 gimple_set_location (g, buf->loc);
4425 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4426 g = gimple_build_label (l1);
4427 gimple_set_location (g, buf->loc);
4428 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4429 clear_padding_type (buf, type, buf->sz);
4430 clear_padding_flush (buf, true);
4431 g = gimple_build_assign (buf->base, POINTER_PLUS_EXPR, buf->base,
4432 size_int (buf->sz));
4433 gimple_set_location (g, buf->loc);
4434 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4435 g = gimple_build_label (l2);
4436 gimple_set_location (g, buf->loc);
4437 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4438 g = gimple_build_cond (NE_EXPR, buf->base, end, l1, l3);
4439 gimple_set_location (g, buf->loc);
4440 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4441 g = gimple_build_label (l3);
4442 gimple_set_location (g, buf->loc);
4443 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4444}
4445
/* Clear padding bits for TYPE of size SZ bytes, recording them in
   BUF (or emitting code via BUF).  Called recursively from
   gimple_fold_builtin_clear_padding.  */

static void
clear_padding_type (clear_padding_struct *buf, tree type, HOST_WIDE_INT sz)
{
  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      HOST_WIDE_INT cur_pos;
      cur_pos = 0;
      for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL && !DECL_PADDING_P (field))
	  {
	    tree ftype = TREE_TYPE (field);
	    if (DECL_BIT_FIELD (field))
	      {
		HOST_WIDE_INT fldsz = TYPE_PRECISION (ftype);
		if (fldsz == 0)
		  continue;
		HOST_WIDE_INT pos = int_byte_position (field);
		HOST_WIDE_INT bpos
		  = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
		bpos %= BITS_PER_UNIT;
		HOST_WIDE_INT end
		  = ROUND_UP (bpos + fldsz, BITS_PER_UNIT) / BITS_PER_UNIT;
		/* First mark all bytes the bit-field touches as padding,
		   then clear the bits actually occupied by the field.  */
		if (pos + end > cur_pos)
		  {
		    clear_padding_add_padding (buf, pos + end - cur_pos);
		    cur_pos = pos + end;
		  }
		gcc_assert (cur_pos > pos
			    && ((unsigned HOST_WIDE_INT) buf->size
				>= (unsigned HOST_WIDE_INT) cur_pos - pos));
		unsigned char *p = buf->buf + buf->size - (cur_pos - pos);
		if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
		  sorry_at (buf->loc, "PDP11 bit-field handling unsupported"
			    " in %qs", "__builtin_clear_padding");
		else if (BYTES_BIG_ENDIAN)
		  {
		    /* Big endian.  */
		    if (bpos + fldsz <= BITS_PER_UNIT)
		      *p &= ~(((1 << fldsz) - 1)
			      << (BITS_PER_UNIT - bpos - fldsz));
		    else
		      {
			if (bpos)
			  {
			    *p &= ~(((1U << BITS_PER_UNIT) - 1) >> bpos);
			    p++;
			    fldsz -= BITS_PER_UNIT - bpos;
			  }
			memset (p, 0, fldsz / BITS_PER_UNIT);
			p += fldsz / BITS_PER_UNIT;
			fldsz %= BITS_PER_UNIT;
			if (fldsz)
			  *p &= ((1U << BITS_PER_UNIT) - 1) >> fldsz;
		      }
		  }
		else
		  {
		    /* Little endian.  */
		    if (bpos + fldsz <= BITS_PER_UNIT)
		      *p &= ~(((1 << fldsz) - 1) << bpos);
		    else
		      {
			if (bpos)
			  {
			    *p &= ~(((1 << BITS_PER_UNIT) - 1) << bpos);
			    p++;
			    fldsz -= BITS_PER_UNIT - bpos;
			  }
			memset (p, 0, fldsz / BITS_PER_UNIT);
			p += fldsz / BITS_PER_UNIT;
			fldsz %= BITS_PER_UNIT;
			if (fldsz)
			  *p &= ~((1 << fldsz) - 1);
		      }
		  }
	      }
	    else if (DECL_SIZE_UNIT (field) == NULL_TREE)
	      {
		/* Only flexible array members (or erroneous fields)
		   lack a constant size.  */
		if (ftype == error_mark_node)
		  continue;
		gcc_assert (TREE_CODE (ftype) == ARRAY_TYPE
			    && !COMPLETE_TYPE_P (ftype));
		error_at (buf->loc, "flexible array member %qD does not have "
				    "well defined padding bits for %qs",
			  field, "__builtin_clear_padding");
	      }
	    else if (is_empty_type (TREE_TYPE (field)))
	      continue;
	    else
	      {
		/* Ordinary field: gap before it is padding, then recurse
		   into the field's own type.  */
		HOST_WIDE_INT pos = int_byte_position (field);
		HOST_WIDE_INT fldsz = tree_to_shwi (DECL_SIZE_UNIT (field));
		gcc_assert (pos >= 0 && fldsz >= 0 && pos >= cur_pos);
		clear_padding_add_padding (buf, pos - cur_pos);
		cur_pos = pos;
		clear_padding_type (buf, TREE_TYPE (field), fldsz);
		cur_pos += fldsz;
	      }
	  }
      gcc_assert (sz >= cur_pos);
      /* Trailing bytes after the last field are padding.  */
      clear_padding_add_padding (buf, sz - cur_pos);
      break;
    case ARRAY_TYPE:
      HOST_WIDE_INT nelts, fldsz;
      fldsz = int_size_in_bytes (TREE_TYPE (type));
      if (fldsz == 0)
	break;
      nelts = sz / fldsz;
      if (nelts > 1
	  && sz > 8 * UNITS_PER_WORD
	  && buf->union_ptr == NULL
	  && clear_padding_type_may_have_padding_p (TREE_TYPE (type)))
	{
	  /* For sufficiently large array of more than one elements,
	     emit a runtime loop to keep code size manageable.  */
	  tree base = buf->base;
	  unsigned int prev_align = buf->align;
	  HOST_WIDE_INT off = buf->off + buf->size;
	  HOST_WIDE_INT prev_sz = buf->sz;
	  clear_padding_flush (buf, true);
	  tree elttype = TREE_TYPE (type);
	  buf->base = create_tmp_var (build_pointer_type (elttype));
	  tree end = make_ssa_name (TREE_TYPE (buf->base));
	  gimple *g = gimple_build_assign (buf->base, POINTER_PLUS_EXPR,
					   base, size_int (off));
	  gimple_set_location (g, buf->loc);
	  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	  g = gimple_build_assign (end, POINTER_PLUS_EXPR, buf->base,
				   size_int (sz));
	  gimple_set_location (g, buf->loc);
	  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	  /* Redirect BUF at one element and emit the loop, then restore
	     its previous state.  */
	  buf->sz = fldsz;
	  buf->align = TYPE_ALIGN (elttype);
	  buf->off = 0;
	  buf->size = 0;
	  clear_padding_emit_loop (buf, elttype, end);
	  buf->base = base;
	  buf->sz = prev_sz;
	  buf->align = prev_align;
	  buf->size = off % UNITS_PER_WORD;
	  buf->off = off - buf->size;
	  memset (buf->buf, 0, buf->size);
	  break;
	}
      for (HOST_WIDE_INT i = 0; i < nelts; i++)
	clear_padding_type (buf, TREE_TYPE (type), fldsz);
      break;
    case UNION_TYPE:
      clear_padding_union (buf, type, sz);
      break;
    case REAL_TYPE:
      gcc_assert ((size_t) sz <= clear_padding_unit);
      if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
	clear_padding_flush (buf, false);
      if (clear_padding_real_needs_padding_p (type))
	{
	  /* Use native_interpret_expr + native_encode_expr to figure out
	     which bits are padding.  */
	  memset (buf->buf + buf->size, ~0, sz);
	  tree cst = native_interpret_expr (type, buf->buf + buf->size, sz);
	  gcc_assert (cst && TREE_CODE (cst) == REAL_CST);
	  int len = native_encode_expr (cst, buf->buf + buf->size, sz);
	  gcc_assert (len > 0 && (size_t) len == (size_t) sz);
	  /* Bytes the encoding didn't write back stay all-ones, i.e.
	     padding, after the XOR below.  */
	  for (size_t i = 0; i < (size_t) sz; i++)
	    buf->buf[buf->size + i] ^= ~0;
	}
      else
	memset (buf->buf + buf->size, 0, sz);
      buf->size += sz;
      break;
    case COMPLEX_TYPE:
      fldsz = int_size_in_bytes (TREE_TYPE (type));
      clear_padding_type (buf, TREE_TYPE (type), fldsz);
      clear_padding_type (buf, TREE_TYPE (type), fldsz);
      break;
    case VECTOR_TYPE:
      nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
      fldsz = int_size_in_bytes (TREE_TYPE (type));
      for (HOST_WIDE_INT i = 0; i < nelts; i++)
	clear_padding_type (buf, TREE_TYPE (type), fldsz);
      break;
    case NULLPTR_TYPE:
      /* nullptr_t's representation is all padding.  */
      gcc_assert ((size_t) sz <= clear_padding_unit);
      if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
	clear_padding_flush (buf, false);
      memset (buf->buf + buf->size, ~0, sz);
      buf->size += sz;
      break;
    default:
      /* Scalar types have no padding of their own.  */
      gcc_assert ((size_t) sz <= clear_padding_unit);
      if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
	clear_padding_flush (buf, false);
      memset (buf->buf + buf->size, 0, sz);
      buf->size += sz;
      break;
    }
}
4647
4648/* Fold __builtin_clear_padding builtin. */
4649
4650static bool
4651gimple_fold_builtin_clear_padding (gimple_stmt_iterator *gsi)
4652{
4653 gimple *stmt = gsi_stmt (*gsi);
4654 gcc_assert (gimple_call_num_args (stmt) == 2);
4655 tree ptr = gimple_call_arg (stmt, 0);
4656 tree typearg = gimple_call_arg (stmt, 1);
4657 tree type = TREE_TYPE (TREE_TYPE (typearg));
4658 location_t loc = gimple_location (stmt);
4659 clear_padding_struct buf;
4660 gimple_stmt_iterator gsiprev = *gsi;
4661 /* This should be folded during the lower pass. */
4662 gcc_assert (!gimple_in_ssa_p (cfun) && cfun->cfg == NULL);
4663 gcc_assert (COMPLETE_TYPE_P (type));
4664 gsi_prev (&gsiprev);
4665
4666 buf.loc = loc;
4667 buf.base = ptr;
4668 buf.alias_type = NULL_TREE;
4669 buf.gsi = gsi;
4670 buf.align = get_pointer_alignment (ptr);
4671 unsigned int talign = min_align_of_type (type) * BITS_PER_UNIT;
4672 buf.align = MAX (buf.align, talign);
4673 buf.off = 0;
4674 buf.padding_bytes = 0;
4675 buf.size = 0;
4676 buf.sz = int_size_in_bytes (type);
4677 buf.union_ptr = NULL;
4678 if (buf.sz < 0 && int_size_in_bytes (strip_array_types (type)) < 0)
4679 sorry_at (loc, "%s not supported for variable length aggregates",
4680 "__builtin_clear_padding");
4681 /* The implementation currently assumes 8-bit host and target
4682 chars which is the case for all currently supported targets
4683 and hosts and is required e.g. for native_{encode,interpret}* APIs. */
4684 else if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
4685 sorry_at (loc, "%s not supported on this target",
4686 "__builtin_clear_padding");
4687 else if (!clear_padding_type_may_have_padding_p (type))
4688 ;
4689 else if (TREE_CODE (type) == ARRAY_TYPE && buf.sz < 0)
4690 {
4691 tree sz = TYPE_SIZE_UNIT (type);
4692 tree elttype = type;
4693 /* Only supports C/C++ VLAs and flattens all the VLA levels. */
4694 while (TREE_CODE (elttype) == ARRAY_TYPE
4695 && int_size_in_bytes (elttype) < 0)
4696 elttype = TREE_TYPE (elttype);
4697 HOST_WIDE_INT eltsz = int_size_in_bytes (elttype);
4698 gcc_assert (eltsz >= 0);
4699 if (eltsz)
4700 {
4701 buf.base = create_tmp_var (build_pointer_type (elttype));
4702 tree end = make_ssa_name (TREE_TYPE (buf.base));
4703 gimple *g = gimple_build_assign (buf.base, ptr);
4704 gimple_set_location (g, loc);
4705 gsi_insert_before (gsi, g, GSI_SAME_STMT);
4706 g = gimple_build_assign (end, POINTER_PLUS_EXPR, buf.base, sz);
4707 gimple_set_location (g, loc);
4708 gsi_insert_before (gsi, g, GSI_SAME_STMT);
4709 buf.sz = eltsz;
4710 buf.align = TYPE_ALIGN (elttype);
4711 buf.alias_type = build_pointer_type (elttype);
4712 clear_padding_emit_loop (&buf, elttype, end);
4713 }
4714 }
4715 else
4716 {
4717 if (!is_gimple_mem_ref_addr (buf.base))
4718 {
4719 buf.base = make_ssa_name (TREE_TYPE (ptr));
4720 gimple *g = gimple_build_assign (buf.base, ptr);
4721 gimple_set_location (g, loc);
4722 gsi_insert_before (gsi, g, GSI_SAME_STMT);
4723 }
4724 buf.alias_type = build_pointer_type (type);
4725 clear_padding_type (&buf, type, buf.sz);
4726 clear_padding_flush (&buf, true);
4727 }
4728
4729 gimple_stmt_iterator gsiprev2 = *gsi;
4730 gsi_prev (&gsiprev2);
4731 if (gsi_stmt (gsiprev) == gsi_stmt (gsiprev2))
4732 gsi_replace (gsi, gimple_build_nop (), true);
4733 else
4734 {
4735 gsi_remove (gsi, true);
4736 *gsi = gsiprev2;
4737 }
4738 return true;
4739}
4740
dcb7fae2
RB
4741/* Fold the non-target builtin at *GSI and return whether any simplification
4742 was made. */
cbdd87d4 4743
fef5a0d9 4744static bool
dcb7fae2 4745gimple_fold_builtin (gimple_stmt_iterator *gsi)
cbdd87d4 4746{
538dd0b7 4747 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
fef5a0d9 4748 tree callee = gimple_call_fndecl (stmt);
cbdd87d4 4749
dcb7fae2
RB
4750 /* Give up for always_inline inline builtins until they are
4751 inlined. */
4752 if (avoid_folding_inline_builtin (callee))
4753 return false;
cbdd87d4 4754
edd7ae68
RB
4755 unsigned n = gimple_call_num_args (stmt);
4756 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
4757 switch (fcode)
cbdd87d4 4758 {
b3d8d88e
MS
4759 case BUILT_IN_BCMP:
4760 return gimple_fold_builtin_bcmp (gsi);
4761 case BUILT_IN_BCOPY:
4762 return gimple_fold_builtin_bcopy (gsi);
dcb7fae2 4763 case BUILT_IN_BZERO:
b3d8d88e
MS
4764 return gimple_fold_builtin_bzero (gsi);
4765
dcb7fae2
RB
4766 case BUILT_IN_MEMSET:
4767 return gimple_fold_builtin_memset (gsi,
4768 gimple_call_arg (stmt, 1),
4769 gimple_call_arg (stmt, 2));
dcb7fae2 4770 case BUILT_IN_MEMCPY:
dcb7fae2 4771 case BUILT_IN_MEMPCPY:
dcb7fae2
RB
4772 case BUILT_IN_MEMMOVE:
4773 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
0d67a510 4774 gimple_call_arg (stmt, 1), fcode);
dcb7fae2
RB
4775 case BUILT_IN_SPRINTF_CHK:
4776 case BUILT_IN_VSPRINTF_CHK:
edd7ae68 4777 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
dcb7fae2
RB
4778 case BUILT_IN_STRCAT_CHK:
4779 return gimple_fold_builtin_strcat_chk (gsi);
745583f9
RB
4780 case BUILT_IN_STRNCAT_CHK:
4781 return gimple_fold_builtin_strncat_chk (gsi);
cbdd87d4 4782 case BUILT_IN_STRLEN:
dcb7fae2 4783 return gimple_fold_builtin_strlen (gsi);
cbdd87d4 4784 case BUILT_IN_STRCPY:
dcb7fae2 4785 return gimple_fold_builtin_strcpy (gsi,
fef5a0d9 4786 gimple_call_arg (stmt, 0),
dcb7fae2 4787 gimple_call_arg (stmt, 1));
cbdd87d4 4788 case BUILT_IN_STRNCPY:
dcb7fae2 4789 return gimple_fold_builtin_strncpy (gsi,
fef5a0d9
RB
4790 gimple_call_arg (stmt, 0),
4791 gimple_call_arg (stmt, 1),
dcb7fae2 4792 gimple_call_arg (stmt, 2));
9a7eefec 4793 case BUILT_IN_STRCAT:
dcb7fae2
RB
4794 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
4795 gimple_call_arg (stmt, 1));
ad03a744
RB
4796 case BUILT_IN_STRNCAT:
4797 return gimple_fold_builtin_strncat (gsi);
71dea1dd 4798 case BUILT_IN_INDEX:
912d9ec3 4799 case BUILT_IN_STRCHR:
71dea1dd
WD
4800 return gimple_fold_builtin_strchr (gsi, false);
4801 case BUILT_IN_RINDEX:
4802 case BUILT_IN_STRRCHR:
4803 return gimple_fold_builtin_strchr (gsi, true);
c8952930
JJ
4804 case BUILT_IN_STRSTR:
4805 return gimple_fold_builtin_strstr (gsi);
a918bfbf 4806 case BUILT_IN_STRCMP:
8b0b334a 4807 case BUILT_IN_STRCMP_EQ:
a918bfbf
ML
4808 case BUILT_IN_STRCASECMP:
4809 case BUILT_IN_STRNCMP:
8b0b334a 4810 case BUILT_IN_STRNCMP_EQ:
a918bfbf
ML
4811 case BUILT_IN_STRNCASECMP:
4812 return gimple_fold_builtin_string_compare (gsi);
488c6247
ML
4813 case BUILT_IN_MEMCHR:
4814 return gimple_fold_builtin_memchr (gsi);
cbdd87d4 4815 case BUILT_IN_FPUTS:
dcb7fae2
RB
4816 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
4817 gimple_call_arg (stmt, 1), false);
cbdd87d4 4818 case BUILT_IN_FPUTS_UNLOCKED:
dcb7fae2
RB
4819 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
4820 gimple_call_arg (stmt, 1), true);
cbdd87d4
RG
4821 case BUILT_IN_MEMCPY_CHK:
4822 case BUILT_IN_MEMPCPY_CHK:
4823 case BUILT_IN_MEMMOVE_CHK:
4824 case BUILT_IN_MEMSET_CHK:
dcb7fae2 4825 return gimple_fold_builtin_memory_chk (gsi,
fef5a0d9
RB
4826 gimple_call_arg (stmt, 0),
4827 gimple_call_arg (stmt, 1),
4828 gimple_call_arg (stmt, 2),
4829 gimple_call_arg (stmt, 3),
edd7ae68 4830 fcode);
2625bb5d
RB
4831 case BUILT_IN_STPCPY:
4832 return gimple_fold_builtin_stpcpy (gsi);
cbdd87d4
RG
4833 case BUILT_IN_STRCPY_CHK:
4834 case BUILT_IN_STPCPY_CHK:
dcb7fae2 4835 return gimple_fold_builtin_stxcpy_chk (gsi,
fef5a0d9
RB
4836 gimple_call_arg (stmt, 0),
4837 gimple_call_arg (stmt, 1),
4838 gimple_call_arg (stmt, 2),
edd7ae68 4839 fcode);
cbdd87d4 4840 case BUILT_IN_STRNCPY_CHK:
f3fc9b80 4841 case BUILT_IN_STPNCPY_CHK:
fef5a0d9
RB
4842 return gimple_fold_builtin_stxncpy_chk (gsi,
4843 gimple_call_arg (stmt, 0),
4844 gimple_call_arg (stmt, 1),
4845 gimple_call_arg (stmt, 2),
4846 gimple_call_arg (stmt, 3),
edd7ae68 4847 fcode);
cbdd87d4
RG
4848 case BUILT_IN_SNPRINTF_CHK:
4849 case BUILT_IN_VSNPRINTF_CHK:
edd7ae68 4850 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
a104bd88 4851
edd7ae68
RB
4852 case BUILT_IN_FPRINTF:
4853 case BUILT_IN_FPRINTF_UNLOCKED:
4854 case BUILT_IN_VFPRINTF:
4855 if (n == 2 || n == 3)
4856 return gimple_fold_builtin_fprintf (gsi,
4857 gimple_call_arg (stmt, 0),
4858 gimple_call_arg (stmt, 1),
4859 n == 3
4860 ? gimple_call_arg (stmt, 2)
4861 : NULL_TREE,
4862 fcode);
4863 break;
4864 case BUILT_IN_FPRINTF_CHK:
4865 case BUILT_IN_VFPRINTF_CHK:
4866 if (n == 3 || n == 4)
4867 return gimple_fold_builtin_fprintf (gsi,
4868 gimple_call_arg (stmt, 0),
4869 gimple_call_arg (stmt, 2),
4870 n == 4
4871 ? gimple_call_arg (stmt, 3)
4872 : NULL_TREE,
4873 fcode);
4874 break;
ad03a744
RB
4875 case BUILT_IN_PRINTF:
4876 case BUILT_IN_PRINTF_UNLOCKED:
4877 case BUILT_IN_VPRINTF:
4878 if (n == 1 || n == 2)
4879 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
4880 n == 2
4881 ? gimple_call_arg (stmt, 1)
4882 : NULL_TREE, fcode);
4883 break;
4884 case BUILT_IN_PRINTF_CHK:
4885 case BUILT_IN_VPRINTF_CHK:
4886 if (n == 2 || n == 3)
4887 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
4888 n == 3
4889 ? gimple_call_arg (stmt, 2)
4890 : NULL_TREE, fcode);
242a37f1 4891 break;
48126138
NS
4892 case BUILT_IN_ACC_ON_DEVICE:
4893 return gimple_fold_builtin_acc_on_device (gsi,
4894 gimple_call_arg (stmt, 0));
fe75f732
PK
4895 case BUILT_IN_REALLOC:
4896 return gimple_fold_builtin_realloc (gsi);
4897
1bea0d0a
JJ
4898 case BUILT_IN_CLEAR_PADDING:
4899 return gimple_fold_builtin_clear_padding (gsi);
4900
fef5a0d9
RB
4901 default:;
4902 }
4903
4904 /* Try the generic builtin folder. */
4905 bool ignore = (gimple_call_lhs (stmt) == NULL);
4906 tree result = fold_call_stmt (stmt, ignore);
4907 if (result)
4908 {
4909 if (ignore)
4910 STRIP_NOPS (result);
4911 else
4912 result = fold_convert (gimple_call_return_type (stmt), result);
4913 if (!update_call_from_tree (gsi, result))
4914 gimplify_and_update_call_from_tree (gsi, result);
4915 return true;
4916 }
4917
4918 return false;
4919}
4920
451e8dae
NS
4921/* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
4922 function calls to constants, where possible. */
4923
4924static tree
4925fold_internal_goacc_dim (const gimple *call)
4926{
629b3d75
MJ
4927 int axis = oacc_get_ifn_dim_arg (call);
4928 int size = oacc_get_fn_dim_size (current_function_decl, axis);
451e8dae 4929 tree result = NULL_TREE;
67d2229e 4930 tree type = TREE_TYPE (gimple_call_lhs (call));
451e8dae 4931
67d2229e 4932 switch (gimple_call_internal_fn (call))
451e8dae 4933 {
67d2229e
TV
4934 case IFN_GOACC_DIM_POS:
4935 /* If the size is 1, we know the answer. */
4936 if (size == 1)
4937 result = build_int_cst (type, 0);
4938 break;
4939 case IFN_GOACC_DIM_SIZE:
4940 /* If the size is not dynamic, we know the answer. */
4941 if (size)
4942 result = build_int_cst (type, size);
4943 break;
4944 default:
4945 break;
451e8dae
NS
4946 }
4947
4948 return result;
4949}
4950
849a76a5
JJ
4951/* Return true if stmt is __atomic_compare_exchange_N call which is suitable
4952 for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
4953 &var where var is only addressable because of such calls. */
4954
4955bool
4956optimize_atomic_compare_exchange_p (gimple *stmt)
4957{
4958 if (gimple_call_num_args (stmt) != 6
4959 || !flag_inline_atomics
4960 || !optimize
45b2222a 4961 || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
849a76a5
JJ
4962 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
4963 || !gimple_vdef (stmt)
4964 || !gimple_vuse (stmt))
4965 return false;
4966
4967 tree fndecl = gimple_call_fndecl (stmt);
4968 switch (DECL_FUNCTION_CODE (fndecl))
4969 {
4970 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
4971 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
4972 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
4973 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
4974 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
4975 break;
4976 default:
4977 return false;
4978 }
4979
4980 tree expected = gimple_call_arg (stmt, 1);
4981 if (TREE_CODE (expected) != ADDR_EXPR
1de3c940
JJ
4982 || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
4983 return false;
4984
4985 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
4986 if (!is_gimple_reg_type (etype)
849a76a5 4987 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
1de3c940
JJ
4988 || TREE_THIS_VOLATILE (etype)
4989 || VECTOR_TYPE_P (etype)
4990 || TREE_CODE (etype) == COMPLEX_TYPE
4991 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
4992 might not preserve all the bits. See PR71716. */
4993 || SCALAR_FLOAT_TYPE_P (etype)
73a699ae
RS
4994 || maybe_ne (TYPE_PRECISION (etype),
4995 GET_MODE_BITSIZE (TYPE_MODE (etype))))
849a76a5
JJ
4996 return false;
4997
4998 tree weak = gimple_call_arg (stmt, 3);
4999 if (!integer_zerop (weak) && !integer_onep (weak))
5000 return false;
5001
5002 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
5003 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
5004 machine_mode mode = TYPE_MODE (itype);
5005
5006 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
5007 == CODE_FOR_nothing
5008 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
5009 return false;
5010
cf098191 5011 if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
849a76a5
JJ
5012 return false;
5013
5014 return true;
5015}
5016
5017/* Fold
5018 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
5019 into
5020 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
5021 i = IMAGPART_EXPR <t>;
5022 r = (_Bool) i;
5023 e = REALPART_EXPR <t>; */
5024
5025void
5026fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
5027{
5028 gimple *stmt = gsi_stmt (*gsi);
5029 tree fndecl = gimple_call_fndecl (stmt);
5030 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
5031 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
5032 tree ctype = build_complex_type (itype);
5033 tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
cc195d46
JJ
5034 bool throws = false;
5035 edge e = NULL;
849a76a5
JJ
5036 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
5037 expected);
5038 gsi_insert_before (gsi, g, GSI_SAME_STMT);
5039 gimple_stmt_iterator gsiret = gsi_for_stmt (g);
5040 if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
5041 {
5042 g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
5043 build1 (VIEW_CONVERT_EXPR, itype,
5044 gimple_assign_lhs (g)));
5045 gsi_insert_before (gsi, g, GSI_SAME_STMT);
5046 }
5047 int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
5048 + int_size_in_bytes (itype);
5049 g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
5050 gimple_call_arg (stmt, 0),
5051 gimple_assign_lhs (g),
5052 gimple_call_arg (stmt, 2),
5053 build_int_cst (integer_type_node, flag),
5054 gimple_call_arg (stmt, 4),
5055 gimple_call_arg (stmt, 5));
5056 tree lhs = make_ssa_name (ctype);
5057 gimple_call_set_lhs (g, lhs);
779724a5 5058 gimple_move_vops (g, stmt);
cc195d46 5059 tree oldlhs = gimple_call_lhs (stmt);
36bbc05d 5060 if (stmt_can_throw_internal (cfun, stmt))
cc195d46
JJ
5061 {
5062 throws = true;
5063 e = find_fallthru_edge (gsi_bb (*gsi)->succs);
5064 }
5065 gimple_call_set_nothrow (as_a <gcall *> (g),
5066 gimple_call_nothrow_p (as_a <gcall *> (stmt)));
5067 gimple_call_set_lhs (stmt, NULL_TREE);
5068 gsi_replace (gsi, g, true);
5069 if (oldlhs)
849a76a5 5070 {
849a76a5
JJ
5071 g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
5072 build1 (IMAGPART_EXPR, itype, lhs));
cc195d46
JJ
5073 if (throws)
5074 {
5075 gsi_insert_on_edge_immediate (e, g);
5076 *gsi = gsi_for_stmt (g);
5077 }
5078 else
5079 gsi_insert_after (gsi, g, GSI_NEW_STMT);
5080 g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
5081 gsi_insert_after (gsi, g, GSI_NEW_STMT);
849a76a5 5082 }
849a76a5
JJ
5083 g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
5084 build1 (REALPART_EXPR, itype, lhs));
cc195d46
JJ
5085 if (throws && oldlhs == NULL_TREE)
5086 {
5087 gsi_insert_on_edge_immediate (e, g);
5088 *gsi = gsi_for_stmt (g);
5089 }
5090 else
5091 gsi_insert_after (gsi, g, GSI_NEW_STMT);
849a76a5
JJ
5092 if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
5093 {
5094 g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
5095 VIEW_CONVERT_EXPR,
5096 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
5097 gimple_assign_lhs (g)));
5098 gsi_insert_after (gsi, g, GSI_NEW_STMT);
5099 }
5100 g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
5101 gsi_insert_after (gsi, g, GSI_NEW_STMT);
5102 *gsi = gsiret;
5103}
5104
1304953e
JJ
5105/* Return true if ARG0 CODE ARG1 in infinite signed precision operation
5106 doesn't fit into TYPE. The test for overflow should be regardless of
5107 -fwrapv, and even for unsigned types. */
5108
5109bool
5110arith_overflowed_p (enum tree_code code, const_tree type,
5111 const_tree arg0, const_tree arg1)
5112{
1304953e
JJ
5113 widest2_int warg0 = widest2_int_cst (arg0);
5114 widest2_int warg1 = widest2_int_cst (arg1);
5115 widest2_int wres;
5116 switch (code)
5117 {
5118 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
5119 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
5120 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
5121 default: gcc_unreachable ();
5122 }
5123 signop sign = TYPE_SIGN (type);
5124 if (sign == UNSIGNED && wi::neg_p (wres))
5125 return true;
5126 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
5127}
5128
868363d4
RS
5129/* If IFN_MASK_LOAD/STORE call CALL is unconditional, return a MEM_REF
5130 for the memory it references, otherwise return null. VECTYPE is the
5131 type of the memory vector. */
5132
5133static tree
5134gimple_fold_mask_load_store_mem_ref (gcall *call, tree vectype)
5135{
5136 tree ptr = gimple_call_arg (call, 0);
5137 tree alias_align = gimple_call_arg (call, 1);
5138 tree mask = gimple_call_arg (call, 2);
5139 if (!tree_fits_uhwi_p (alias_align) || !integer_all_onesp (mask))
5140 return NULL_TREE;
5141
5142 unsigned HOST_WIDE_INT align = tree_to_uhwi (alias_align) * BITS_PER_UNIT;
5143 if (TYPE_ALIGN (vectype) != align)
5144 vectype = build_aligned_type (vectype, align);
5145 tree offset = build_zero_cst (TREE_TYPE (alias_align));
5146 return fold_build2 (MEM_REF, vectype, ptr, offset);
5147}
5148
5149/* Try to fold IFN_MASK_LOAD call CALL. Return true on success. */
5150
5151static bool
5152gimple_fold_mask_load (gimple_stmt_iterator *gsi, gcall *call)
5153{
5154 tree lhs = gimple_call_lhs (call);
5155 if (!lhs)
5156 return false;
5157
5158 if (tree rhs = gimple_fold_mask_load_store_mem_ref (call, TREE_TYPE (lhs)))
5159 {
5160 gassign *new_stmt = gimple_build_assign (lhs, rhs);
5161 gimple_set_location (new_stmt, gimple_location (call));
5162 gimple_move_vops (new_stmt, call);
5163 gsi_replace (gsi, new_stmt, false);
5164 return true;
5165 }
5166 return false;
5167}
5168
5169/* Try to fold IFN_MASK_STORE call CALL. Return true on success. */
5170
5171static bool
5172gimple_fold_mask_store (gimple_stmt_iterator *gsi, gcall *call)
5173{
5174 tree rhs = gimple_call_arg (call, 3);
5175 if (tree lhs = gimple_fold_mask_load_store_mem_ref (call, TREE_TYPE (rhs)))
5176 {
5177 gassign *new_stmt = gimple_build_assign (lhs, rhs);
5178 gimple_set_location (new_stmt, gimple_location (call));
5179 gimple_move_vops (new_stmt, call);
5180 gsi_replace (gsi, new_stmt, false);
5181 return true;
5182 }
5183 return false;
5184}
5185
cbdd87d4
RG
5186/* Attempt to fold a call statement referenced by the statement iterator GSI.
5187 The statement may be replaced by another statement, e.g., if the call
5188 simplifies to a constant value. Return true if any changes were made.
5189 It is assumed that the operands have been previously folded. */
5190
e021c122 5191static bool
ceeffab0 5192gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
cbdd87d4 5193{
538dd0b7 5194 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3b45a007 5195 tree callee;
e021c122
RG
5196 bool changed = false;
5197 unsigned i;
cbdd87d4 5198
e021c122
RG
5199 /* Fold *& in call arguments. */
5200 for (i = 0; i < gimple_call_num_args (stmt); ++i)
5201 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
5202 {
5203 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
5204 if (tmp)
5205 {
5206 gimple_call_set_arg (stmt, i, tmp);
5207 changed = true;
5208 }
5209 }
3b45a007
RG
5210
5211 /* Check for virtual calls that became direct calls. */
5212 callee = gimple_call_fn (stmt);
25583c4f 5213 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
3b45a007 5214 {
49c471e3
MJ
5215 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
5216 {
450ad0cd
JH
5217 if (dump_file && virtual_method_call_p (callee)
5218 && !possible_polymorphic_call_target_p
6f8091fc
JH
5219 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
5220 (OBJ_TYPE_REF_EXPR (callee)))))
450ad0cd
JH
5221 {
5222 fprintf (dump_file,
a70e9985 5223 "Type inheritance inconsistent devirtualization of ");
450ad0cd
JH
5224 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5225 fprintf (dump_file, " to ");
5226 print_generic_expr (dump_file, callee, TDF_SLIM);
5227 fprintf (dump_file, "\n");
5228 }
5229
49c471e3 5230 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
e021c122
RG
5231 changed = true;
5232 }
a70e9985 5233 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
e021c122 5234 {
61dd6a2e
JH
5235 bool final;
5236 vec <cgraph_node *>targets
058d0a90 5237 = possible_polymorphic_call_targets (callee, stmt, &final);
2b5f0895 5238 if (final && targets.length () <= 1 && dbg_cnt (devirt))
e021c122 5239 {
a70e9985 5240 tree lhs = gimple_call_lhs (stmt);
2b5f0895
XDL
5241 if (dump_enabled_p ())
5242 {
4f5b9c80 5243 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
2b5f0895
XDL
5244 "folding virtual function call to %s\n",
5245 targets.length () == 1
5246 ? targets[0]->name ()
5247 : "__builtin_unreachable");
5248 }
61dd6a2e 5249 if (targets.length () == 1)
cf3e5a89 5250 {
18954840
JJ
5251 tree fndecl = targets[0]->decl;
5252 gimple_call_set_fndecl (stmt, fndecl);
cf3e5a89 5253 changed = true;
18954840
JJ
5254 /* If changing the call to __cxa_pure_virtual
5255 or similar noreturn function, adjust gimple_call_fntype
5256 too. */
865f7046 5257 if (gimple_call_noreturn_p (stmt)
18954840
JJ
5258 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
5259 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
5260 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
5261 == void_type_node))
5262 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
a70e9985 5263 /* If the call becomes noreturn, remove the lhs. */
3cee7e4e
MP
5264 if (lhs
5265 && gimple_call_noreturn_p (stmt)
18954840 5266 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
abd3a68c 5267 || should_remove_lhs_p (lhs)))
a70e9985
JJ
5268 {
5269 if (TREE_CODE (lhs) == SSA_NAME)
5270 {
b731b390 5271 tree var = create_tmp_var (TREE_TYPE (lhs));
a70e9985 5272 tree def = get_or_create_ssa_default_def (cfun, var);
355fe088 5273 gimple *new_stmt = gimple_build_assign (lhs, def);
a70e9985
JJ
5274 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
5275 }
5276 gimple_call_set_lhs (stmt, NULL_TREE);
5277 }
0b986c6a 5278 maybe_remove_unused_call_args (cfun, stmt);
cf3e5a89 5279 }
a70e9985 5280 else
cf3e5a89
JJ
5281 {
5282 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
355fe088 5283 gimple *new_stmt = gimple_build_call (fndecl, 0);
cf3e5a89 5284 gimple_set_location (new_stmt, gimple_location (stmt));
2da6996c
RB
5285 /* If the call had a SSA name as lhs morph that into
5286 an uninitialized value. */
a70e9985
JJ
5287 if (lhs && TREE_CODE (lhs) == SSA_NAME)
5288 {
b731b390 5289 tree var = create_tmp_var (TREE_TYPE (lhs));
2da6996c
RB
5290 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
5291 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
5292 set_ssa_default_def (cfun, var, lhs);
42e52a51 5293 }
779724a5 5294 gimple_move_vops (new_stmt, stmt);
2da6996c 5295 gsi_replace (gsi, new_stmt, false);
cf3e5a89
JJ
5296 return true;
5297 }
e021c122 5298 }
49c471e3 5299 }
e021c122 5300 }
49c471e3 5301
f2d3d07e
RH
5302 /* Check for indirect calls that became direct calls, and then
5303 no longer require a static chain. */
5304 if (gimple_call_chain (stmt))
5305 {
5306 tree fn = gimple_call_fndecl (stmt);
5307 if (fn && !DECL_STATIC_CHAIN (fn))
5308 {
5309 gimple_call_set_chain (stmt, NULL);
5310 changed = true;
5311 }
5312 else
5313 {
5314 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
5315 if (tmp)
5316 {
5317 gimple_call_set_chain (stmt, tmp);
5318 changed = true;
5319 }
5320 }
5321 }
5322
e021c122
RG
5323 if (inplace)
5324 return changed;
5325
5326 /* Check for builtins that CCP can handle using information not
5327 available in the generic fold routines. */
fef5a0d9
RB
5328 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
5329 {
5330 if (gimple_fold_builtin (gsi))
5331 changed = true;
5332 }
5333 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
e021c122 5334 {
ea679d55 5335 changed |= targetm.gimple_fold_builtin (gsi);
3b45a007 5336 }
368b454d 5337 else if (gimple_call_internal_p (stmt))
ed9c79e1 5338 {
368b454d
JJ
5339 enum tree_code subcode = ERROR_MARK;
5340 tree result = NULL_TREE;
1304953e
JJ
5341 bool cplx_result = false;
5342 tree overflow = NULL_TREE;
368b454d
JJ
5343 switch (gimple_call_internal_fn (stmt))
5344 {
5345 case IFN_BUILTIN_EXPECT:
5346 result = fold_builtin_expect (gimple_location (stmt),
5347 gimple_call_arg (stmt, 0),
5348 gimple_call_arg (stmt, 1),
1e9168b2
ML
5349 gimple_call_arg (stmt, 2),
5350 NULL_TREE);
368b454d 5351 break;
0e82f089 5352 case IFN_UBSAN_OBJECT_SIZE:
ca1150f0
JJ
5353 {
5354 tree offset = gimple_call_arg (stmt, 1);
5355 tree objsize = gimple_call_arg (stmt, 2);
5356 if (integer_all_onesp (objsize)
5357 || (TREE_CODE (offset) == INTEGER_CST
5358 && TREE_CODE (objsize) == INTEGER_CST
5359 && tree_int_cst_le (offset, objsize)))
5360 {
5361 replace_call_with_value (gsi, NULL_TREE);
5362 return true;
5363 }
5364 }
5365 break;
5366 case IFN_UBSAN_PTR:
5367 if (integer_zerop (gimple_call_arg (stmt, 1)))
0e82f089 5368 {
ca1150f0 5369 replace_call_with_value (gsi, NULL_TREE);
0e82f089
MP
5370 return true;
5371 }
5372 break;
ca1150f0
JJ
5373 case IFN_UBSAN_BOUNDS:
5374 {
5375 tree index = gimple_call_arg (stmt, 1);
5376 tree bound = gimple_call_arg (stmt, 2);
5377 if (TREE_CODE (index) == INTEGER_CST
5378 && TREE_CODE (bound) == INTEGER_CST)
5379 {
5380 index = fold_convert (TREE_TYPE (bound), index);
5381 if (TREE_CODE (index) == INTEGER_CST
5382 && tree_int_cst_le (index, bound))
5383 {
5384 replace_call_with_value (gsi, NULL_TREE);
5385 return true;
5386 }
5387 }
5388 }
5389 break;
451e8dae
NS
5390 case IFN_GOACC_DIM_SIZE:
5391 case IFN_GOACC_DIM_POS:
5392 result = fold_internal_goacc_dim (stmt);
5393 break;
368b454d
JJ
5394 case IFN_UBSAN_CHECK_ADD:
5395 subcode = PLUS_EXPR;
5396 break;
5397 case IFN_UBSAN_CHECK_SUB:
5398 subcode = MINUS_EXPR;
5399 break;
5400 case IFN_UBSAN_CHECK_MUL:
5401 subcode = MULT_EXPR;
5402 break;
1304953e
JJ
5403 case IFN_ADD_OVERFLOW:
5404 subcode = PLUS_EXPR;
5405 cplx_result = true;
5406 break;
5407 case IFN_SUB_OVERFLOW:
5408 subcode = MINUS_EXPR;
5409 cplx_result = true;
5410 break;
5411 case IFN_MUL_OVERFLOW:
5412 subcode = MULT_EXPR;
5413 cplx_result = true;
5414 break;
868363d4
RS
5415 case IFN_MASK_LOAD:
5416 changed |= gimple_fold_mask_load (gsi, stmt);
5417 break;
5418 case IFN_MASK_STORE:
5419 changed |= gimple_fold_mask_store (gsi, stmt);
5420 break;
368b454d
JJ
5421 default:
5422 break;
5423 }
5424 if (subcode != ERROR_MARK)
5425 {
5426 tree arg0 = gimple_call_arg (stmt, 0);
5427 tree arg1 = gimple_call_arg (stmt, 1);
1304953e
JJ
5428 tree type = TREE_TYPE (arg0);
5429 if (cplx_result)
5430 {
5431 tree lhs = gimple_call_lhs (stmt);
5432 if (lhs == NULL_TREE)
5433 type = NULL_TREE;
5434 else
5435 type = TREE_TYPE (TREE_TYPE (lhs));
5436 }
5437 if (type == NULL_TREE)
5438 ;
368b454d 5439 /* x = y + 0; x = y - 0; x = y * 0; */
1304953e
JJ
5440 else if (integer_zerop (arg1))
5441 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
368b454d
JJ
5442 /* x = 0 + y; x = 0 * y; */
5443 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
1304953e 5444 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
368b454d
JJ
5445 /* x = y - y; */
5446 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
1304953e 5447 result = integer_zero_node;
368b454d 5448 /* x = y * 1; x = 1 * y; */
1304953e
JJ
5449 else if (subcode == MULT_EXPR && integer_onep (arg1))
5450 result = arg0;
5451 else if (subcode == MULT_EXPR && integer_onep (arg0))
5452 result = arg1;
5453 else if (TREE_CODE (arg0) == INTEGER_CST
5454 && TREE_CODE (arg1) == INTEGER_CST)
368b454d 5455 {
1304953e
JJ
5456 if (cplx_result)
5457 result = int_const_binop (subcode, fold_convert (type, arg0),
5458 fold_convert (type, arg1));
5459 else
5460 result = int_const_binop (subcode, arg0, arg1);
5461 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
5462 {
5463 if (cplx_result)
5464 overflow = build_one_cst (type);
5465 else
5466 result = NULL_TREE;
5467 }
5468 }
5469 if (result)
5470 {
5471 if (result == integer_zero_node)
5472 result = build_zero_cst (type);
5473 else if (cplx_result && TREE_TYPE (result) != type)
5474 {
5475 if (TREE_CODE (result) == INTEGER_CST)
5476 {
5477 if (arith_overflowed_p (PLUS_EXPR, type, result,
5478 integer_zero_node))
5479 overflow = build_one_cst (type);
5480 }
5481 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
5482 && TYPE_UNSIGNED (type))
5483 || (TYPE_PRECISION (type)
5484 < (TYPE_PRECISION (TREE_TYPE (result))
5485 + (TYPE_UNSIGNED (TREE_TYPE (result))
5486 && !TYPE_UNSIGNED (type)))))
5487 result = NULL_TREE;
5488 if (result)
5489 result = fold_convert (type, result);
5490 }
368b454d
JJ
5491 }
5492 }
1304953e 5493
ed9c79e1
JJ
5494 if (result)
5495 {
1304953e
JJ
5496 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
5497 result = drop_tree_overflow (result);
5498 if (cplx_result)
5499 {
5500 if (overflow == NULL_TREE)
5501 overflow = build_zero_cst (TREE_TYPE (result));
5502 tree ctype = build_complex_type (TREE_TYPE (result));
5503 if (TREE_CODE (result) == INTEGER_CST
5504 && TREE_CODE (overflow) == INTEGER_CST)
5505 result = build_complex (ctype, result, overflow);
5506 else
5507 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
5508 ctype, result, overflow);
5509 }
ed9c79e1
JJ
5510 if (!update_call_from_tree (gsi, result))
5511 gimplify_and_update_call_from_tree (gsi, result);
5512 changed = true;
5513 }
5514 }
3b45a007 5515
e021c122 5516 return changed;
cbdd87d4
RG
5517}
5518
e0ee10ed 5519
89a79e96
RB
5520/* Return true whether NAME has a use on STMT. */
5521
5522static bool
355fe088 5523has_use_on_stmt (tree name, gimple *stmt)
89a79e96
RB
5524{
5525 imm_use_iterator iter;
5526 use_operand_p use_p;
5527 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
5528 if (USE_STMT (use_p) == stmt)
5529 return true;
5530 return false;
5531}
5532
e0ee10ed
RB
/* Worker for fold_stmt_1 dispatch to pattern based folding with
   gimple_simplify.

   Replaces *GSI with the simplification result in *RES_OP (code plus
   operands) and the associated statements in *SEQ.  Does the replacement
   according to INPLACE and returns true if the operation succeeded.  */

static bool
replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
				  gimple_match_op *res_op,
				  gimple_seq *seq, bool inplace)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree *ops = res_op->ops;
  unsigned int num_ops = res_op->num_ops;

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  See also maybe_push_res_to_seq.
     As an exception allow such uses if there was a use of the
     same SSA name on the old stmt.  */
  for (unsigned int i = 0; i < num_ops; ++i)
    if (TREE_CODE (ops[i]) == SSA_NAME
	&& SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])
	&& !has_use_on_stmt (ops[i], stmt))
      return false;

  /* If the first operand is itself a comparison tree, apply the same
     abnormal-SSA-name restriction to its two operands.  */
  if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
    for (unsigned int i = 0; i < 2; ++i)
      if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))
	  && !has_use_on_stmt (TREE_OPERAND (ops[0], i), stmt))
	return false;

  /* Don't insert new statements when INPLACE is true, even if we could
     reuse STMT for the final statement.  */
  if (inplace && !gimple_seq_empty_p (*seq))
    return false;

  if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      /* A GIMPLE_COND can only carry a tree code, not a builtin call.  */
      gcc_assert (res_op->code.is_tree_code ());
      if (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison
	  /* GIMPLE_CONDs condition may not throw.  */
	  && (!flag_exceptions
	      || !cfun->can_throw_non_call_exceptions
	      || !operation_could_trap_p (res_op->code,
					  FLOAT_TYPE_P (TREE_TYPE (ops[0])),
					  false, NULL_TREE)))
	gimple_cond_set_condition (cond_stmt, res_op->code, ops[0], ops[1]);
      else if (res_op->code == SSA_NAME)
	/* Simplified to a bare SSA name: rewrite as NAME != 0.  */
	gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
				   build_zero_cst (TREE_TYPE (ops[0])));
      else if (res_op->code == INTEGER_CST)
	{
	  /* Simplified to a constant: make the condition trivially
	     false or true.  */
	  if (integer_zerop (ops[0]))
	    gimple_cond_make_false (cond_stmt);
	  else
	    gimple_cond_make_true (cond_stmt);
	}
      else if (!inplace)
	{
	  /* Materialize the simplification into SEQ and compare the
	     resulting value against zero.  */
	  tree res = maybe_push_res_to_seq (res_op, seq);
	  if (!res)
	    return false;
	  gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
				     build_zero_cst (TREE_TYPE (res)));
	}
      else
	return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi),
			     0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (is_gimple_assign (stmt)
	   && res_op->code.is_tree_code ())
    {
      /* For an assignment, only rewrite in place if the new RHS does not
	 need more operand slots than the existing statement provides.  */
      if (!inplace
	  || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (res_op->code))
	{
	  maybe_build_generic_op (res_op);
	  gimple_assign_set_rhs_with_ops (gsi, res_op->code,
					  res_op->op_or_null (0),
					  res_op->op_or_null (1),
					  res_op->op_or_null (2));
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      if (!gimple_seq_empty_p (*seq))
		print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	      print_gimple_stmt (dump_file, gsi_stmt (*gsi),
				 0, TDF_SLIM);
	    }
	  gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
	  return true;
	}
    }
  else if (res_op->code.is_fn_code ()
	   && gimple_call_combined_fn (stmt) == res_op->code)
    {
      /* Same combined function: just replace the call arguments.  */
      gcc_assert (num_ops == gimple_call_num_args (stmt));
      for (unsigned int i = 0; i < num_ops; ++i)
	gimple_call_set_arg (stmt, i, ops[i]);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (!inplace)
    {
      if (gimple_has_lhs (stmt))
	{
	  /* Emit the simplification into SEQ, assigning to the original
	     LHS, and replace the whole statement with SEQ preserving
	     virtual operands.  */
	  tree lhs = gimple_get_lhs (stmt);
	  if (!maybe_push_res_to_seq (res_op, seq, lhs))
	    return false;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	    }
	  gsi_replace_with_seq_vops (gsi, *seq);
	  return true;
	}
      else
	gcc_unreachable ();
    }

  return false;
}
5673
040292e7
RB
/* Canonicalize MEM_REFs invariant address operand after propagation.
   T points at the reference (or an ADDR_EXPR of one) to canonicalize;
   returns true if *T was changed.  IS_DEBUG is true when *T comes from
   a debug stmt, in which case failure to re-derive a base is tolerated
   instead of being an internal error.  */

static bool
maybe_canonicalize_mem_ref_addr (tree *t, bool is_debug = false)
{
  bool res = false;
  tree *orig_t = t;

  /* Look through a wrapping ADDR_EXPR; ORIG_T remembers the original
     position for the ADDR_EXPR-of-MEM_REF folding further below.  */
  if (TREE_CODE (*t) == ADDR_EXPR)
    t = &TREE_OPERAND (*t, 0);

  /* The C and C++ frontends use an ARRAY_REF for indexing with their
     generic vector extension.  The actual vector referenced is
     view-converted to an array type for this purpose.  If the index
     is constant the canonical representation in the middle-end is a
     BIT_FIELD_REF so re-write the former to the latter here.  */
  if (TREE_CODE (*t) == ARRAY_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
      && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
      && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
    {
      tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
      if (VECTOR_TYPE_P (vtype))
	{
	  tree low = array_ref_low_bound (*t);
	  if (TREE_CODE (low) == INTEGER_CST)
	    {
	      if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
		{
		  /* Compute the bit offset of the accessed element and
		     only rewrite when the access stays inside the
		     vector.  */
		  widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
					    wi::to_widest (low));
		  idx = wi::mul (idx, wi::to_widest
					 (TYPE_SIZE (TREE_TYPE (*t))));
		  widest_int ext
		    = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
		  if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
		    {
		      *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
				       TREE_TYPE (*t),
				       TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
				       TYPE_SIZE (TREE_TYPE (*t)),
				       wide_int_to_tree (bitsizetype, idx));
		      res = true;
		    }
		}
	    }
	}
    }

  /* Strip component references to get at the base object.  */
  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
     of invariant addresses into a SSA name MEM_REF address.  */
  if (TREE_CODE (*t) == MEM_REF
      || TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (*t, 0);
      if (TREE_CODE (addr) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
	      || handled_component_p (TREE_OPERAND (addr, 0))))
	{
	  tree base;
	  poly_int64 coffset;
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						&coffset);
	  if (!base)
	    {
	      /* Debug stmts may carry references we cannot analyze;
	         anywhere else this is an internal error.  */
	      if (is_debug)
		return false;
	      gcc_unreachable ();
	    }

	  /* Fold the component offset into the MEM_REF's constant
	     offset operand.  */
	  TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
	  TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
						  TREE_OPERAND (*t, 1),
						  size_int (coffset));
	  res = true;
	}
      gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
			   || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
    }

  /* Canonicalize back MEM_REFs to plain reference trees if the object
     accessed is a decl that has the same access semantics as the MEM_REF.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && MR_DEPENDENCE_CLIQUE (*t) == 0)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
      if (/* Same volatile qualification.  */
	  TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
	  /* Same TBAA behavior with -fstrict-aliasing.  */
	  && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
	  && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
	      == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
	  /* Same alignment.  */
	  && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
	  /* We have to look out here to not drop a required conversion
	     from the rhs to the lhs if *t appears on the lhs or vice-versa
	     if it appears on the rhs.  Thus require strict type
	     compatibility.  */
	  && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
	{
	  *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
	  res = true;
	}
    }

  /* Fold &MEM[constant, offset] down to a plain integer constant
     when the whole address can be expressed as one.  */
  else if (TREE_CODE (*orig_t) == ADDR_EXPR
	   && TREE_CODE (*t) == MEM_REF
	   && TREE_CODE (TREE_OPERAND (*t, 0)) == INTEGER_CST)
    {
      tree base;
      poly_int64 coffset;
      base = get_addr_base_and_unit_offset (TREE_OPERAND (*orig_t, 0),
					    &coffset);
      if (base)
	{
	  gcc_assert (TREE_CODE (base) == MEM_REF);
	  poly_int64 moffset;
	  if (mem_ref_offset (base).to_shwi (&moffset))
	    {
	      coffset += moffset;
	      if (wi::to_poly_wide (TREE_OPERAND (base, 0)).to_shwi (&moffset))
		{
		  coffset += moffset;
		  *orig_t = build_int_cst (TREE_TYPE (*orig_t), coffset);
		  return true;
		}
	    }
	}
    }

  /* Canonicalize TARGET_MEM_REF in particular with respect to
     the indexes becoming constant.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  res = true;
	}
    }

  return res;
}
5824
cbdd87d4
RG
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  VALUEIZE is used to look up SSA names during
   pattern-based simplification.  Returns true if STMT was changed.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
{
  bool changed = false;
  gimple *stmt = gsi_stmt (*gsi);
  /* Remember the no-warning flag so folding does not introduce
     overflow warnings the statement had suppressed.  */
  bool nowarning = gimple_no_warning_p (stmt);
  unsigned i;
  fold_defer_overflow_warnings ();

  /* First do required canonicalization of [TARGET_]MEM_REF addresses
     after propagation.
     ??? This shouldn't be done in generic folding but in the
     propagation helpers which also know whether an address was
     propagated.
     Also canonicalize operand order.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
	{
	  tree *rhs = gimple_assign_rhs1_ptr (stmt);
	  if ((REFERENCE_CLASS_P (*rhs)
	       || TREE_CODE (*rhs) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (rhs))
	    changed = true;
	  tree *lhs = gimple_assign_lhs_ptr (stmt);
	  if (REFERENCE_CLASS_P (*lhs)
	      && maybe_canonicalize_mem_ref_addr (lhs))
	    changed = true;
	}
      else
	{
	  /* Canonicalize operand order.  */
	  enum tree_code code = gimple_assign_rhs_code (stmt);
	  if (TREE_CODE_CLASS (code) == tcc_comparison
	      || commutative_tree_code (code)
	      || commutative_ternary_tree_code (code))
	    {
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      tree rhs2 = gimple_assign_rhs2 (stmt);
	      if (tree_swap_operands_p (rhs1, rhs2))
		{
		  gimple_assign_set_rhs1 (stmt, rhs2);
		  gimple_assign_set_rhs2 (stmt, rhs1);
		  /* Swapping comparison operands requires swapping the
		     comparison code as well.  */
		  if (TREE_CODE_CLASS (code) == tcc_comparison)
		    gimple_assign_set_rhs_code (stmt,
						swap_tree_comparison (code));
		  changed = true;
		}
	    }
	}
      break;
    case GIMPLE_CALL:
      {
	/* Canonicalize reference arguments and the call LHS.  */
	for (i = 0; i < gimple_call_num_args (stmt); ++i)
	  {
	    tree *arg = gimple_call_arg_ptr (stmt, i);
	    if (REFERENCE_CLASS_P (*arg)
		&& maybe_canonicalize_mem_ref_addr (arg))
	      changed = true;
	  }
	tree *lhs = gimple_call_lhs_ptr (stmt);
	if (*lhs
	    && REFERENCE_CLASS_P (*lhs)
	    && maybe_canonicalize_mem_ref_addr (lhs))
	  changed = true;
	break;
      }
    case GIMPLE_ASM:
      {
	/* Canonicalize memory references in asm outputs and inputs.  */
	gasm *asm_stmt = as_a <gasm *> (stmt);
	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if (REFERENCE_CLASS_P (op)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if ((REFERENCE_CLASS_P (op)
		 || TREE_CODE (op) == ADDR_EXPR)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
      }
      break;
    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree *val = gimple_debug_bind_get_value_ptr (stmt);
	  if (*val
	      && (REFERENCE_CLASS_P (*val)
		  || TREE_CODE (*val) == ADDR_EXPR)
	      /* Pass is_debug=true: failure to analyze is tolerated
		 for debug binds.  */
	      && maybe_canonicalize_mem_ref_addr (val, true))
	    changed = true;
	}
      break;
    case GIMPLE_COND:
      {
	/* Canonicalize operand order.  */
	tree lhs = gimple_cond_lhs (stmt);
	tree rhs = gimple_cond_rhs (stmt);
	if (tree_swap_operands_p (lhs, rhs))
	  {
	    gcond *gc = as_a <gcond *> (stmt);
	    gimple_cond_set_lhs (gc, rhs);
	    gimple_cond_set_rhs (gc, lhs);
	    gimple_cond_set_code (gc,
				  swap_tree_comparison (gimple_cond_code (gc)));
	    changed = true;
	  }
      }
      /* Falls through to the empty default label; harmless.  */
    default:;
    }

  /* Dispatch to pattern-based folding.  */
  if (!inplace
      || is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_COND)
    {
      gimple_seq seq = NULL;
      gimple_match_op res_op;
      if (gimple_simplify (stmt, &res_op, inplace ? NULL : &seq,
			   valueize, valueize))
	{
	  if (replace_stmt_with_simplification (gsi, &res_op, &seq, inplace))
	    changed = true;
	  else
	    gimple_seq_discard (seq);
	}
    }

  /* The statement may have been replaced above; re-fetch it.  */
  stmt = gsi_stmt (*gsi);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	/* Try to canonicalize for boolean-typed X the comparisons
	   X == 0, X == 1, X != 0, and X != 1.  */
	if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	    || gimple_assign_rhs_code (stmt) == NE_EXPR)
	  {
	    tree lhs = gimple_assign_lhs (stmt);
	    tree op1 = gimple_assign_rhs1 (stmt);
	    tree op2 = gimple_assign_rhs2 (stmt);
	    tree type = TREE_TYPE (op1);

	    /* Check whether the comparison operands are of the same boolean
	       type as the result type is.
	       Check that second operand is an integer-constant with value
	       one or zero.  */
	    if (TREE_CODE (op2) == INTEGER_CST
		&& (integer_zerop (op2) || integer_onep (op2))
		&& useless_type_conversion_p (TREE_TYPE (lhs), type))
	      {
		enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
		bool is_logical_not = false;

		/* X == 0 and X != 1 is a logical-not of X
		   X == 1 and X != 0 is X  */
		if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		    || (cmp_code == NE_EXPR && integer_onep (op2)))
		  is_logical_not = true;

		if (is_logical_not == false)
		  gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
		/* Only for one-bit precision typed X the transformation
		   !X -> ~X is valid.  */
		else if (TYPE_PRECISION (type) == 1)
		  gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
		/* Otherwise we use !X -> X ^ 1.  */
		else
		  gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
						  build_int_cst (type, 1));
		changed = true;
		break;
	      }
	  }

	unsigned old_num_ops = gimple_num_ops (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	/* In-place folding must not grow the number of operands.  */
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	size_t noutputs;
	const char **oconstraints;
	const char *constraint;
	bool allows_mem, allows_reg;

	noutputs = gimple_asm_noutputs (asm_stmt);
	oconstraints = XALLOCAVEC (const char *, noutputs);

	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    oconstraints[i]
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, true)) != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    constraint
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    /* Whether an input may be folded to a memory reference
	       depends on its constraint.  */
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, !allows_reg && allows_mem))
		   != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      /* Fold the reference under the ADDR_EXPR and re-wrap.  */
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref, false);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    case GIMPLE_RETURN:
      {
	greturn *ret_stmt = as_a<greturn *> (stmt);
	tree ret = gimple_return_retval(ret_stmt);

	/* Propagate the valueized return value if copy propagation
	   allows it.  */
	if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
	  {
	    tree val = valueize (ret);
	    if (val && val != ret
		&& may_propagate_copy (ret, val))
	      {
		gimple_return_set_retval (ret_stmt, val);
		changed = true;
	      }
	  }
      }
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  /* Emit deferred overflow warnings only if we changed something and
     the statement did not have warnings suppressed.  */
  fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
  return changed;
}
6148
e0ee10ed
RB
/* Valueization callback that ends up not following SSA edges.  */

tree
no_follow_ssa_edges (tree)
{
  return NULL_TREE;
}
6156
45cc9f96
RB
6157/* Valueization callback that ends up following single-use SSA edges only. */
6158
6159tree
6160follow_single_use_edges (tree val)
6161{
6162 if (TREE_CODE (val) == SSA_NAME
6163 && !has_single_use (val))
6164 return NULL_TREE;
6165 return val;
6166}
6167
c566cc9f
RS
/* Valueization callback that follows all SSA edges.  */

tree
follow_all_ssa_edges (tree val)
{
  return val;
}
6175
cbdd87d4
RG
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  /* Default overload: do not look through SSA definitions.  */
  return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
}

/* Same as above, but use VALUEIZE to look up SSA names while folding.  */

bool
fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
{
  return fold_stmt_1 (gsi, false, valueize);
}
6194
59401b92 6195/* Perform the minimal folding on statement *GSI. Only operations like
cbdd87d4
RG
6196 *&x created by constant propagation are handled. The statement cannot
6197 be replaced with a new one. Return true if the statement was
6198 changed, false otherwise.
59401b92 6199 The statement *GSI should be in valid gimple form but may
cbdd87d4
RG
6200 be in unfolded state as resulting from for example constant propagation
6201 which can produce *&x = 0. */
6202
6203bool
59401b92 6204fold_stmt_inplace (gimple_stmt_iterator *gsi)
cbdd87d4 6205{
355fe088 6206 gimple *stmt = gsi_stmt (*gsi);
e0ee10ed 6207 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
59401b92 6208 gcc_assert (gsi_stmt (*gsi) == stmt);
cbdd87d4
RG
6209 return changed;
6210}
6211
e89065a1
SL
6212/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
6213 if EXPR is null or we don't know how.
6214 If non-null, the result always has boolean type. */
6215
6216static tree
6217canonicalize_bool (tree expr, bool invert)
6218{
6219 if (!expr)
6220 return NULL_TREE;
6221 else if (invert)
6222 {
6223 if (integer_nonzerop (expr))
6224 return boolean_false_node;
6225 else if (integer_zerop (expr))
6226 return boolean_true_node;
6227 else if (TREE_CODE (expr) == SSA_NAME)
6228 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
6229 build_int_cst (TREE_TYPE (expr), 0));
98209db3 6230 else if (COMPARISON_CLASS_P (expr))
e89065a1
SL
6231 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
6232 boolean_type_node,
6233 TREE_OPERAND (expr, 0),
6234 TREE_OPERAND (expr, 1));
6235 else
6236 return NULL_TREE;
6237 }
6238 else
6239 {
6240 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
6241 return expr;
6242 if (integer_nonzerop (expr))
6243 return boolean_true_node;
6244 else if (integer_zerop (expr))
6245 return boolean_false_node;
6246 else if (TREE_CODE (expr) == SSA_NAME)
6247 return fold_build2 (NE_EXPR, boolean_type_node, expr,
6248 build_int_cst (TREE_TYPE (expr), 0));
98209db3 6249 else if (COMPARISON_CLASS_P (expr))
e89065a1
SL
6250 return fold_build2 (TREE_CODE (expr),
6251 boolean_type_node,
6252 TREE_OPERAND (expr, 0),
6253 TREE_OPERAND (expr, 1));
6254 else
6255 return NULL_TREE;
6256 }
6257}
6258
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple *s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      /* Otherwise compare the defining statement of EXPR against the
	 comparison directly.  */
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  /* (name != 0) / (name == 1) keeps the comparison's sense;
	     (name == 0) / (name != 1) inverts it.  */
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}
6316
6317/* Check to see if two boolean expressions OP1 and OP2 are logically
6318 equivalent. */
6319
6320static bool
6321same_bool_result_p (const_tree op1, const_tree op2)
6322{
6323 /* Simple cases first. */
6324 if (operand_equal_p (op1, op2, 0))
6325 return true;
6326
6327 /* Check the cases where at least one of the operands is a comparison.
6328 These are a bit smarter than operand_equal_p in that they apply some
6329 identifies on SSA_NAMEs. */
98209db3 6330 if (COMPARISON_CLASS_P (op2)
e89065a1
SL
6331 && same_bool_comparison_p (op1, TREE_CODE (op2),
6332 TREE_OPERAND (op2, 0),
6333 TREE_OPERAND (op2, 1)))
6334 return true;
98209db3 6335 if (COMPARISON_CLASS_P (op1)
e89065a1
SL
6336 && same_bool_comparison_p (op2, TREE_CODE (op1),
6337 TREE_OPERAND (op1, 0),
6338 TREE_OPERAND (op1, 1)))
6339 return true;
6340
6341 /* Default case. */
6342 return false;
6343}
6344
/* Forward declarations for some mutually recursive functions.
   The and_* and or_* helpers recurse into each other (e.g. via DeMorgan
   rewrites), so all prototypes are needed up front.  */

static tree
and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree type, tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (tree type, gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (tree, enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree, tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (tree, gimple *stmt,
			  enum tree_code code2, tree op2a, tree op2b);
6365
6366/* Helper function for and_comparisons_1: try to simplify the AND of the
6367 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
6368 If INVERT is true, invert the value of the VAR before doing the AND.
6369 Return NULL_EXPR if we can't simplify this to a single expression. */
6370
6371static tree
5f487a34 6372and_var_with_comparison (tree type, tree var, bool invert,
e89065a1
SL
6373 enum tree_code code2, tree op2a, tree op2b)
6374{
6375 tree t;
355fe088 6376 gimple *stmt = SSA_NAME_DEF_STMT (var);
e89065a1
SL
6377
6378 /* We can only deal with variables whose definitions are assignments. */
6379 if (!is_gimple_assign (stmt))
6380 return NULL_TREE;
6381
6382 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
6383 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
6384 Then we only have to consider the simpler non-inverted cases. */
6385 if (invert)
5f487a34 6386 t = or_var_with_comparison_1 (type, stmt,
e89065a1
SL
6387 invert_tree_comparison (code2, false),
6388 op2a, op2b);
6389 else
5f487a34 6390 t = and_var_with_comparison_1 (type, stmt, code2, op2a, op2b);
e89065a1
SL
6391 return canonicalize_bool (t, invert);
6392}
6393
6394/* Try to simplify the AND of the ssa variable defined by the assignment
6395 STMT with the comparison specified by (OP2A CODE2 OP2B).
6396 Return NULL_EXPR if we can't simplify this to a single expression. */
6397
6398static tree
5f487a34 6399and_var_with_comparison_1 (tree type, gimple *stmt,
e89065a1
SL
6400 enum tree_code code2, tree op2a, tree op2b)
6401{
6402 tree var = gimple_assign_lhs (stmt);
6403 tree true_test_var = NULL_TREE;
6404 tree false_test_var = NULL_TREE;
6405 enum tree_code innercode = gimple_assign_rhs_code (stmt);
6406
6407 /* Check for identities like (var AND (var == 0)) => false. */
6408 if (TREE_CODE (op2a) == SSA_NAME
6409 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
6410 {
6411 if ((code2 == NE_EXPR && integer_zerop (op2b))
6412 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
6413 {
6414 true_test_var = op2a;
6415 if (var == true_test_var)
6416 return var;
6417 }
6418 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
6419 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
6420 {
6421 false_test_var = op2a;
6422 if (var == false_test_var)
6423 return boolean_false_node;
6424 }
6425 }
6426
6427 /* If the definition is a comparison, recurse on it. */
6428 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
6429 {
5f487a34 6430 tree t = and_comparisons_1 (type, innercode,
e89065a1
SL
6431 gimple_assign_rhs1 (stmt),
6432 gimple_assign_rhs2 (stmt),
6433 code2,
6434 op2a,
6435 op2b);
6436 if (t)
6437 return t;
6438 }
6439
6440 /* If the definition is an AND or OR expression, we may be able to
6441 simplify by reassociating. */
eb9820c0
KT
6442 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
6443 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
e89065a1
SL
6444 {
6445 tree inner1 = gimple_assign_rhs1 (stmt);
6446 tree inner2 = gimple_assign_rhs2 (stmt);
355fe088 6447 gimple *s;
e89065a1
SL
6448 tree t;
6449 tree partial = NULL_TREE;
eb9820c0 6450 bool is_and = (innercode == BIT_AND_EXPR);
e89065a1
SL
6451
6452 /* Check for boolean identities that don't require recursive examination
6453 of inner1/inner2:
6454 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
6455 inner1 AND (inner1 OR inner2) => inner1
6456 !inner1 AND (inner1 AND inner2) => false
6457 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
6458 Likewise for similar cases involving inner2. */
6459 if (inner1 == true_test_var)
6460 return (is_and ? var : inner1);
6461 else if (inner2 == true_test_var)
6462 return (is_and ? var : inner2);
6463 else if (inner1 == false_test_var)
6464 return (is_and
6465 ? boolean_false_node
5f487a34
LJH
6466 : and_var_with_comparison (type, inner2, false, code2, op2a,
6467 op2b));
e89065a1
SL
6468 else if (inner2 == false_test_var)
6469 return (is_and
6470 ? boolean_false_node
5f487a34
LJH
6471 : and_var_with_comparison (type, inner1, false, code2, op2a,
6472 op2b));
e89065a1
SL
6473
6474 /* Next, redistribute/reassociate the AND across the inner tests.
6475 Compute the first partial result, (inner1 AND (op2a code op2b)) */
6476 if (TREE_CODE (inner1) == SSA_NAME
6477 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
6478 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5f487a34 6479 && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
e89065a1
SL
6480 gimple_assign_rhs1 (s),
6481 gimple_assign_rhs2 (s),
6482 code2, op2a, op2b)))
6483 {
6484 /* Handle the AND case, where we are reassociating:
6485 (inner1 AND inner2) AND (op2a code2 op2b)
6486 => (t AND inner2)
6487 If the partial result t is a constant, we win. Otherwise
6488 continue on to try reassociating with the other inner test. */
6489 if (is_and)
6490 {
6491 if (integer_onep (t))
6492 return inner2;
6493 else if (integer_zerop (t))
6494 return boolean_false_node;
6495 }
6496
6497 /* Handle the OR case, where we are redistributing:
6498 (inner1 OR inner2) AND (op2a code2 op2b)
6499 => (t OR (inner2 AND (op2a code2 op2b))) */
8236c8eb
JJ
6500 else if (integer_onep (t))
6501 return boolean_true_node;
6502
6503 /* Save partial result for later. */
6504 partial = t;
e89065a1
SL
6505 }
6506
6507 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
6508 if (TREE_CODE (inner2) == SSA_NAME
6509 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
6510 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5f487a34 6511 && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
e89065a1
SL
6512 gimple_assign_rhs1 (s),
6513 gimple_assign_rhs2 (s),
6514 code2, op2a, op2b)))
6515 {
6516 /* Handle the AND case, where we are reassociating:
6517 (inner1 AND inner2) AND (op2a code2 op2b)
6518 => (inner1 AND t) */
6519 if (is_and)
6520 {
6521 if (integer_onep (t))
6522 return inner1;
6523 else if (integer_zerop (t))
6524 return boolean_false_node;
8236c8eb
JJ
6525 /* If both are the same, we can apply the identity
6526 (x AND x) == x. */
6527 else if (partial && same_bool_result_p (t, partial))
6528 return t;
e89065a1
SL
6529 }
6530
6531 /* Handle the OR case. where we are redistributing:
6532 (inner1 OR inner2) AND (op2a code2 op2b)
6533 => (t OR (inner1 AND (op2a code2 op2b)))
6534 => (t OR partial) */
6535 else
6536 {
6537 if (integer_onep (t))
6538 return boolean_true_node;
6539 else if (partial)
6540 {
6541 /* We already got a simplification for the other
6542 operand to the redistributed OR expression. The
6543 interesting case is when at least one is false.
6544 Or, if both are the same, we can apply the identity
6545 (x OR x) == x. */
6546 if (integer_zerop (partial))
6547 return t;
6548 else if (integer_zerop (t))
6549 return partial;
6550 else if (same_bool_result_p (t, partial))
6551 return t;
6552 }
6553 }
6554 }
6555 }
6556 return NULL_TREE;
6557}
6558
6559/* Try to simplify the AND of two comparisons defined by
6560 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
6561 If this can be done without constructing an intermediate value,
6562 return the resulting tree; otherwise NULL_TREE is returned.
6563 This function is deliberately asymmetric as it recurses on SSA_DEFs
6564 in the first comparison but not the second. */
6565
6566static tree
5f487a34 6567and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
e89065a1
SL
6568 enum tree_code code2, tree op2a, tree op2b)
6569{
ae22ac3c 6570 tree truth_type = truth_type_for (TREE_TYPE (op1a));
31ed6226 6571
e89065a1
SL
6572 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
6573 if (operand_equal_p (op1a, op2a, 0)
6574 && operand_equal_p (op1b, op2b, 0))
6575 {
eb9820c0 6576 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
6577 tree t = combine_comparisons (UNKNOWN_LOCATION,
6578 TRUTH_ANDIF_EXPR, code1, code2,
31ed6226 6579 truth_type, op1a, op1b);
e89065a1
SL
6580 if (t)
6581 return t;
6582 }
6583
6584 /* Likewise the swapped case of the above. */
6585 if (operand_equal_p (op1a, op2b, 0)
6586 && operand_equal_p (op1b, op2a, 0))
6587 {
eb9820c0 6588 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
6589 tree t = combine_comparisons (UNKNOWN_LOCATION,
6590 TRUTH_ANDIF_EXPR, code1,
6591 swap_tree_comparison (code2),
31ed6226 6592 truth_type, op1a, op1b);
e89065a1
SL
6593 if (t)
6594 return t;
6595 }
6596
e89065a1
SL
6597 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
6598 NAME's definition is a truth value. See if there are any simplifications
6599 that can be done against the NAME's definition. */
6600 if (TREE_CODE (op1a) == SSA_NAME
6601 && (code1 == NE_EXPR || code1 == EQ_EXPR)
6602 && (integer_zerop (op1b) || integer_onep (op1b)))
6603 {
6604 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
6605 || (code1 == NE_EXPR && integer_onep (op1b)));
355fe088 6606 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
e89065a1
SL
6607 switch (gimple_code (stmt))
6608 {
6609 case GIMPLE_ASSIGN:
6610 /* Try to simplify by copy-propagating the definition. */
5f487a34
LJH
6611 return and_var_with_comparison (type, op1a, invert, code2, op2a,
6612 op2b);
e89065a1
SL
6613
6614 case GIMPLE_PHI:
6615 /* If every argument to the PHI produces the same result when
6616 ANDed with the second comparison, we win.
6617 Do not do this unless the type is bool since we need a bool
6618 result here anyway. */
6619 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
6620 {
6621 tree result = NULL_TREE;
6622 unsigned i;
6623 for (i = 0; i < gimple_phi_num_args (stmt); i++)
6624 {
6625 tree arg = gimple_phi_arg_def (stmt, i);
6626
6627 /* If this PHI has itself as an argument, ignore it.
6628 If all the other args produce the same result,
6629 we're still OK. */
6630 if (arg == gimple_phi_result (stmt))
6631 continue;
6632 else if (TREE_CODE (arg) == INTEGER_CST)
6633 {
6634 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
6635 {
6636 if (!result)
6637 result = boolean_false_node;
6638 else if (!integer_zerop (result))
6639 return NULL_TREE;
6640 }
6641 else if (!result)
6642 result = fold_build2 (code2, boolean_type_node,
6643 op2a, op2b);
6644 else if (!same_bool_comparison_p (result,
6645 code2, op2a, op2b))
6646 return NULL_TREE;
6647 }
0e8b84ec
JJ
6648 else if (TREE_CODE (arg) == SSA_NAME
6649 && !SSA_NAME_IS_DEFAULT_DEF (arg))
e89065a1 6650 {
6c66f733 6651 tree temp;
355fe088 6652 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6c66f733
JJ
6653 /* In simple cases we can look through PHI nodes,
6654 but we have to be careful with loops.
6655 See PR49073. */
6656 if (! dom_info_available_p (CDI_DOMINATORS)
6657 || gimple_bb (def_stmt) == gimple_bb (stmt)
6658 || dominated_by_p (CDI_DOMINATORS,
6659 gimple_bb (def_stmt),
6660 gimple_bb (stmt)))
6661 return NULL_TREE;
5f487a34 6662 temp = and_var_with_comparison (type, arg, invert, code2,
6c66f733 6663 op2a, op2b);
e89065a1
SL
6664 if (!temp)
6665 return NULL_TREE;
6666 else if (!result)
6667 result = temp;
6668 else if (!same_bool_result_p (result, temp))
6669 return NULL_TREE;
6670 }
6671 else
6672 return NULL_TREE;
6673 }
6674 return result;
6675 }
6676
6677 default:
6678 break;
6679 }
6680 }
6681 return NULL_TREE;
6682}
6683
5f487a34
LJH
6684/* Helper function for maybe_fold_and_comparisons and maybe_fold_or_comparisons
6685 : try to simplify the AND/OR of the ssa variable VAR with the comparison
6686 specified by (OP2A CODE2 OP2B) from match.pd. Return NULL_EXPR if we can't
6687 simplify this to a single expression. As we are going to lower the cost
6688 of building SSA names / gimple stmts significantly, we need to allocate
6689 them ont the stack. This will cause the code to be a bit ugly. */
6690
6691static tree
6692maybe_fold_comparisons_from_match_pd (tree type, enum tree_code code,
6693 enum tree_code code1,
6694 tree op1a, tree op1b,
6695 enum tree_code code2, tree op2a,
6696 tree op2b)
6697{
6698 /* Allocate gimple stmt1 on the stack. */
6699 gassign *stmt1
6700 = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
6701 gimple_init (stmt1, GIMPLE_ASSIGN, 3);
6702 gimple_assign_set_rhs_code (stmt1, code1);
6703 gimple_assign_set_rhs1 (stmt1, op1a);
6704 gimple_assign_set_rhs2 (stmt1, op1b);
6705
6706 /* Allocate gimple stmt2 on the stack. */
6707 gassign *stmt2
6708 = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
6709 gimple_init (stmt2, GIMPLE_ASSIGN, 3);
6710 gimple_assign_set_rhs_code (stmt2, code2);
6711 gimple_assign_set_rhs1 (stmt2, op2a);
6712 gimple_assign_set_rhs2 (stmt2, op2b);
6713
6714 /* Allocate SSA names(lhs1) on the stack. */
6715 tree lhs1 = (tree)XALLOCA (tree_ssa_name);
6716 memset (lhs1, 0, sizeof (tree_ssa_name));
6717 TREE_SET_CODE (lhs1, SSA_NAME);
6718 TREE_TYPE (lhs1) = type;
6719 init_ssa_name_imm_use (lhs1);
6720
6721 /* Allocate SSA names(lhs2) on the stack. */
6722 tree lhs2 = (tree)XALLOCA (tree_ssa_name);
6723 memset (lhs2, 0, sizeof (tree_ssa_name));
6724 TREE_SET_CODE (lhs2, SSA_NAME);
6725 TREE_TYPE (lhs2) = type;
6726 init_ssa_name_imm_use (lhs2);
6727
6728 gimple_assign_set_lhs (stmt1, lhs1);
6729 gimple_assign_set_lhs (stmt2, lhs2);
6730
6731 gimple_match_op op (gimple_match_cond::UNCOND, code,
6732 type, gimple_assign_lhs (stmt1),
6733 gimple_assign_lhs (stmt2));
6734 if (op.resimplify (NULL, follow_all_ssa_edges))
6735 {
6736 if (gimple_simplified_result_is_gimple_val (&op))
6737 {
6738 tree res = op.ops[0];
6739 if (res == lhs1)
6740 return build2 (code1, type, op1a, op1b);
6741 else if (res == lhs2)
6742 return build2 (code2, type, op2a, op2b);
6743 else
6744 return res;
6745 }
ae9c3507
ML
6746 else if (op.code.is_tree_code ()
6747 && TREE_CODE_CLASS ((tree_code)op.code) == tcc_comparison)
6748 {
6749 tree op0 = op.ops[0];
6750 tree op1 = op.ops[1];
6751 if (op0 == lhs1 || op0 == lhs2 || op1 == lhs1 || op1 == lhs2)
6752 return NULL_TREE; /* not simple */
6753
6754 return build2 ((enum tree_code)op.code, op.type, op0, op1);
6755 }
5f487a34
LJH
6756 }
6757
6758 return NULL_TREE;
6759}
6760
e89065a1
SL
6761/* Try to simplify the AND of two comparisons, specified by
6762 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
6763 If this can be simplified to a single expression (without requiring
6764 introducing more SSA variables to hold intermediate values),
6765 return the resulting tree. Otherwise return NULL_TREE.
6766 If the result expression is non-null, it has boolean type. */
6767
6768tree
5f487a34
LJH
6769maybe_fold_and_comparisons (tree type,
6770 enum tree_code code1, tree op1a, tree op1b,
e89065a1
SL
6771 enum tree_code code2, tree op2a, tree op2b)
6772{
5f487a34 6773 if (tree t = and_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b))
e89065a1 6774 return t;
5f487a34
LJH
6775
6776 if (tree t = and_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b))
6777 return t;
6778
6779 if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_AND_EXPR, code1,
6780 op1a, op1b, code2, op2a,
6781 op2b))
6782 return t;
6783
6784 return NULL_TREE;
e89065a1
SL
6785}
6786
6787/* Helper function for or_comparisons_1: try to simplify the OR of the
6788 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
6789 If INVERT is true, invert the value of VAR before doing the OR.
6790 Return NULL_EXPR if we can't simplify this to a single expression. */
6791
6792static tree
5f487a34 6793or_var_with_comparison (tree type, tree var, bool invert,
e89065a1
SL
6794 enum tree_code code2, tree op2a, tree op2b)
6795{
6796 tree t;
355fe088 6797 gimple *stmt = SSA_NAME_DEF_STMT (var);
e89065a1
SL
6798
6799 /* We can only deal with variables whose definitions are assignments. */
6800 if (!is_gimple_assign (stmt))
6801 return NULL_TREE;
6802
6803 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
6804 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
6805 Then we only have to consider the simpler non-inverted cases. */
6806 if (invert)
5f487a34 6807 t = and_var_with_comparison_1 (type, stmt,
e89065a1
SL
6808 invert_tree_comparison (code2, false),
6809 op2a, op2b);
6810 else
5f487a34 6811 t = or_var_with_comparison_1 (type, stmt, code2, op2a, op2b);
e89065a1
SL
6812 return canonicalize_bool (t, invert);
6813}
6814
6815/* Try to simplify the OR of the ssa variable defined by the assignment
6816 STMT with the comparison specified by (OP2A CODE2 OP2B).
6817 Return NULL_EXPR if we can't simplify this to a single expression. */
6818
6819static tree
5f487a34 6820or_var_with_comparison_1 (tree type, gimple *stmt,
e89065a1
SL
6821 enum tree_code code2, tree op2a, tree op2b)
6822{
6823 tree var = gimple_assign_lhs (stmt);
6824 tree true_test_var = NULL_TREE;
6825 tree false_test_var = NULL_TREE;
6826 enum tree_code innercode = gimple_assign_rhs_code (stmt);
6827
6828 /* Check for identities like (var OR (var != 0)) => true . */
6829 if (TREE_CODE (op2a) == SSA_NAME
6830 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
6831 {
6832 if ((code2 == NE_EXPR && integer_zerop (op2b))
6833 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
6834 {
6835 true_test_var = op2a;
6836 if (var == true_test_var)
6837 return var;
6838 }
6839 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
6840 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
6841 {
6842 false_test_var = op2a;
6843 if (var == false_test_var)
6844 return boolean_true_node;
6845 }
6846 }
6847
6848 /* If the definition is a comparison, recurse on it. */
6849 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
6850 {
5f487a34 6851 tree t = or_comparisons_1 (type, innercode,
e89065a1
SL
6852 gimple_assign_rhs1 (stmt),
6853 gimple_assign_rhs2 (stmt),
6854 code2,
6855 op2a,
6856 op2b);
6857 if (t)
6858 return t;
6859 }
6860
6861 /* If the definition is an AND or OR expression, we may be able to
6862 simplify by reassociating. */
eb9820c0
KT
6863 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
6864 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
e89065a1
SL
6865 {
6866 tree inner1 = gimple_assign_rhs1 (stmt);
6867 tree inner2 = gimple_assign_rhs2 (stmt);
355fe088 6868 gimple *s;
e89065a1
SL
6869 tree t;
6870 tree partial = NULL_TREE;
eb9820c0 6871 bool is_or = (innercode == BIT_IOR_EXPR);
e89065a1
SL
6872
6873 /* Check for boolean identities that don't require recursive examination
6874 of inner1/inner2:
6875 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
6876 inner1 OR (inner1 AND inner2) => inner1
6877 !inner1 OR (inner1 OR inner2) => true
6878 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
6879 */
6880 if (inner1 == true_test_var)
6881 return (is_or ? var : inner1);
6882 else if (inner2 == true_test_var)
6883 return (is_or ? var : inner2);
6884 else if (inner1 == false_test_var)
6885 return (is_or
6886 ? boolean_true_node
5f487a34
LJH
6887 : or_var_with_comparison (type, inner2, false, code2, op2a,
6888 op2b));
e89065a1
SL
6889 else if (inner2 == false_test_var)
6890 return (is_or
6891 ? boolean_true_node
5f487a34
LJH
6892 : or_var_with_comparison (type, inner1, false, code2, op2a,
6893 op2b));
e89065a1
SL
6894
6895 /* Next, redistribute/reassociate the OR across the inner tests.
6896 Compute the first partial result, (inner1 OR (op2a code op2b)) */
6897 if (TREE_CODE (inner1) == SSA_NAME
6898 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
6899 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5f487a34 6900 && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
e89065a1
SL
6901 gimple_assign_rhs1 (s),
6902 gimple_assign_rhs2 (s),
6903 code2, op2a, op2b)))
6904 {
6905 /* Handle the OR case, where we are reassociating:
6906 (inner1 OR inner2) OR (op2a code2 op2b)
6907 => (t OR inner2)
6908 If the partial result t is a constant, we win. Otherwise
6909 continue on to try reassociating with the other inner test. */
8236c8eb 6910 if (is_or)
e89065a1
SL
6911 {
6912 if (integer_onep (t))
6913 return boolean_true_node;
6914 else if (integer_zerop (t))
6915 return inner2;
6916 }
6917
6918 /* Handle the AND case, where we are redistributing:
6919 (inner1 AND inner2) OR (op2a code2 op2b)
6920 => (t AND (inner2 OR (op2a code op2b))) */
8236c8eb
JJ
6921 else if (integer_zerop (t))
6922 return boolean_false_node;
6923
6924 /* Save partial result for later. */
6925 partial = t;
e89065a1
SL
6926 }
6927
6928 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
6929 if (TREE_CODE (inner2) == SSA_NAME
6930 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
6931 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5f487a34 6932 && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
e89065a1
SL
6933 gimple_assign_rhs1 (s),
6934 gimple_assign_rhs2 (s),
6935 code2, op2a, op2b)))
6936 {
6937 /* Handle the OR case, where we are reassociating:
6938 (inner1 OR inner2) OR (op2a code2 op2b)
8236c8eb
JJ
6939 => (inner1 OR t)
6940 => (t OR partial) */
6941 if (is_or)
e89065a1
SL
6942 {
6943 if (integer_zerop (t))
6944 return inner1;
6945 else if (integer_onep (t))
6946 return boolean_true_node;
8236c8eb
JJ
6947 /* If both are the same, we can apply the identity
6948 (x OR x) == x. */
6949 else if (partial && same_bool_result_p (t, partial))
6950 return t;
e89065a1
SL
6951 }
6952
6953 /* Handle the AND case, where we are redistributing:
6954 (inner1 AND inner2) OR (op2a code2 op2b)
6955 => (t AND (inner1 OR (op2a code2 op2b)))
6956 => (t AND partial) */
6957 else
6958 {
6959 if (integer_zerop (t))
6960 return boolean_false_node;
6961 else if (partial)
6962 {
6963 /* We already got a simplification for the other
6964 operand to the redistributed AND expression. The
6965 interesting case is when at least one is true.
6966 Or, if both are the same, we can apply the identity
8236c8eb 6967 (x AND x) == x. */
e89065a1
SL
6968 if (integer_onep (partial))
6969 return t;
6970 else if (integer_onep (t))
6971 return partial;
6972 else if (same_bool_result_p (t, partial))
8236c8eb 6973 return t;
e89065a1
SL
6974 }
6975 }
6976 }
6977 }
6978 return NULL_TREE;
6979}
6980
6981/* Try to simplify the OR of two comparisons defined by
6982 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
6983 If this can be done without constructing an intermediate value,
6984 return the resulting tree; otherwise NULL_TREE is returned.
6985 This function is deliberately asymmetric as it recurses on SSA_DEFs
6986 in the first comparison but not the second. */
6987
6988static tree
5f487a34 6989or_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
e89065a1
SL
6990 enum tree_code code2, tree op2a, tree op2b)
6991{
ae22ac3c 6992 tree truth_type = truth_type_for (TREE_TYPE (op1a));
31ed6226 6993
e89065a1
SL
6994 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
6995 if (operand_equal_p (op1a, op2a, 0)
6996 && operand_equal_p (op1b, op2b, 0))
6997 {
eb9820c0 6998 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
6999 tree t = combine_comparisons (UNKNOWN_LOCATION,
7000 TRUTH_ORIF_EXPR, code1, code2,
31ed6226 7001 truth_type, op1a, op1b);
e89065a1
SL
7002 if (t)
7003 return t;
7004 }
7005
7006 /* Likewise the swapped case of the above. */
7007 if (operand_equal_p (op1a, op2b, 0)
7008 && operand_equal_p (op1b, op2a, 0))
7009 {
eb9820c0 7010 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
7011 tree t = combine_comparisons (UNKNOWN_LOCATION,
7012 TRUTH_ORIF_EXPR, code1,
7013 swap_tree_comparison (code2),
31ed6226 7014 truth_type, op1a, op1b);
e89065a1
SL
7015 if (t)
7016 return t;
7017 }
7018
e89065a1
SL
7019 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
7020 NAME's definition is a truth value. See if there are any simplifications
7021 that can be done against the NAME's definition. */
7022 if (TREE_CODE (op1a) == SSA_NAME
7023 && (code1 == NE_EXPR || code1 == EQ_EXPR)
7024 && (integer_zerop (op1b) || integer_onep (op1b)))
7025 {
7026 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
7027 || (code1 == NE_EXPR && integer_onep (op1b)));
355fe088 7028 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
e89065a1
SL
7029 switch (gimple_code (stmt))
7030 {
7031 case GIMPLE_ASSIGN:
7032 /* Try to simplify by copy-propagating the definition. */
5f487a34
LJH
7033 return or_var_with_comparison (type, op1a, invert, code2, op2a,
7034 op2b);
e89065a1
SL
7035
7036 case GIMPLE_PHI:
7037 /* If every argument to the PHI produces the same result when
7038 ORed with the second comparison, we win.
7039 Do not do this unless the type is bool since we need a bool
7040 result here anyway. */
7041 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
7042 {
7043 tree result = NULL_TREE;
7044 unsigned i;
7045 for (i = 0; i < gimple_phi_num_args (stmt); i++)
7046 {
7047 tree arg = gimple_phi_arg_def (stmt, i);
7048
7049 /* If this PHI has itself as an argument, ignore it.
7050 If all the other args produce the same result,
7051 we're still OK. */
7052 if (arg == gimple_phi_result (stmt))
7053 continue;
7054 else if (TREE_CODE (arg) == INTEGER_CST)
7055 {
7056 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
7057 {
7058 if (!result)
7059 result = boolean_true_node;
7060 else if (!integer_onep (result))
7061 return NULL_TREE;
7062 }
7063 else if (!result)
7064 result = fold_build2 (code2, boolean_type_node,
7065 op2a, op2b);
7066 else if (!same_bool_comparison_p (result,
7067 code2, op2a, op2b))
7068 return NULL_TREE;
7069 }
0e8b84ec
JJ
7070 else if (TREE_CODE (arg) == SSA_NAME
7071 && !SSA_NAME_IS_DEFAULT_DEF (arg))
e89065a1 7072 {
6c66f733 7073 tree temp;
355fe088 7074 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6c66f733
JJ
7075 /* In simple cases we can look through PHI nodes,
7076 but we have to be careful with loops.
7077 See PR49073. */
7078 if (! dom_info_available_p (CDI_DOMINATORS)
7079 || gimple_bb (def_stmt) == gimple_bb (stmt)
7080 || dominated_by_p (CDI_DOMINATORS,
7081 gimple_bb (def_stmt),
7082 gimple_bb (stmt)))
7083 return NULL_TREE;
5f487a34 7084 temp = or_var_with_comparison (type, arg, invert, code2,
6c66f733 7085 op2a, op2b);
e89065a1
SL
7086 if (!temp)
7087 return NULL_TREE;
7088 else if (!result)
7089 result = temp;
7090 else if (!same_bool_result_p (result, temp))
7091 return NULL_TREE;
7092 }
7093 else
7094 return NULL_TREE;
7095 }
7096 return result;
7097 }
7098
7099 default:
7100 break;
7101 }
7102 }
7103 return NULL_TREE;
7104}
7105
7106/* Try to simplify the OR of two comparisons, specified by
7107 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
7108 If this can be simplified to a single expression (without requiring
7109 introducing more SSA variables to hold intermediate values),
7110 return the resulting tree. Otherwise return NULL_TREE.
7111 If the result expression is non-null, it has boolean type. */
7112
7113tree
5f487a34
LJH
7114maybe_fold_or_comparisons (tree type,
7115 enum tree_code code1, tree op1a, tree op1b,
e89065a1
SL
7116 enum tree_code code2, tree op2a, tree op2b)
7117{
5f487a34 7118 if (tree t = or_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b))
e89065a1 7119 return t;
cfef45c8 7120
5f487a34
LJH
7121 if (tree t = or_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b))
7122 return t;
7123
7124 if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_IOR_EXPR, code1,
7125 op1a, op1b, code2, op2a,
7126 op2b))
7127 return t;
7128
7129 return NULL_TREE;
7130}
cfef45c8
RG
7131
7132/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
7133
7134 Either NULL_TREE, a simplified but non-constant or a constant
7135 is returned.
7136
7137 ??? This should go into a gimple-fold-inline.h file to be eventually
7138 privatized with the single valueize function used in the various TUs
7139 to avoid the indirect function call overhead. */
7140
7141tree
355fe088 7142gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
d2a85801 7143 tree (*gvalueize) (tree))
cfef45c8 7144{
5d75ad95 7145 gimple_match_op res_op;
45cc9f96
RB
7146 /* ??? The SSA propagators do not correctly deal with following SSA use-def
7147 edges if there are intermediate VARYING defs. For this reason
7148 do not follow SSA edges here even though SCCVN can technically
7149 just deal fine with that. */
5d75ad95 7150 if (gimple_simplify (stmt, &res_op, NULL, gvalueize, valueize))
45cc9f96 7151 {
34050b6b 7152 tree res = NULL_TREE;
5d75ad95
RS
7153 if (gimple_simplified_result_is_gimple_val (&res_op))
7154 res = res_op.ops[0];
34050b6b 7155 else if (mprts_hook)
5d75ad95 7156 res = mprts_hook (&res_op);
34050b6b 7157 if (res)
45cc9f96 7158 {
34050b6b
RB
7159 if (dump_file && dump_flags & TDF_DETAILS)
7160 {
7161 fprintf (dump_file, "Match-and-simplified ");
7162 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
7163 fprintf (dump_file, " to ");
ef6cb4c7 7164 print_generic_expr (dump_file, res);
34050b6b
RB
7165 fprintf (dump_file, "\n");
7166 }
7167 return res;
45cc9f96 7168 }
45cc9f96
RB
7169 }
7170
cfef45c8
RG
7171 location_t loc = gimple_location (stmt);
7172 switch (gimple_code (stmt))
7173 {
7174 case GIMPLE_ASSIGN:
7175 {
7176 enum tree_code subcode = gimple_assign_rhs_code (stmt);
7177
7178 switch (get_gimple_rhs_class (subcode))
7179 {
7180 case GIMPLE_SINGLE_RHS:
7181 {
7182 tree rhs = gimple_assign_rhs1 (stmt);
7183 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
7184
7185 if (TREE_CODE (rhs) == SSA_NAME)
7186 {
7187 /* If the RHS is an SSA_NAME, return its known constant value,
7188 if any. */
7189 return (*valueize) (rhs);
7190 }
7191 /* Handle propagating invariant addresses into address
7192 operations. */
7193 else if (TREE_CODE (rhs) == ADDR_EXPR
7194 && !is_gimple_min_invariant (rhs))
7195 {
a90c8804 7196 poly_int64 offset = 0;
cfef45c8
RG
7197 tree base;
7198 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
7199 &offset,
7200 valueize);
7201 if (base
7202 && (CONSTANT_CLASS_P (base)
7203 || decl_address_invariant_p (base)))
7204 return build_invariant_address (TREE_TYPE (rhs),
7205 base, offset);
7206 }
7207 else if (TREE_CODE (rhs) == CONSTRUCTOR
7208 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
928686b1
RS
7209 && known_eq (CONSTRUCTOR_NELTS (rhs),
7210 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
cfef45c8 7211 {
794e3180
RS
7212 unsigned i, nelts;
7213 tree val;
cfef45c8 7214
928686b1 7215 nelts = CONSTRUCTOR_NELTS (rhs);
5ebaa477 7216 tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
cfef45c8
RG
7217 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
7218 {
7219 val = (*valueize) (val);
7220 if (TREE_CODE (val) == INTEGER_CST
7221 || TREE_CODE (val) == REAL_CST
7222 || TREE_CODE (val) == FIXED_CST)
794e3180 7223 vec.quick_push (val);
cfef45c8
RG
7224 else
7225 return NULL_TREE;
7226 }
7227
5ebaa477 7228 return vec.build ();
cfef45c8 7229 }
bdf37f7a
JH
7230 if (subcode == OBJ_TYPE_REF)
7231 {
7232 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
7233 /* If callee is constant, we can fold away the wrapper. */
7234 if (is_gimple_min_invariant (val))
7235 return val;
7236 }
cfef45c8
RG
7237
7238 if (kind == tcc_reference)
7239 {
7240 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
7241 || TREE_CODE (rhs) == REALPART_EXPR
7242 || TREE_CODE (rhs) == IMAGPART_EXPR)
7243 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
7244 {
7245 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
7246 return fold_unary_loc (EXPR_LOCATION (rhs),
7247 TREE_CODE (rhs),
7248 TREE_TYPE (rhs), val);
7249 }
7250 else if (TREE_CODE (rhs) == BIT_FIELD_REF
7251 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
7252 {
7253 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
7254 return fold_ternary_loc (EXPR_LOCATION (rhs),
7255 TREE_CODE (rhs),
7256 TREE_TYPE (rhs), val,
7257 TREE_OPERAND (rhs, 1),
7258 TREE_OPERAND (rhs, 2));
7259 }
7260 else if (TREE_CODE (rhs) == MEM_REF
7261 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
7262 {
7263 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
7264 if (TREE_CODE (val) == ADDR_EXPR
7265 && is_gimple_min_invariant (val))
7266 {
7267 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
7268 unshare_expr (val),
7269 TREE_OPERAND (rhs, 1));
7270 if (tem)
7271 rhs = tem;
7272 }
7273 }
7274 return fold_const_aggregate_ref_1 (rhs, valueize);
7275 }
7276 else if (kind == tcc_declaration)
7277 return get_symbol_constant_value (rhs);
7278 return rhs;
7279 }
7280
7281 case GIMPLE_UNARY_RHS:
f3582e54 7282 return NULL_TREE;
cfef45c8
RG
7283
7284 case GIMPLE_BINARY_RHS:
4b1b9e64
RB
7285 /* Translate &x + CST into an invariant form suitable for
7286 further propagation. */
7287 if (subcode == POINTER_PLUS_EXPR)
7288 {
4b1b9e64
RB
7289 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
7290 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
4b1b9e64
RB
7291 if (TREE_CODE (op0) == ADDR_EXPR
7292 && TREE_CODE (op1) == INTEGER_CST)
7293 {
7294 tree off = fold_convert (ptr_type_node, op1);
4e3d3e40
RB
7295 return build1_loc
7296 (loc, ADDR_EXPR, TREE_TYPE (op0),
4b1b9e64
RB
7297 fold_build2 (MEM_REF,
7298 TREE_TYPE (TREE_TYPE (op0)),
7299 unshare_expr (op0), off));
7300 }
7301 }
59c20dc7
RB
7302 /* Canonicalize bool != 0 and bool == 0 appearing after
7303 valueization. While gimple_simplify handles this
7304 it can get confused by the ~X == 1 -> X == 0 transform
7305 which we cant reduce to a SSA name or a constant
7306 (and we have no way to tell gimple_simplify to not
7307 consider those transforms in the first place). */
7308 else if (subcode == EQ_EXPR
7309 || subcode == NE_EXPR)
7310 {
7311 tree lhs = gimple_assign_lhs (stmt);
7312 tree op0 = gimple_assign_rhs1 (stmt);
7313 if (useless_type_conversion_p (TREE_TYPE (lhs),
7314 TREE_TYPE (op0)))
7315 {
7316 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
7317 op0 = (*valueize) (op0);
8861704d
RB
7318 if (TREE_CODE (op0) == INTEGER_CST)
7319 std::swap (op0, op1);
7320 if (TREE_CODE (op1) == INTEGER_CST
7321 && ((subcode == NE_EXPR && integer_zerop (op1))
7322 || (subcode == EQ_EXPR && integer_onep (op1))))
7323 return op0;
59c20dc7
RB
7324 }
7325 }
4b1b9e64 7326 return NULL_TREE;
cfef45c8
RG
7327
7328 case GIMPLE_TERNARY_RHS:
7329 {
7330 /* Handle ternary operators that can appear in GIMPLE form. */
7331 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
7332 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
7333 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
cfef45c8
RG
7334 return fold_ternary_loc (loc, subcode,
7335 gimple_expr_type (stmt), op0, op1, op2);
7336 }
7337
7338 default:
7339 gcc_unreachable ();
7340 }
7341 }
7342
7343 case GIMPLE_CALL:
7344 {
25583c4f 7345 tree fn;
538dd0b7 7346 gcall *call_stmt = as_a <gcall *> (stmt);
25583c4f
RS
7347
7348 if (gimple_call_internal_p (stmt))
31e071ae
MP
7349 {
7350 enum tree_code subcode = ERROR_MARK;
7351 switch (gimple_call_internal_fn (stmt))
7352 {
7353 case IFN_UBSAN_CHECK_ADD:
7354 subcode = PLUS_EXPR;
7355 break;
7356 case IFN_UBSAN_CHECK_SUB:
7357 subcode = MINUS_EXPR;
7358 break;
7359 case IFN_UBSAN_CHECK_MUL:
7360 subcode = MULT_EXPR;
7361 break;
68fa96d6
ML
7362 case IFN_BUILTIN_EXPECT:
7363 {
7364 tree arg0 = gimple_call_arg (stmt, 0);
7365 tree op0 = (*valueize) (arg0);
7366 if (TREE_CODE (op0) == INTEGER_CST)
7367 return op0;
7368 return NULL_TREE;
7369 }
31e071ae
MP
7370 default:
7371 return NULL_TREE;
7372 }
368b454d
JJ
7373 tree arg0 = gimple_call_arg (stmt, 0);
7374 tree arg1 = gimple_call_arg (stmt, 1);
7375 tree op0 = (*valueize) (arg0);
7376 tree op1 = (*valueize) (arg1);
31e071ae
MP
7377
7378 if (TREE_CODE (op0) != INTEGER_CST
7379 || TREE_CODE (op1) != INTEGER_CST)
368b454d
JJ
7380 {
7381 switch (subcode)
7382 {
7383 case MULT_EXPR:
7384 /* x * 0 = 0 * x = 0 without overflow. */
7385 if (integer_zerop (op0) || integer_zerop (op1))
7386 return build_zero_cst (TREE_TYPE (arg0));
7387 break;
7388 case MINUS_EXPR:
7389 /* y - y = 0 without overflow. */
7390 if (operand_equal_p (op0, op1, 0))
7391 return build_zero_cst (TREE_TYPE (arg0));
7392 break;
7393 default:
7394 break;
7395 }
7396 }
7397 tree res
7398 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
31e071ae
MP
7399 if (res
7400 && TREE_CODE (res) == INTEGER_CST
7401 && !TREE_OVERFLOW (res))
7402 return res;
7403 return NULL_TREE;
7404 }
25583c4f
RS
7405
7406 fn = (*valueize) (gimple_call_fn (stmt));
cfef45c8 7407 if (TREE_CODE (fn) == ADDR_EXPR
6773658a 7408 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
3d78e008 7409 && fndecl_built_in_p (TREE_OPERAND (fn, 0))
5c944c6c
RB
7410 && gimple_builtin_call_types_compatible_p (stmt,
7411 TREE_OPERAND (fn, 0)))
cfef45c8
RG
7412 {
7413 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
a6a0570f 7414 tree retval;
cfef45c8
RG
7415 unsigned i;
7416 for (i = 0; i < gimple_call_num_args (stmt); ++i)
7417 args[i] = (*valueize) (gimple_call_arg (stmt, i));
a6a0570f 7418 retval = fold_builtin_call_array (loc,
538dd0b7 7419 gimple_call_return_type (call_stmt),
cfef45c8 7420 fn, gimple_call_num_args (stmt), args);
cfef45c8 7421 if (retval)
5c944c6c
RB
7422 {
7423 /* fold_call_expr wraps the result inside a NOP_EXPR. */
7424 STRIP_NOPS (retval);
538dd0b7
DM
7425 retval = fold_convert (gimple_call_return_type (call_stmt),
7426 retval);
5c944c6c 7427 }
cfef45c8
RG
7428 return retval;
7429 }
7430 return NULL_TREE;
7431 }
7432
7433 default:
7434 return NULL_TREE;
7435 }
7436}
7437
7438/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
7439 Returns NULL_TREE if folding to a constant is not possible, otherwise
7440 returns a constant according to is_gimple_min_invariant. */
7441
7442tree
355fe088 7443gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
cfef45c8
RG
7444{
7445 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
7446 if (res && is_gimple_min_invariant (res))
7447 return res;
7448 return NULL_TREE;
7449}
7450
7451
7452/* The following set of functions are supposed to fold references using
7453 their constant initializers. */
7454
cfef45c8
RG
7455/* See if we can find constructor defining value of BASE.
7456 When we know the consructor with constant offset (such as
7457 base is array[40] and we do know constructor of array), then
7458 BIT_OFFSET is adjusted accordingly.
7459
7460 As a special case, return error_mark_node when constructor
7461 is not explicitly available, but it is known to be zero
7462 such as 'static const int a;'. */
7463static tree
588db50c 7464get_base_constructor (tree base, poly_int64_pod *bit_offset,
cfef45c8
RG
7465 tree (*valueize)(tree))
7466{
588db50c 7467 poly_int64 bit_offset2, size, max_size;
ee45a32d
EB
7468 bool reverse;
7469
cfef45c8
RG
7470 if (TREE_CODE (base) == MEM_REF)
7471 {
6a5aca53
ML
7472 poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
7473 if (!boff.to_shwi (bit_offset))
7474 return NULL_TREE;
cfef45c8
RG
7475
7476 if (valueize
7477 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
7478 base = valueize (TREE_OPERAND (base, 0));
7479 if (!base || TREE_CODE (base) != ADDR_EXPR)
7480 return NULL_TREE;
7481 base = TREE_OPERAND (base, 0);
7482 }
13e88953
RB
7483 else if (valueize
7484 && TREE_CODE (base) == SSA_NAME)
7485 base = valueize (base);
cfef45c8
RG
7486
7487 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
7488 DECL_INITIAL. If BASE is a nested reference into another
7489 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
7490 the inner reference. */
7491 switch (TREE_CODE (base))
7492 {
7493 case VAR_DECL:
cfef45c8 7494 case CONST_DECL:
6a6dac52
JH
7495 {
7496 tree init = ctor_for_folding (base);
7497
688010ba 7498 /* Our semantic is exact opposite of ctor_for_folding;
6a6dac52
JH
7499 NULL means unknown, while error_mark_node is 0. */
7500 if (init == error_mark_node)
7501 return NULL_TREE;
7502 if (!init)
7503 return error_mark_node;
7504 return init;
7505 }
cfef45c8 7506
13e88953
RB
7507 case VIEW_CONVERT_EXPR:
7508 return get_base_constructor (TREE_OPERAND (base, 0),
7509 bit_offset, valueize);
7510
cfef45c8
RG
7511 case ARRAY_REF:
7512 case COMPONENT_REF:
ee45a32d
EB
7513 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
7514 &reverse);
588db50c 7515 if (!known_size_p (max_size) || maybe_ne (size, max_size))
cfef45c8
RG
7516 return NULL_TREE;
7517 *bit_offset += bit_offset2;
7518 return get_base_constructor (base, bit_offset, valueize);
7519
cfef45c8
RG
7520 case CONSTRUCTOR:
7521 return base;
7522
7523 default:
13e88953
RB
7524 if (CONSTANT_CLASS_P (base))
7525 return base;
7526
cfef45c8
RG
7527 return NULL_TREE;
7528 }
7529}
7530
35b4d3a6
MS
7531/* CTOR is CONSTRUCTOR of an array type. Fold a reference of SIZE bits
7532 to the memory at bit OFFSET. When non-null, TYPE is the expected
7533 type of the reference; otherwise the type of the referenced element
7534 is used instead. When SIZE is zero, attempt to fold a reference to
7535 the entire element which OFFSET refers to. Increment *SUBOFF by
7536 the bit offset of the accessed element. */
cfef45c8
RG
7537
7538static tree
7539fold_array_ctor_reference (tree type, tree ctor,
7540 unsigned HOST_WIDE_INT offset,
c44c2088 7541 unsigned HOST_WIDE_INT size,
35b4d3a6
MS
7542 tree from_decl,
7543 unsigned HOST_WIDE_INT *suboff)
cfef45c8 7544{
807e902e
KZ
7545 offset_int low_bound;
7546 offset_int elt_size;
807e902e 7547 offset_int access_index;
6a636014 7548 tree domain_type = NULL_TREE;
cfef45c8
RG
7549 HOST_WIDE_INT inner_offset;
7550
7551 /* Compute low bound and elt size. */
eb8f1123
RG
7552 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
7553 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
cfef45c8
RG
7554 if (domain_type && TYPE_MIN_VALUE (domain_type))
7555 {
6aa238a1 7556 /* Static constructors for variably sized objects make no sense. */
9ef2eff0
RB
7557 if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
7558 return NULL_TREE;
807e902e 7559 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
cfef45c8
RG
7560 }
7561 else
807e902e 7562 low_bound = 0;
6aa238a1 7563 /* Static constructors for variably sized objects make no sense. */
9ef2eff0
RB
7564 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
7565 return NULL_TREE;
807e902e 7566 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
cfef45c8 7567
35b4d3a6 7568 /* When TYPE is non-null, verify that it specifies a constant-sized
831e688a 7569 access of a multiple of the array element size. Avoid division
6aa238a1
MS
7570 by zero below when ELT_SIZE is zero, such as with the result of
7571 an initializer for a zero-length array or an empty struct. */
7572 if (elt_size == 0
7573 || (type
7574 && (!TYPE_SIZE_UNIT (type)
831e688a 7575 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST)))
cfef45c8
RG
7576 return NULL_TREE;
7577
7578 /* Compute the array index we look for. */
807e902e
KZ
7579 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
7580 elt_size);
27bcd47c 7581 access_index += low_bound;
cfef45c8
RG
7582
7583 /* And offset within the access. */
27bcd47c 7584 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
cfef45c8 7585
3c076c96
JJ
7586 unsigned HOST_WIDE_INT elt_sz = elt_size.to_uhwi ();
7587 if (size > elt_sz * BITS_PER_UNIT)
831e688a
RB
7588 {
7589 /* native_encode_expr constraints. */
7590 if (size > MAX_BITSIZE_MODE_ANY_MODE
7591 || size % BITS_PER_UNIT != 0
3c076c96
JJ
7592 || inner_offset % BITS_PER_UNIT != 0
7593 || elt_sz > MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT)
831e688a
RB
7594 return NULL_TREE;
7595
7596 unsigned ctor_idx;
7597 tree val = get_array_ctor_element_at_index (ctor, access_index,
7598 &ctor_idx);
7599 if (!val && ctor_idx >= CONSTRUCTOR_NELTS (ctor))
7600 return build_zero_cst (type);
7601
7602 /* native-encode adjacent ctor elements. */
7603 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
7604 unsigned bufoff = 0;
7605 offset_int index = 0;
7606 offset_int max_index = access_index;
7607 constructor_elt *elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
7608 if (!val)
7609 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
7610 else if (!CONSTANT_CLASS_P (val))
7611 return NULL_TREE;
7612 if (!elt->index)
7613 ;
7614 else if (TREE_CODE (elt->index) == RANGE_EXPR)
7615 {
7616 index = wi::to_offset (TREE_OPERAND (elt->index, 0));
7617 max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
7618 }
7619 else
7620 index = max_index = wi::to_offset (elt->index);
7621 index = wi::umax (index, access_index);
7622 do
7623 {
3c076c96
JJ
7624 if (bufoff + elt_sz > sizeof (buf))
7625 elt_sz = sizeof (buf) - bufoff;
7626 int len = native_encode_expr (val, buf + bufoff, elt_sz,
831e688a 7627 inner_offset / BITS_PER_UNIT);
3c076c96 7628 if (len != (int) elt_sz - inner_offset / BITS_PER_UNIT)
831e688a
RB
7629 return NULL_TREE;
7630 inner_offset = 0;
7631 bufoff += len;
7632
7633 access_index += 1;
7634 if (wi::cmpu (access_index, index) == 0)
7635 val = elt->value;
7636 else if (wi::cmpu (access_index, max_index) > 0)
7637 {
7638 ctor_idx++;
7639 if (ctor_idx >= CONSTRUCTOR_NELTS (ctor))
7640 {
7641 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
7642 ++max_index;
7643 }
7644 else
7645 {
7646 elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
7647 index = 0;
7648 max_index = access_index;
7649 if (!elt->index)
7650 ;
7651 else if (TREE_CODE (elt->index) == RANGE_EXPR)
7652 {
7653 index = wi::to_offset (TREE_OPERAND (elt->index, 0));
7654 max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
7655 }
7656 else
7657 index = max_index = wi::to_offset (elt->index);
7658 index = wi::umax (index, access_index);
7659 if (wi::cmpu (access_index, index) == 0)
7660 val = elt->value;
7661 else
7662 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
7663 }
7664 }
7665 }
7666 while (bufoff < size / BITS_PER_UNIT);
7667 *suboff += size;
7668 return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
7669 }
7670
6a636014 7671 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
35b4d3a6
MS
7672 {
7673 if (!size && TREE_CODE (val) != CONSTRUCTOR)
7674 {
7675 /* For the final reference to the entire accessed element
7676 (SIZE is zero), reset INNER_OFFSET, disegard TYPE (which
7677 may be null) in favor of the type of the element, and set
7678 SIZE to the size of the accessed element. */
7679 inner_offset = 0;
7680 type = TREE_TYPE (val);
6e41c27b 7681 size = elt_sz * BITS_PER_UNIT;
35b4d3a6 7682 }
6e41c27b
RB
7683 else if (size && access_index < CONSTRUCTOR_NELTS (ctor) - 1
7684 && TREE_CODE (val) == CONSTRUCTOR
7685 && (elt_sz * BITS_PER_UNIT - inner_offset) < size)
7686 /* If this isn't the last element in the CTOR and a CTOR itself
7687 and it does not cover the whole object we are requesting give up
7688 since we're not set up for combining from multiple CTORs. */
7689 return NULL_TREE;
35b4d3a6 7690
6e41c27b 7691 *suboff += access_index.to_uhwi () * elt_sz * BITS_PER_UNIT;
35b4d3a6
MS
7692 return fold_ctor_reference (type, val, inner_offset, size, from_decl,
7693 suboff);
7694 }
cfef45c8 7695
35b4d3a6
MS
7696 /* Memory not explicitly mentioned in constructor is 0 (or
7697 the reference is out of range). */
7698 return type ? build_zero_cst (type) : NULL_TREE;
cfef45c8
RG
7699}
7700
35b4d3a6
MS
7701/* CTOR is CONSTRUCTOR of an aggregate or vector. Fold a reference
7702 of SIZE bits to the memory at bit OFFSET. When non-null, TYPE
7703 is the expected type of the reference; otherwise the type of
7704 the referenced member is used instead. When SIZE is zero,
7705 attempt to fold a reference to the entire member which OFFSET
7706 refers to; in this case. Increment *SUBOFF by the bit offset
7707 of the accessed member. */
cfef45c8
RG
7708
7709static tree
7710fold_nonarray_ctor_reference (tree type, tree ctor,
7711 unsigned HOST_WIDE_INT offset,
c44c2088 7712 unsigned HOST_WIDE_INT size,
35b4d3a6
MS
7713 tree from_decl,
7714 unsigned HOST_WIDE_INT *suboff)
cfef45c8
RG
7715{
7716 unsigned HOST_WIDE_INT cnt;
7717 tree cfield, cval;
7718
7719 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
7720 cval)
7721 {
7722 tree byte_offset = DECL_FIELD_OFFSET (cfield);
7723 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
7724 tree field_size = DECL_SIZE (cfield);
35b4d3a6
MS
7725
7726 if (!field_size)
7727 {
7728 /* Determine the size of the flexible array member from
7729 the size of the initializer provided for it. */
7730 field_size = TYPE_SIZE (TREE_TYPE (cval));
7731 }
cfef45c8
RG
7732
7733 /* Variable sized objects in static constructors makes no sense,
7734 but field_size can be NULL for flexible array members. */
7735 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
7736 && TREE_CODE (byte_offset) == INTEGER_CST
7737 && (field_size != NULL_TREE
7738 ? TREE_CODE (field_size) == INTEGER_CST
7739 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
7740
7741 /* Compute bit offset of the field. */
35b4d3a6
MS
7742 offset_int bitoffset
7743 = (wi::to_offset (field_offset)
7744 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
cfef45c8 7745 /* Compute bit offset where the field ends. */
35b4d3a6 7746 offset_int bitoffset_end;
cfef45c8 7747 if (field_size != NULL_TREE)
807e902e 7748 bitoffset_end = bitoffset + wi::to_offset (field_size);
cfef45c8 7749 else
807e902e 7750 bitoffset_end = 0;
cfef45c8 7751
35b4d3a6
MS
7752 /* Compute the bit offset of the end of the desired access.
7753 As a special case, if the size of the desired access is
7754 zero, assume the access is to the entire field (and let
7755 the caller make any necessary adjustments by storing
7756 the actual bounds of the field in FIELDBOUNDS). */
7757 offset_int access_end = offset_int (offset);
7758 if (size)
7759 access_end += size;
7760 else
7761 access_end = bitoffset_end;
b8b2b009 7762
35b4d3a6
MS
7763 /* Is there any overlap between the desired access at
7764 [OFFSET, OFFSET+SIZE) and the offset of the field within
7765 the object at [BITOFFSET, BITOFFSET_END)? */
807e902e 7766 if (wi::cmps (access_end, bitoffset) > 0
cfef45c8 7767 && (field_size == NULL_TREE
807e902e 7768 || wi::lts_p (offset, bitoffset_end)))
cfef45c8 7769 {
35b4d3a6
MS
7770 *suboff += bitoffset.to_uhwi ();
7771
7772 if (!size && TREE_CODE (cval) != CONSTRUCTOR)
7773 {
7774 /* For the final reference to the entire accessed member
7775 (SIZE is zero), reset OFFSET, disegard TYPE (which may
7776 be null) in favor of the type of the member, and set
7777 SIZE to the size of the accessed member. */
7778 offset = bitoffset.to_uhwi ();
7779 type = TREE_TYPE (cval);
7780 size = (bitoffset_end - bitoffset).to_uhwi ();
7781 }
7782
7783 /* We do have overlap. Now see if the field is large enough
7784 to cover the access. Give up for accesses that extend
7785 beyond the end of the object or that span multiple fields. */
807e902e 7786 if (wi::cmps (access_end, bitoffset_end) > 0)
cfef45c8 7787 return NULL_TREE;
032c80e9 7788 if (offset < bitoffset)
b8b2b009 7789 return NULL_TREE;
35b4d3a6
MS
7790
7791 offset_int inner_offset = offset_int (offset) - bitoffset;
cfef45c8 7792 return fold_ctor_reference (type, cval,
27bcd47c 7793 inner_offset.to_uhwi (), size,
35b4d3a6 7794 from_decl, suboff);
cfef45c8
RG
7795 }
7796 }
14b7950f
MS
7797
7798 if (!type)
7799 return NULL_TREE;
7800
7801 return build_zero_cst (type);
cfef45c8
RG
7802}
7803
35b4d3a6 7804/* CTOR is value initializing memory. Fold a reference of TYPE and
14b7950f 7805 bit size POLY_SIZE to the memory at bit POLY_OFFSET. When POLY_SIZE
35b4d3a6
MS
7806 is zero, attempt to fold a reference to the entire subobject
7807 which OFFSET refers to. This is used when folding accesses to
7808 string members of aggregates. When non-null, set *SUBOFF to
7809 the bit offset of the accessed subobject. */
cfef45c8 7810
8403c2cf 7811tree
35b4d3a6
MS
7812fold_ctor_reference (tree type, tree ctor, const poly_uint64 &poly_offset,
7813 const poly_uint64 &poly_size, tree from_decl,
7814 unsigned HOST_WIDE_INT *suboff /* = NULL */)
cfef45c8
RG
7815{
7816 tree ret;
7817
7818 /* We found the field with exact match. */
35b4d3a6
MS
7819 if (type
7820 && useless_type_conversion_p (type, TREE_TYPE (ctor))
30acf282 7821 && known_eq (poly_offset, 0U))
9d60be38 7822 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
cfef45c8 7823
30acf282
RS
7824 /* The remaining optimizations need a constant size and offset. */
7825 unsigned HOST_WIDE_INT size, offset;
7826 if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
7827 return NULL_TREE;
7828
cfef45c8
RG
7829 /* We are at the end of walk, see if we can view convert the
7830 result. */
7831 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
7832 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
3d8208ce
TP
7833 && !compare_tree_int (TYPE_SIZE (type), size)
7834 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
cfef45c8 7835 {
9d60be38 7836 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
cfef45c8 7837 if (ret)
672d9f8e
RB
7838 {
7839 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
7840 if (ret)
7841 STRIP_USELESS_TYPE_CONVERSION (ret);
7842 }
cfef45c8
RG
7843 return ret;
7844 }
b2505143
RB
7845 /* For constants and byte-aligned/sized reads try to go through
7846 native_encode/interpret. */
7847 if (CONSTANT_CLASS_P (ctor)
7848 && BITS_PER_UNIT == 8
7849 && offset % BITS_PER_UNIT == 0
ea69031c 7850 && offset / BITS_PER_UNIT <= INT_MAX
b2505143 7851 && size % BITS_PER_UNIT == 0
ea69031c
JJ
7852 && size <= MAX_BITSIZE_MODE_ANY_MODE
7853 && can_native_interpret_type_p (type))
b2505143
RB
7854 {
7855 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1ff0a84c
JJ
7856 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
7857 offset / BITS_PER_UNIT);
7858 if (len > 0)
7859 return native_interpret_expr (type, buf, len);
b2505143 7860 }
cfef45c8
RG
7861 if (TREE_CODE (ctor) == CONSTRUCTOR)
7862 {
35b4d3a6
MS
7863 unsigned HOST_WIDE_INT dummy = 0;
7864 if (!suboff)
7865 suboff = &dummy;
cfef45c8 7866
ea69031c 7867 tree ret;
eb8f1123
RG
7868 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
7869 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
ea69031c
JJ
7870 ret = fold_array_ctor_reference (type, ctor, offset, size,
7871 from_decl, suboff);
7872 else
7873 ret = fold_nonarray_ctor_reference (type, ctor, offset, size,
7874 from_decl, suboff);
7875
7876 /* Fall back to native_encode_initializer. Needs to be done
7877 only in the outermost fold_ctor_reference call (because it itself
7878 recurses into CONSTRUCTORs) and doesn't update suboff. */
7879 if (ret == NULL_TREE
7880 && suboff == &dummy
7881 && BITS_PER_UNIT == 8
7882 && offset % BITS_PER_UNIT == 0
7883 && offset / BITS_PER_UNIT <= INT_MAX
7884 && size % BITS_PER_UNIT == 0
7885 && size <= MAX_BITSIZE_MODE_ANY_MODE
7886 && can_native_interpret_type_p (type))
7887 {
7888 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
7889 int len = native_encode_initializer (ctor, buf, size / BITS_PER_UNIT,
7890 offset / BITS_PER_UNIT);
7891 if (len > 0)
7892 return native_interpret_expr (type, buf, len);
7893 }
35b4d3a6 7894
ea69031c 7895 return ret;
cfef45c8
RG
7896 }
7897
7898 return NULL_TREE;
7899}
7900
7901/* Return the tree representing the element referenced by T if T is an
7902 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
7903 names using VALUEIZE. Return NULL_TREE otherwise. */
7904
7905tree
7906fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
7907{
7908 tree ctor, idx, base;
588db50c 7909 poly_int64 offset, size, max_size;
cfef45c8 7910 tree tem;
ee45a32d 7911 bool reverse;
cfef45c8 7912
f8a7df45
RG
7913 if (TREE_THIS_VOLATILE (t))
7914 return NULL_TREE;
7915
3a65ee74 7916 if (DECL_P (t))
cfef45c8
RG
7917 return get_symbol_constant_value (t);
7918
7919 tem = fold_read_from_constant_string (t);
7920 if (tem)
7921 return tem;
7922
7923 switch (TREE_CODE (t))
7924 {
7925 case ARRAY_REF:
7926 case ARRAY_RANGE_REF:
7927 /* Constant indexes are handled well by get_base_constructor.
7928 Only special case variable offsets.
7929 FIXME: This code can't handle nested references with variable indexes
7930 (they will be handled only by iteration of ccp). Perhaps we can bring
7931 get_ref_base_and_extent here and make it use a valueize callback. */
7932 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
7933 && valueize
7934 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
588db50c 7935 && poly_int_tree_p (idx))
cfef45c8
RG
7936 {
7937 tree low_bound, unit_size;
7938
7939 /* If the resulting bit-offset is constant, track it. */
7940 if ((low_bound = array_ref_low_bound (t),
588db50c 7941 poly_int_tree_p (low_bound))
cfef45c8 7942 && (unit_size = array_ref_element_size (t),
807e902e 7943 tree_fits_uhwi_p (unit_size)))
cfef45c8 7944 {
588db50c
RS
7945 poly_offset_int woffset
7946 = wi::sext (wi::to_poly_offset (idx)
7947 - wi::to_poly_offset (low_bound),
807e902e 7948 TYPE_PRECISION (TREE_TYPE (idx)));
a9e6359a
RB
7949 woffset *= tree_to_uhwi (unit_size);
7950 woffset *= BITS_PER_UNIT;
588db50c 7951 if (woffset.to_shwi (&offset))
807e902e 7952 {
807e902e
KZ
7953 base = TREE_OPERAND (t, 0);
7954 ctor = get_base_constructor (base, &offset, valueize);
7955 /* Empty constructor. Always fold to 0. */
7956 if (ctor == error_mark_node)
7957 return build_zero_cst (TREE_TYPE (t));
7958 /* Out of bound array access. Value is undefined,
7959 but don't fold. */
588db50c 7960 if (maybe_lt (offset, 0))
807e902e 7961 return NULL_TREE;
67914693 7962 /* We cannot determine ctor. */
807e902e
KZ
7963 if (!ctor)
7964 return NULL_TREE;
7965 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
7966 tree_to_uhwi (unit_size)
7967 * BITS_PER_UNIT,
7968 base);
7969 }
cfef45c8
RG
7970 }
7971 }
7972 /* Fallthru. */
7973
7974 case COMPONENT_REF:
7975 case BIT_FIELD_REF:
7976 case TARGET_MEM_REF:
7977 case MEM_REF:
ee45a32d 7978 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
cfef45c8
RG
7979 ctor = get_base_constructor (base, &offset, valueize);
7980
7981 /* Empty constructor. Always fold to 0. */
7982 if (ctor == error_mark_node)
7983 return build_zero_cst (TREE_TYPE (t));
7984 /* We do not know precise address. */
588db50c 7985 if (!known_size_p (max_size) || maybe_ne (max_size, size))
cfef45c8 7986 return NULL_TREE;
67914693 7987 /* We cannot determine ctor. */
cfef45c8
RG
7988 if (!ctor)
7989 return NULL_TREE;
7990
7991 /* Out of bound array access. Value is undefined, but don't fold. */
588db50c 7992 if (maybe_lt (offset, 0))
cfef45c8
RG
7993 return NULL_TREE;
7994
e4f1cbc3
JJ
7995 tem = fold_ctor_reference (TREE_TYPE (t), ctor, offset, size, base);
7996 if (tem)
7997 return tem;
7998
7999 /* For bit field reads try to read the representative and
8000 adjust. */
8001 if (TREE_CODE (t) == COMPONENT_REF
8002 && DECL_BIT_FIELD (TREE_OPERAND (t, 1))
8003 && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)))
8004 {
8005 HOST_WIDE_INT csize, coffset;
8006 tree field = TREE_OPERAND (t, 1);
8007 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
8008 if (INTEGRAL_TYPE_P (TREE_TYPE (repr))
8009 && size.is_constant (&csize)
8010 && offset.is_constant (&coffset)
8011 && (coffset % BITS_PER_UNIT != 0
8012 || csize % BITS_PER_UNIT != 0)
8013 && !reverse
8014 && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN)
8015 {
8016 poly_int64 bitoffset;
8017 poly_uint64 field_offset, repr_offset;
8018 if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
8019 && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
8020 bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
8021 else
8022 bitoffset = 0;
8023 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
8024 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
8025 HOST_WIDE_INT bitoff;
8026 int diff = (TYPE_PRECISION (TREE_TYPE (repr))
8027 - TYPE_PRECISION (TREE_TYPE (field)));
8028 if (bitoffset.is_constant (&bitoff)
8029 && bitoff >= 0
8030 && bitoff <= diff)
8031 {
8032 offset -= bitoff;
8033 size = tree_to_uhwi (DECL_SIZE (repr));
8034
8035 tem = fold_ctor_reference (TREE_TYPE (repr), ctor, offset,
8036 size, base);
8037 if (tem && TREE_CODE (tem) == INTEGER_CST)
8038 {
8039 if (!BYTES_BIG_ENDIAN)
8040 tem = wide_int_to_tree (TREE_TYPE (field),
8041 wi::lrshift (wi::to_wide (tem),
8042 bitoff));
8043 else
8044 tem = wide_int_to_tree (TREE_TYPE (field),
8045 wi::lrshift (wi::to_wide (tem),
8046 diff - bitoff));
8047 return tem;
8048 }
8049 }
8050 }
8051 }
8052 break;
cfef45c8
RG
8053
8054 case REALPART_EXPR:
8055 case IMAGPART_EXPR:
8056 {
8057 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
8058 if (c && TREE_CODE (c) == COMPLEX_CST)
8059 return fold_build1_loc (EXPR_LOCATION (t),
ea69031c 8060 TREE_CODE (t), TREE_TYPE (t), c);
cfef45c8
RG
8061 break;
8062 }
8063
8064 default:
8065 break;
8066 }
8067
8068 return NULL_TREE;
8069}
8070
8071tree
8072fold_const_aggregate_ref (tree t)
8073{
8074 return fold_const_aggregate_ref_1 (t, NULL);
8075}
06bc3ec7 8076
85942f45 8077/* Lookup virtual method with index TOKEN in a virtual table V
ec77d61f
JH
8078 at OFFSET.
8079 Set CAN_REFER if non-NULL to false if method
8080 is not referable or if the virtual table is ill-formed (such as rewriten
8081 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
81fa35bd
MJ
8082
8083tree
85942f45
JH
8084gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
8085 tree v,
ec77d61f
JH
8086 unsigned HOST_WIDE_INT offset,
8087 bool *can_refer)
81fa35bd 8088{
85942f45
JH
8089 tree vtable = v, init, fn;
8090 unsigned HOST_WIDE_INT size;
8c311b50
JH
8091 unsigned HOST_WIDE_INT elt_size, access_index;
8092 tree domain_type;
81fa35bd 8093
ec77d61f
JH
8094 if (can_refer)
8095 *can_refer = true;
8096
9de2f554 8097 /* First of all double check we have virtual table. */
8813a647 8098 if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
ec77d61f 8099 {
ec77d61f
JH
8100 /* Pass down that we lost track of the target. */
8101 if (can_refer)
8102 *can_refer = false;
8103 return NULL_TREE;
8104 }
9de2f554 8105
2aa3da06
JH
8106 init = ctor_for_folding (v);
8107
9de2f554 8108 /* The virtual tables should always be born with constructors
2aa3da06
JH
8109 and we always should assume that they are avaialble for
8110 folding. At the moment we do not stream them in all cases,
8111 but it should never happen that ctor seem unreachable. */
8112 gcc_assert (init);
8113 if (init == error_mark_node)
8114 {
ec77d61f
JH
8115 /* Pass down that we lost track of the target. */
8116 if (can_refer)
8117 *can_refer = false;
2aa3da06
JH
8118 return NULL_TREE;
8119 }
81fa35bd 8120 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
ae7e9ddd 8121 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
85942f45 8122 offset *= BITS_PER_UNIT;
81fa35bd 8123 offset += token * size;
9de2f554 8124
8c311b50
JH
8125 /* Lookup the value in the constructor that is assumed to be array.
8126 This is equivalent to
8127 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
8128 offset, size, NULL);
8129 but in a constant time. We expect that frontend produced a simple
8130 array without indexed initializers. */
8131
8132 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
8133 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
8134 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
8135 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
8136
8137 access_index = offset / BITS_PER_UNIT / elt_size;
8138 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
8139
bf8d8309
MP
8140 /* The C++ FE can now produce indexed fields, and we check if the indexes
8141 match. */
8c311b50
JH
8142 if (access_index < CONSTRUCTOR_NELTS (init))
8143 {
8144 fn = CONSTRUCTOR_ELT (init, access_index)->value;
bf8d8309
MP
8145 tree idx = CONSTRUCTOR_ELT (init, access_index)->index;
8146 gcc_checking_assert (!idx || tree_to_uhwi (idx) == access_index);
8c311b50
JH
8147 STRIP_NOPS (fn);
8148 }
8149 else
8150 fn = NULL;
9de2f554
JH
8151
8152 /* For type inconsistent program we may end up looking up virtual method
8153 in virtual table that does not contain TOKEN entries. We may overrun
8154 the virtual table and pick up a constant or RTTI info pointer.
8155 In any case the call is undefined. */
8156 if (!fn
8157 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
8158 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
8159 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
8160 else
8161 {
8162 fn = TREE_OPERAND (fn, 0);
8163
8164 /* When cgraph node is missing and function is not public, we cannot
8165 devirtualize. This can happen in WHOPR when the actual method
8166 ends up in other partition, because we found devirtualization
8167 possibility too late. */
8168 if (!can_refer_decl_in_current_unit_p (fn, vtable))
ec77d61f
JH
8169 {
8170 if (can_refer)
8171 {
8172 *can_refer = false;
8173 return fn;
8174 }
8175 return NULL_TREE;
8176 }
9de2f554 8177 }
81fa35bd 8178
7501ca28
RG
8179 /* Make sure we create a cgraph node for functions we'll reference.
8180 They can be non-existent if the reference comes from an entry
8181 of an external vtable for example. */
d52f5295 8182 cgraph_node::get_create (fn);
7501ca28 8183
81fa35bd
MJ
8184 return fn;
8185}
8186
85942f45
JH
8187/* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
8188 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
8189 KNOWN_BINFO carries the binfo describing the true type of
ec77d61f
JH
8190 OBJ_TYPE_REF_OBJECT(REF).
8191 Set CAN_REFER if non-NULL to false if method
8192 is not referable or if the virtual table is ill-formed (such as rewriten
8193 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
85942f45
JH
8194
8195tree
ec77d61f
JH
8196gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
8197 bool *can_refer)
85942f45
JH
8198{
8199 unsigned HOST_WIDE_INT offset;
8200 tree v;
8201
8202 v = BINFO_VTABLE (known_binfo);
8203 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
8204 if (!v)
8205 return NULL_TREE;
8206
8207 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
ec77d61f
JH
8208 {
8209 if (can_refer)
8210 *can_refer = false;
8211 return NULL_TREE;
8212 }
8213 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
85942f45
JH
8214}
8215
737f500a
RB
8216/* Given a pointer value T, return a simplified version of an
8217 indirection through T, or NULL_TREE if no simplification is
b184c8f1
AM
8218 possible. Note that the resulting type may be different from
8219 the type pointed to in the sense that it is still compatible
8220 from the langhooks point of view. */
8221
8222tree
8223gimple_fold_indirect_ref (tree t)
8224{
8225 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
8226 tree sub = t;
8227 tree subtype;
8228
8229 STRIP_NOPS (sub);
8230 subtype = TREE_TYPE (sub);
737f500a
RB
8231 if (!POINTER_TYPE_P (subtype)
8232 || TYPE_REF_CAN_ALIAS_ALL (ptype))
b184c8f1
AM
8233 return NULL_TREE;
8234
8235 if (TREE_CODE (sub) == ADDR_EXPR)
8236 {
8237 tree op = TREE_OPERAND (sub, 0);
8238 tree optype = TREE_TYPE (op);
8239 /* *&p => p */
8240 if (useless_type_conversion_p (type, optype))
8241 return op;
8242
8243 /* *(foo *)&fooarray => fooarray[0] */
8244 if (TREE_CODE (optype) == ARRAY_TYPE
8245 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
8246 && useless_type_conversion_p (type, TREE_TYPE (optype)))
8247 {
8248 tree type_domain = TYPE_DOMAIN (optype);
8249 tree min_val = size_zero_node;
8250 if (type_domain && TYPE_MIN_VALUE (type_domain))
8251 min_val = TYPE_MIN_VALUE (type_domain);
8252 if (TREE_CODE (min_val) == INTEGER_CST)
8253 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
8254 }
8255 /* *(foo *)&complexfoo => __real__ complexfoo */
8256 else if (TREE_CODE (optype) == COMPLEX_TYPE
8257 && useless_type_conversion_p (type, TREE_TYPE (optype)))
8258 return fold_build1 (REALPART_EXPR, type, op);
8259 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
8260 else if (TREE_CODE (optype) == VECTOR_TYPE
8261 && useless_type_conversion_p (type, TREE_TYPE (optype)))
8262 {
8263 tree part_width = TYPE_SIZE (type);
8264 tree index = bitsize_int (0);
8265 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
8266 }
8267 }
8268
8269 /* *(p + CST) -> ... */
8270 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
8271 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
8272 {
8273 tree addr = TREE_OPERAND (sub, 0);
8274 tree off = TREE_OPERAND (sub, 1);
8275 tree addrtype;
8276
8277 STRIP_NOPS (addr);
8278 addrtype = TREE_TYPE (addr);
8279
8280 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
8281 if (TREE_CODE (addr) == ADDR_EXPR
8282 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
8283 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
cc269bb6 8284 && tree_fits_uhwi_p (off))
b184c8f1 8285 {
ae7e9ddd 8286 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
b184c8f1
AM
8287 tree part_width = TYPE_SIZE (type);
8288 unsigned HOST_WIDE_INT part_widthi
9439e9a1 8289 = tree_to_shwi (part_width) / BITS_PER_UNIT;
b184c8f1
AM
8290 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
8291 tree index = bitsize_int (indexi);
928686b1
RS
8292 if (known_lt (offset / part_widthi,
8293 TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
b184c8f1
AM
8294 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
8295 part_width, index);
8296 }
8297
8298 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
8299 if (TREE_CODE (addr) == ADDR_EXPR
8300 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
8301 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
8302 {
8303 tree size = TYPE_SIZE_UNIT (type);
8304 if (tree_int_cst_equal (size, off))
8305 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
8306 }
8307
8308 /* *(p + CST) -> MEM_REF <p, CST>. */
8309 if (TREE_CODE (addr) != ADDR_EXPR
8310 || DECL_P (TREE_OPERAND (addr, 0)))
8311 return fold_build2 (MEM_REF, type,
8312 addr,
8e6cdc90 8313 wide_int_to_tree (ptype, wi::to_wide (off)));
b184c8f1
AM
8314 }
8315
8316 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
8317 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
8318 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
8319 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
8320 {
8321 tree type_domain;
8322 tree min_val = size_zero_node;
8323 tree osub = sub;
8324 sub = gimple_fold_indirect_ref (sub);
8325 if (! sub)
8326 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
8327 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
8328 if (type_domain && TYPE_MIN_VALUE (type_domain))
8329 min_val = TYPE_MIN_VALUE (type_domain);
8330 if (TREE_CODE (min_val) == INTEGER_CST)
8331 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
8332 }
8333
8334 return NULL_TREE;
8335}
19e51b40
JJ
8336
8337/* Return true if CODE is an operation that when operating on signed
8338 integer types involves undefined behavior on overflow and the
8339 operation can be expressed with unsigned arithmetic. */
8340
8341bool
8342arith_code_with_undefined_signed_overflow (tree_code code)
8343{
8344 switch (code)
8345 {
8e2c037d 8346 case ABS_EXPR:
19e51b40
JJ
8347 case PLUS_EXPR:
8348 case MINUS_EXPR:
8349 case MULT_EXPR:
8350 case NEGATE_EXPR:
8351 case POINTER_PLUS_EXPR:
8352 return true;
8353 default:
8354 return false;
8355 }
8356}
8357
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple *stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  /* The unsigned counterpart of the LHS type; the operation is carried
     out in this type where overflow wraps and is well-defined.  */
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  /* ABS_EXPR is special: ABSU_EXPR takes the signed operand directly
     and produces the unsigned result, so no operand conversion is
     needed.  */
  if (gimple_assign_rhs_code (stmt) == ABS_EXPR)
    gimple_assign_set_rhs_code (stmt, ABSU_EXPR);
  else
    /* Convert every RHS operand (ops 1..n-1) to the unsigned type,
       appending the conversion statements to STMTS.  This must happen
       before the LHS is replaced below.  */
    for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
      {
	tree op = gimple_op (stmt, i);
	op = gimple_convert (&stmts, type, op);
	gimple_set_op (stmt, i, op);
      }
  /* Give STMT a fresh SSA name of the unsigned type; the original LHS
     is re-established by the trailing conversion statement.  */
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  /* POINTER_PLUS_EXPR makes no sense on two integer operands; it
     becomes a plain PLUS_EXPR in the unsigned type.  */
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_set_modified (stmt, true);
  gimple_seq_add_stmt (&stmts, stmt);
  /* Convert the unsigned result back to the original (signed or
     pointer) type of LHS.  */
  gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
d4f5cd5e 8398
3d2cf79f 8399
c26de36d
RB
8400/* The valueization hook we use for the gimple_build API simplification.
8401 This makes us match fold_buildN behavior by only combining with
8402 statements in the sequence(s) we are currently building. */
8403
8404static tree
8405gimple_build_valueize (tree op)
8406{
8407 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
8408 return op;
8409 return NULL_TREE;
8410}
8411
3d2cf79f 8412/* Build the expression CODE OP0 of type TYPE with location LOC,
c26de36d 8413 simplifying it first if possible. Returns the built
3d2cf79f
RB
8414 expression value and appends statements possibly defining it
8415 to SEQ. */
8416
8417tree
8418gimple_build (gimple_seq *seq, location_t loc,
c26de36d 8419 enum tree_code code, tree type, tree op0)
3d2cf79f 8420{
c26de36d 8421 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
3d2cf79f
RB
8422 if (!res)
8423 {
a15ebbcd 8424 res = create_tmp_reg_or_ssa_name (type);
355fe088 8425 gimple *stmt;
3d2cf79f
RB
8426 if (code == REALPART_EXPR
8427 || code == IMAGPART_EXPR
8428 || code == VIEW_CONVERT_EXPR)
0d0e4a03 8429 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
3d2cf79f 8430 else
0d0e4a03 8431 stmt = gimple_build_assign (res, code, op0);
3d2cf79f
RB
8432 gimple_set_location (stmt, loc);
8433 gimple_seq_add_stmt_without_update (seq, stmt);
8434 }
8435 return res;
8436}
8437
8438/* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
c26de36d 8439 simplifying it first if possible. Returns the built
3d2cf79f
RB
8440 expression value and appends statements possibly defining it
8441 to SEQ. */
8442
8443tree
8444gimple_build (gimple_seq *seq, location_t loc,
c26de36d 8445 enum tree_code code, tree type, tree op0, tree op1)
3d2cf79f 8446{
c26de36d 8447 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
3d2cf79f
RB
8448 if (!res)
8449 {
a15ebbcd 8450 res = create_tmp_reg_or_ssa_name (type);
355fe088 8451 gimple *stmt = gimple_build_assign (res, code, op0, op1);
3d2cf79f
RB
8452 gimple_set_location (stmt, loc);
8453 gimple_seq_add_stmt_without_update (seq, stmt);
8454 }
8455 return res;
8456}
8457
8458/* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
c26de36d 8459 simplifying it first if possible. Returns the built
3d2cf79f
RB
8460 expression value and appends statements possibly defining it
8461 to SEQ. */
8462
8463tree
8464gimple_build (gimple_seq *seq, location_t loc,
c26de36d 8465 enum tree_code code, tree type, tree op0, tree op1, tree op2)
3d2cf79f
RB
8466{
8467 tree res = gimple_simplify (code, type, op0, op1, op2,
c26de36d 8468 seq, gimple_build_valueize);
3d2cf79f
RB
8469 if (!res)
8470 {
a15ebbcd 8471 res = create_tmp_reg_or_ssa_name (type);
355fe088 8472 gimple *stmt;
3d2cf79f 8473 if (code == BIT_FIELD_REF)
0d0e4a03
JJ
8474 stmt = gimple_build_assign (res, code,
8475 build3 (code, type, op0, op1, op2));
3d2cf79f 8476 else
0d0e4a03 8477 stmt = gimple_build_assign (res, code, op0, op1, op2);
3d2cf79f
RB
8478 gimple_set_location (stmt, loc);
8479 gimple_seq_add_stmt_without_update (seq, stmt);
8480 }
8481 return res;
8482}
8483
93a73251
MM
8484/* Build the call FN () with a result of type TYPE (or no result if TYPE is
8485 void) with a location LOC. Returns the built expression value (or NULL_TREE
8486 if TYPE is void) and appends statements possibly defining it to SEQ. */
8487
8488tree
8489gimple_build (gimple_seq *seq, location_t loc, combined_fn fn, tree type)
8490{
8491 tree res = NULL_TREE;
8492 gcall *stmt;
8493 if (internal_fn_p (fn))
8494 stmt = gimple_build_call_internal (as_internal_fn (fn), 0);
8495 else
8496 {
8497 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
8498 stmt = gimple_build_call (decl, 0);
8499 }
8500 if (!VOID_TYPE_P (type))
8501 {
8502 res = create_tmp_reg_or_ssa_name (type);
8503 gimple_call_set_lhs (stmt, res);
8504 }
8505 gimple_set_location (stmt, loc);
8506 gimple_seq_add_stmt_without_update (seq, stmt);
8507 return res;
8508}
8509
3d2cf79f
RB
8510/* Build the call FN (ARG0) with a result of type TYPE
8511 (or no result if TYPE is void) with location LOC,
c26de36d 8512 simplifying it first if possible. Returns the built
3d2cf79f
RB
8513 expression value (or NULL_TREE if TYPE is void) and appends
8514 statements possibly defining it to SEQ. */
8515
8516tree
eb69361d
RS
8517gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
8518 tree type, tree arg0)
3d2cf79f 8519{
c26de36d 8520 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
3d2cf79f
RB
8521 if (!res)
8522 {
eb69361d
RS
8523 gcall *stmt;
8524 if (internal_fn_p (fn))
8525 stmt = gimple_build_call_internal (as_internal_fn (fn), 1, arg0);
8526 else
8527 {
8528 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
8529 stmt = gimple_build_call (decl, 1, arg0);
8530 }
3d2cf79f
RB
8531 if (!VOID_TYPE_P (type))
8532 {
a15ebbcd 8533 res = create_tmp_reg_or_ssa_name (type);
3d2cf79f
RB
8534 gimple_call_set_lhs (stmt, res);
8535 }
8536 gimple_set_location (stmt, loc);
8537 gimple_seq_add_stmt_without_update (seq, stmt);
8538 }
8539 return res;
8540}
8541
8542/* Build the call FN (ARG0, ARG1) with a result of type TYPE
8543 (or no result if TYPE is void) with location LOC,
c26de36d 8544 simplifying it first if possible. Returns the built
3d2cf79f
RB
8545 expression value (or NULL_TREE if TYPE is void) and appends
8546 statements possibly defining it to SEQ. */
8547
8548tree
eb69361d
RS
8549gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
8550 tree type, tree arg0, tree arg1)
3d2cf79f 8551{
c26de36d 8552 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
3d2cf79f
RB
8553 if (!res)
8554 {
eb69361d
RS
8555 gcall *stmt;
8556 if (internal_fn_p (fn))
8557 stmt = gimple_build_call_internal (as_internal_fn (fn), 2, arg0, arg1);
8558 else
8559 {
8560 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
8561 stmt = gimple_build_call (decl, 2, arg0, arg1);
8562 }
3d2cf79f
RB
8563 if (!VOID_TYPE_P (type))
8564 {
a15ebbcd 8565 res = create_tmp_reg_or_ssa_name (type);
3d2cf79f
RB
8566 gimple_call_set_lhs (stmt, res);
8567 }
8568 gimple_set_location (stmt, loc);
8569 gimple_seq_add_stmt_without_update (seq, stmt);
8570 }
8571 return res;
8572}
8573
8574/* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
8575 (or no result if TYPE is void) with location LOC,
c26de36d 8576 simplifying it first if possible. Returns the built
3d2cf79f
RB
8577 expression value (or NULL_TREE if TYPE is void) and appends
8578 statements possibly defining it to SEQ. */
8579
8580tree
eb69361d
RS
8581gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
8582 tree type, tree arg0, tree arg1, tree arg2)
3d2cf79f 8583{
c26de36d
RB
8584 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
8585 seq, gimple_build_valueize);
3d2cf79f
RB
8586 if (!res)
8587 {
eb69361d
RS
8588 gcall *stmt;
8589 if (internal_fn_p (fn))
8590 stmt = gimple_build_call_internal (as_internal_fn (fn),
8591 3, arg0, arg1, arg2);
8592 else
8593 {
8594 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
8595 stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
8596 }
3d2cf79f
RB
8597 if (!VOID_TYPE_P (type))
8598 {
a15ebbcd 8599 res = create_tmp_reg_or_ssa_name (type);
3d2cf79f
RB
8600 gimple_call_set_lhs (stmt, res);
8601 }
8602 gimple_set_location (stmt, loc);
8603 gimple_seq_add_stmt_without_update (seq, stmt);
8604 }
8605 return res;
8606}
8607
8608/* Build the conversion (TYPE) OP with a result of type TYPE
8609 with location LOC if such conversion is neccesary in GIMPLE,
8610 simplifying it first.
8611 Returns the built expression value and appends
8612 statements possibly defining it to SEQ. */
d4f5cd5e
RB
8613
8614tree
8615gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
8616{
8617 if (useless_type_conversion_p (type, TREE_TYPE (op)))
8618 return op;
3d2cf79f 8619 return gimple_build (seq, loc, NOP_EXPR, type, op);
d4f5cd5e 8620}
68e57f04 8621
74e3c262
RB
8622/* Build the conversion (ptrofftype) OP with a result of a type
8623 compatible with ptrofftype with location LOC if such conversion
8624 is neccesary in GIMPLE, simplifying it first.
8625 Returns the built expression value and appends
8626 statements possibly defining it to SEQ. */
8627
8628tree
8629gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
8630{
8631 if (ptrofftype_p (TREE_TYPE (op)))
8632 return op;
8633 return gimple_convert (seq, loc, sizetype, op);
8634}
8635
e7c45b66
RS
8636/* Build a vector of type TYPE in which each element has the value OP.
8637 Return a gimple value for the result, appending any new statements
8638 to SEQ. */
8639
8640tree
8641gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
8642 tree op)
8643{
928686b1
RS
8644 if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
8645 && !CONSTANT_CLASS_P (op))
8646 return gimple_build (seq, loc, VEC_DUPLICATE_EXPR, type, op);
8647
e7c45b66
RS
8648 tree res, vec = build_vector_from_val (type, op);
8649 if (is_gimple_val (vec))
8650 return vec;
8651 if (gimple_in_ssa_p (cfun))
8652 res = make_ssa_name (type);
8653 else
8654 res = create_tmp_reg (type);
8655 gimple *stmt = gimple_build_assign (res, vec);
8656 gimple_set_location (stmt, loc);
8657 gimple_seq_add_stmt_without_update (seq, stmt);
8658 return res;
8659}
8660
abe73c3d
RS
/* Build a vector from BUILDER, handling the case in which some elements
   are non-constant.  Return a gimple value for the result, appending any
   new instructions to SEQ.

   BUILDER must not have a stepped encoding on entry.  This is because
   the function is not geared up to handle the arithmetic that would
   be needed in the variable case, and any code building a vector that
   is known to be constant should use BUILDER->build () directly.  */

tree
gimple_build_vector (gimple_seq *seq, location_t loc,
		     tree_vector_builder *builder)
{
  /* Stepped encodings (3 values per pattern) are not supported here.  */
  gcc_assert (builder->nelts_per_pattern () <= 2);
  unsigned int encoded_nelts = builder->encoded_nelts ();
  /* Scan the encoded elements; as soon as one is non-constant fall
     back to emitting a CONSTRUCTOR assigned to a temporary.  */
  for (unsigned int i = 0; i < encoded_nelts; ++i)
    if (!CONSTANT_CLASS_P ((*builder)[i]))
      {
	tree type = builder->type ();
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
	vec<constructor_elt, va_gc> *v;
	vec_alloc (v, nelts);
	/* Note this reuses the outer loop index I to walk all NELTS
	   elements (builder->elt expands the encoding as needed).  */
	for (i = 0; i < nelts; ++i)
	  CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));

	tree res;
	if (gimple_in_ssa_p (cfun))
	  res = make_ssa_name (type);
	else
	  res = create_tmp_reg (type);
	gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
	gimple_set_location (stmt, loc);
	gimple_seq_add_stmt_without_update (seq, stmt);
	return res;
      }
  /* All encoded elements are constants; build the VECTOR_CST directly.  */
  return builder->build ();
}
8698
93a73251
MM
8699/* Emit gimple statements into &stmts that take a value given in OLD_SIZE
8700 and generate a value guaranteed to be rounded upwards to ALIGN.
8701
8702 Return the tree node representing this size, it is of TREE_TYPE TYPE. */
8703
8704tree
8705gimple_build_round_up (gimple_seq *seq, location_t loc, tree type,
8706 tree old_size, unsigned HOST_WIDE_INT align)
8707{
8708 unsigned HOST_WIDE_INT tg_mask = align - 1;
8709 /* tree new_size = (old_size + tg_mask) & ~tg_mask; */
8710 gcc_assert (INTEGRAL_TYPE_P (type));
8711 tree tree_mask = build_int_cst (type, tg_mask);
8712 tree oversize = gimple_build (seq, loc, PLUS_EXPR, type, old_size,
8713 tree_mask);
8714
8715 tree mask = build_int_cst (type, -align);
8716 return gimple_build (seq, loc, BIT_AND_EXPR, type, oversize, mask);
8717}
8718
68e57f04
RS
8719/* Return true if the result of assignment STMT is known to be non-negative.
8720 If the return value is based on the assumption that signed overflow is
8721 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
8722 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
8723
8724static bool
8725gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
8726 int depth)
8727{
8728 enum tree_code code = gimple_assign_rhs_code (stmt);
8729 switch (get_gimple_rhs_class (code))
8730 {
8731 case GIMPLE_UNARY_RHS:
8732 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
8733 gimple_expr_type (stmt),
8734 gimple_assign_rhs1 (stmt),
8735 strict_overflow_p, depth);
8736 case GIMPLE_BINARY_RHS:
8737 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
8738 gimple_expr_type (stmt),
8739 gimple_assign_rhs1 (stmt),
8740 gimple_assign_rhs2 (stmt),
8741 strict_overflow_p, depth);
8742 case GIMPLE_TERNARY_RHS:
8743 return false;
8744 case GIMPLE_SINGLE_RHS:
8745 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
8746 strict_overflow_p, depth);
8747 case GIMPLE_INVALID_RHS:
8748 break;
8749 }
8750 gcc_unreachable ();
8751}
8752
8753/* Return true if return value of call STMT is known to be non-negative.
8754 If the return value is based on the assumption that signed overflow is
8755 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
8756 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
8757
8758static bool
8759gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
8760 int depth)
8761{
8762 tree arg0 = gimple_call_num_args (stmt) > 0 ?
8763 gimple_call_arg (stmt, 0) : NULL_TREE;
8764 tree arg1 = gimple_call_num_args (stmt) > 1 ?
8765 gimple_call_arg (stmt, 1) : NULL_TREE;
8766
8767 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
1d9da71f 8768 gimple_call_combined_fn (stmt),
68e57f04
RS
8769 arg0,
8770 arg1,
8771 strict_overflow_p, depth);
8772}
8773
4534c203
RB
8774/* Return true if return value of call STMT is known to be non-negative.
8775 If the return value is based on the assumption that signed overflow is
8776 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
8777 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
8778
8779static bool
8780gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
8781 int depth)
8782{
8783 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
8784 {
8785 tree arg = gimple_phi_arg_def (stmt, i);
8786 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
8787 return false;
8788 }
8789 return true;
8790}
8791
68e57f04
RS
8792/* Return true if STMT is known to compute a non-negative value.
8793 If the return value is based on the assumption that signed overflow is
8794 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
8795 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
8796
8797bool
8798gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
8799 int depth)
8800{
8801 switch (gimple_code (stmt))
8802 {
8803 case GIMPLE_ASSIGN:
8804 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
8805 depth);
8806 case GIMPLE_CALL:
8807 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
8808 depth);
4534c203
RB
8809 case GIMPLE_PHI:
8810 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
8811 depth);
68e57f04
RS
8812 default:
8813 return false;
8814 }
8815}
67dbe582
RS
8816
8817/* Return true if the floating-point value computed by assignment STMT
8818 is known to have an integer value. We also allow +Inf, -Inf and NaN
5a00b0aa 8819 to be considered integer values. Return false for signaling NaN.
67dbe582
RS
8820
8821 DEPTH is the current nesting depth of the query. */
8822
8823static bool
8824gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
8825{
8826 enum tree_code code = gimple_assign_rhs_code (stmt);
8827 switch (get_gimple_rhs_class (code))
8828 {
8829 case GIMPLE_UNARY_RHS:
8830 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
8831 gimple_assign_rhs1 (stmt), depth);
8832 case GIMPLE_BINARY_RHS:
8833 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
8834 gimple_assign_rhs1 (stmt),
8835 gimple_assign_rhs2 (stmt), depth);
8836 case GIMPLE_TERNARY_RHS:
8837 return false;
8838 case GIMPLE_SINGLE_RHS:
8839 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
8840 case GIMPLE_INVALID_RHS:
8841 break;
8842 }
8843 gcc_unreachable ();
8844}
8845
8846/* Return true if the floating-point value computed by call STMT is known
8847 to have an integer value. We also allow +Inf, -Inf and NaN to be
5a00b0aa 8848 considered integer values. Return false for signaling NaN.
67dbe582
RS
8849
8850 DEPTH is the current nesting depth of the query. */
8851
8852static bool
8853gimple_call_integer_valued_real_p (gimple *stmt, int depth)
8854{
8855 tree arg0 = (gimple_call_num_args (stmt) > 0
8856 ? gimple_call_arg (stmt, 0)
8857 : NULL_TREE);
8858 tree arg1 = (gimple_call_num_args (stmt) > 1
8859 ? gimple_call_arg (stmt, 1)
8860 : NULL_TREE);
1d9da71f 8861 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
67dbe582
RS
8862 arg0, arg1, depth);
8863}
8864
8865/* Return true if the floating-point result of phi STMT is known to have
8866 an integer value. We also allow +Inf, -Inf and NaN to be considered
5a00b0aa 8867 integer values. Return false for signaling NaN.
67dbe582
RS
8868
8869 DEPTH is the current nesting depth of the query. */
8870
8871static bool
8872gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
8873{
8874 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
8875 {
8876 tree arg = gimple_phi_arg_def (stmt, i);
8877 if (!integer_valued_real_single_p (arg, depth + 1))
8878 return false;
8879 }
8880 return true;
8881}
8882
8883/* Return true if the floating-point value computed by STMT is known
8884 to have an integer value. We also allow +Inf, -Inf and NaN to be
5a00b0aa 8885 considered integer values. Return false for signaling NaN.
67dbe582
RS
8886
8887 DEPTH is the current nesting depth of the query. */
8888
8889bool
8890gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
8891{
8892 switch (gimple_code (stmt))
8893 {
8894 case GIMPLE_ASSIGN:
8895 return gimple_assign_integer_valued_real_p (stmt, depth);
8896 case GIMPLE_CALL:
8897 return gimple_call_integer_valued_real_p (stmt, depth);
8898 case GIMPLE_PHI:
8899 return gimple_phi_integer_valued_real_p (stmt, depth);
8900 default:
8901 return false;
8902 }
8903}