1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2019 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "gimple-pretty-print.h"
33 #include "gimple-ssa-warn-restrict.h"
34 #include "fold-const.h"
35 #include "stmt.h"
36 #include "expr.h"
37 #include "stor-layout.h"
38 #include "dumpfile.h"
39 #include "gimple-fold.h"
40 #include "gimplify.h"
41 #include "gimple-iterator.h"
42 #include "tree-into-ssa.h"
43 #include "tree-dfa.h"
44 #include "tree-object-size.h"
45 #include "tree-ssa.h"
46 #include "tree-ssa-propagate.h"
47 #include "ipa-utils.h"
48 #include "tree-ssa-address.h"
49 #include "langhooks.h"
50 #include "gimplify-me.h"
51 #include "dbgcnt.h"
52 #include "builtins.h"
53 #include "tree-eh.h"
54 #include "gimple-match.h"
55 #include "gomp-constants.h"
56 #include "optabs-query.h"
57 #include "omp-general.h"
58 #include "tree-cfg.h"
59 #include "fold-const-call.h"
60 #include "stringpool.h"
61 #include "attribs.h"
62 #include "asan.h"
63 #include "diagnostic-core.h"
64 #include "intl.h"
65 #include "calls.h"
66 #include "tree-vector-builder.h"
67 #include "tree-ssa-strlen.h"
68
69 enum strlen_range_kind {
70 /* Compute the exact constant string length. */
71 SRK_STRLEN,
72 /* Compute the maximum constant string length. */
73 SRK_STRLENMAX,
74 /* Compute a range of string lengths bounded by object sizes. When
75 the length of a string cannot be determined, consider as the upper
76 bound the size of the enclosing object the string may be a member
77 or element of. Also determine the size of the largest character
78 array the string may refer to. */
79 SRK_LENRANGE,
80 /* Determine the integer value of the argument (not string length). */
81 SRK_INT_VALUE
82 };
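/* An illustrative sketch of the four modes (not part of the original
   source; the array and results are hypothetical): for

     char a[8] = "abc";

   a query on "a" yields roughly
     SRK_STRLEN    -> 3 (the exact constant length),
     SRK_STRLENMAX -> 3 (the maximum constant length),
     SRK_LENRANGE  -> [0, 7] (bounded by the enclosing array size),
   while SRK_INT_VALUE applies only to integer arguments.  */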
83
84 static bool
85 get_range_strlen (tree, bitmap *, strlen_range_kind, c_strlen_data *, unsigned);
86
87 /* Return true when DECL can be referenced from current unit.
88 FROM_DECL (if non-null) specifies the variable whose constructor DECL was taken from.
89 We can get declarations that are not possible to reference for various
90 reasons:
91
92 1) When analyzing C++ virtual tables.
93 C++ virtual tables do have known constructors even
94 when they are keyed to another compilation unit.
95 Those tables can contain pointers to methods and vars
96 in other units. Those methods have both STATIC and EXTERNAL
97 set.
98 2) In WHOPR mode devirtualization might lead to a reference
99 to a method that was partitioned elsewhere.
100 In this case we have static VAR_DECL or FUNCTION_DECL
101 that has no corresponding callgraph/varpool node
102 declaring the body.
103 3) COMDAT functions referred to by external vtables that
104 we devirtualize only during the final compilation stage.
105 At this time we have already decided that we will not output
106 the function body and thus we can't reference the symbol
107 directly. */
108
109 static bool
110 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
111 {
112 varpool_node *vnode;
113 struct cgraph_node *node;
114 symtab_node *snode;
115
116 if (DECL_ABSTRACT_P (decl))
117 return false;
118
119 /* We are concerned only about static/external vars and functions. */
120 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
121 || !VAR_OR_FUNCTION_DECL_P (decl))
122 return true;
123
124 /* Static objects can be referred to only if they are defined and not optimized
125 out yet. */
126 if (!TREE_PUBLIC (decl))
127 {
128 if (DECL_EXTERNAL (decl))
129 return false;
130 /* Before we start optimizing unreachable code we can be sure all
131 static objects are defined. */
132 if (symtab->function_flags_ready)
133 return true;
134 snode = symtab_node::get (decl);
135 if (!snode || !snode->definition)
136 return false;
137 node = dyn_cast <cgraph_node *> (snode);
138 return !node || !node->global.inlined_to;
139 }
140
141 /* We will later output the initializer, so we can refer to it.
142 Thus we are concerned only when DECL comes from the initializer of
143 an external var or a var that has been optimized out. */
144 if (!from_decl
145 || !VAR_P (from_decl)
146 || (!DECL_EXTERNAL (from_decl)
147 && (vnode = varpool_node::get (from_decl)) != NULL
148 && vnode->definition)
149 || (flag_ltrans
150 && (vnode = varpool_node::get (from_decl)) != NULL
151 && vnode->in_other_partition))
152 return true;
153 /* We are folding a reference from an external vtable. The vtable may refer
154 to a symbol keyed to another compilation unit. The other compilation
155 unit may be in a separate DSO and the symbol may be hidden. */
156 if (DECL_VISIBILITY_SPECIFIED (decl)
157 && DECL_EXTERNAL (decl)
158 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
159 && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
160 return false;
161 /* When a function is public, we can always introduce a new reference.
162 The exception is COMDAT functions, where introducing a direct
163 reference implies the need to include the function body in the current unit. */
164 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
165 return true;
166 /* We have COMDAT. We are going to check if we still have definition
167 or if the definition is going to be output in other partition.
168 Bypass this when gimplifying; all needed functions will be produced.
169
170 As observed in PR20991 for already optimized out comdat virtual functions
171 it may be tempting to not give up because the copy will be
172 output elsewhere when the corresponding vtable is output.
173 This is however not possible - the ABI specifies that COMDATs are output in
174 units where they are used and when the other unit was compiled with LTO
175 it is possible that the vtable was kept public while the function itself
176 was privatized. */
177 if (!symtab->function_flags_ready)
178 return true;
179
180 snode = symtab_node::get (decl);
181 if (!snode
182 || ((!snode->definition || DECL_EXTERNAL (decl))
183 && (!snode->in_other_partition
184 || (!snode->forced_by_abi && !snode->force_output))))
185 return false;
186 node = dyn_cast <cgraph_node *> (snode);
187 return !node || !node->global.inlined_to;
188 }
189
190 /* Create a temporary for TYPE for a statement STMT. If the current function
191 is in SSA form, an SSA name is created. Otherwise a temporary register
192 is made. */
193
194 tree
195 create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
196 {
197 if (gimple_in_ssa_p (cfun))
198 return make_ssa_name (type, stmt);
199 else
200 return create_tmp_reg (type);
201 }
202
203 /* CVAL is a value taken from the DECL_INITIAL of a variable. Try to transform
204 it into an acceptable form for is_gimple_min_invariant.
205 FROM_DECL (if non-NULL) specifies the variable whose constructor contains CVAL. */
206
207 tree
208 canonicalize_constructor_val (tree cval, tree from_decl)
209 {
210 if (CONSTANT_CLASS_P (cval))
211 return cval;
212
213 tree orig_cval = cval;
214 STRIP_NOPS (cval);
215 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
216 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
217 {
218 tree ptr = TREE_OPERAND (cval, 0);
219 if (is_gimple_min_invariant (ptr))
220 cval = build1_loc (EXPR_LOCATION (cval),
221 ADDR_EXPR, TREE_TYPE (ptr),
222 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
223 ptr,
224 fold_convert (ptr_type_node,
225 TREE_OPERAND (cval, 1))));
226 }
227 if (TREE_CODE (cval) == ADDR_EXPR)
228 {
229 tree base = NULL_TREE;
230 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
231 {
232 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
233 if (base)
234 TREE_OPERAND (cval, 0) = base;
235 }
236 else
237 base = get_base_address (TREE_OPERAND (cval, 0));
238 if (!base)
239 return NULL_TREE;
240
241 if (VAR_OR_FUNCTION_DECL_P (base)
242 && !can_refer_decl_in_current_unit_p (base, from_decl))
243 return NULL_TREE;
244 if (TREE_TYPE (base) == error_mark_node)
245 return NULL_TREE;
246 if (VAR_P (base))
247 TREE_ADDRESSABLE (base) = 1;
248 else if (TREE_CODE (base) == FUNCTION_DECL)
249 {
250 /* Make sure we create a cgraph node for functions we'll reference.
251 They can be non-existent if the reference comes from an entry
252 of an external vtable for example. */
253 cgraph_node::get_create (base);
254 }
255 /* Fixup types in global initializers. */
256 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
257 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
258
259 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
260 cval = fold_convert (TREE_TYPE (orig_cval), cval);
261 return cval;
262 }
263 /* In CONSTRUCTORs we may see unfolded constants like (int (*) ()) 0. */
264 if (TREE_CODE (cval) == INTEGER_CST)
265 {
266 if (TREE_OVERFLOW_P (cval))
267 cval = drop_tree_overflow (cval);
268 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
269 cval = fold_convert (TREE_TYPE (orig_cval), cval);
270 return cval;
271 }
272 return orig_cval;
273 }
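/* A hedged example of the POINTER_PLUS canonicalization above (the
   declaration is hypothetical): an initializer value of the form
   "&a p+ 8" for

     static int a[4];

   is rewritten into the address "&MEM[&a + 8]", a form that
   is_gimple_min_invariant accepts.  */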
274
275 /* If SYM is a constant variable with known value, return the value.
276 NULL_TREE is returned otherwise. */
277
278 tree
279 get_symbol_constant_value (tree sym)
280 {
281 tree val = ctor_for_folding (sym);
282 if (val != error_mark_node)
283 {
284 if (val)
285 {
286 val = canonicalize_constructor_val (unshare_expr (val), sym);
287 if (val && is_gimple_min_invariant (val))
288 return val;
289 else
290 return NULL_TREE;
291 }
292 /* Variables declared 'const' without an initializer
293 have zero as the initializer if they may not be
294 overridden at link or run time. */
295 if (!val
296 && is_gimple_reg_type (TREE_TYPE (sym)))
297 return build_zero_cst (TREE_TYPE (sym));
298 }
299
300 return NULL_TREE;
301 }
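/* Example of the effect of get_symbol_constant_value (an illustrative
   sketch, not from the original file): given

     static const int n = 42;
     static const int m;

   a load from "n" folds to 42 via the canonicalized DECL_INITIAL, while
   a load from "m" folds to a zero constant, since a const object that
   cannot be overridden at link or run time is zero-initialized.  */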
302
303
304
305 /* Subroutine of fold_stmt. We perform several simplifications of the
306 memory reference tree EXPR and make sure to re-gimplify them properly
307 after propagation of constant addresses. IS_LHS is true if the
308 reference is supposed to be an lvalue. */
309
310 static tree
311 maybe_fold_reference (tree expr, bool is_lhs)
312 {
313 tree result;
314
315 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
316 || TREE_CODE (expr) == REALPART_EXPR
317 || TREE_CODE (expr) == IMAGPART_EXPR)
318 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
319 return fold_unary_loc (EXPR_LOCATION (expr),
320 TREE_CODE (expr),
321 TREE_TYPE (expr),
322 TREE_OPERAND (expr, 0));
323 else if (TREE_CODE (expr) == BIT_FIELD_REF
324 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
325 return fold_ternary_loc (EXPR_LOCATION (expr),
326 TREE_CODE (expr),
327 TREE_TYPE (expr),
328 TREE_OPERAND (expr, 0),
329 TREE_OPERAND (expr, 1),
330 TREE_OPERAND (expr, 2));
331
332 if (!is_lhs
333 && (result = fold_const_aggregate_ref (expr))
334 && is_gimple_min_invariant (result))
335 return result;
336
337 return NULL_TREE;
338 }
339
340
341 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
342 replacement rhs for the statement or NULL_TREE if no simplification
343 could be made. It is assumed that the operands have been previously
344 folded. */
345
346 static tree
347 fold_gimple_assign (gimple_stmt_iterator *si)
348 {
349 gimple *stmt = gsi_stmt (*si);
350 enum tree_code subcode = gimple_assign_rhs_code (stmt);
351 location_t loc = gimple_location (stmt);
352
353 tree result = NULL_TREE;
354
355 switch (get_gimple_rhs_class (subcode))
356 {
357 case GIMPLE_SINGLE_RHS:
358 {
359 tree rhs = gimple_assign_rhs1 (stmt);
360
361 if (TREE_CLOBBER_P (rhs))
362 return NULL_TREE;
363
364 if (REFERENCE_CLASS_P (rhs))
365 return maybe_fold_reference (rhs, false);
366
367 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
368 {
369 tree val = OBJ_TYPE_REF_EXPR (rhs);
370 if (is_gimple_min_invariant (val))
371 return val;
372 else if (flag_devirtualize && virtual_method_call_p (rhs))
373 {
374 bool final;
375 vec <cgraph_node *>targets
376 = possible_polymorphic_call_targets (rhs, stmt, &final);
377 if (final && targets.length () <= 1 && dbg_cnt (devirt))
378 {
379 if (dump_enabled_p ())
380 {
381 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
382 "resolving virtual function address "
383 "reference to function %s\n",
384 targets.length () == 1
385 ? targets[0]->name ()
386 : "NULL");
387 }
388 if (targets.length () == 1)
389 {
390 val = fold_convert (TREE_TYPE (val),
391 build_fold_addr_expr_loc
392 (loc, targets[0]->decl));
393 STRIP_USELESS_TYPE_CONVERSION (val);
394 }
395 else
396 /* We cannot use __builtin_unreachable here because it
397 cannot have its address taken. */
398 val = build_int_cst (TREE_TYPE (val), 0);
399 return val;
400 }
401 }
402 }
403
404 else if (TREE_CODE (rhs) == ADDR_EXPR)
405 {
406 tree ref = TREE_OPERAND (rhs, 0);
407 tree tem = maybe_fold_reference (ref, true);
408 if (tem
409 && TREE_CODE (tem) == MEM_REF
410 && integer_zerop (TREE_OPERAND (tem, 1)))
411 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
412 else if (tem)
413 result = fold_convert (TREE_TYPE (rhs),
414 build_fold_addr_expr_loc (loc, tem));
415 else if (TREE_CODE (ref) == MEM_REF
416 && integer_zerop (TREE_OPERAND (ref, 1)))
417 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
418
419 if (result)
420 {
421 /* Strip away useless type conversions. Both the
422 NON_LVALUE_EXPR that may have been added by fold, and
423 "useless" type conversions that might now be apparent
424 due to propagation. */
425 STRIP_USELESS_TYPE_CONVERSION (result);
426
427 if (result != rhs && valid_gimple_rhs_p (result))
428 return result;
429 }
430 }
431
432 else if (TREE_CODE (rhs) == CONSTRUCTOR
433 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
434 {
435 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
436 unsigned i;
437 tree val;
438
439 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
440 if (! CONSTANT_CLASS_P (val))
441 return NULL_TREE;
442
443 return build_vector_from_ctor (TREE_TYPE (rhs),
444 CONSTRUCTOR_ELTS (rhs));
445 }
446
447 else if (DECL_P (rhs))
448 return get_symbol_constant_value (rhs);
449 }
450 break;
451
452 case GIMPLE_UNARY_RHS:
453 break;
454
455 case GIMPLE_BINARY_RHS:
456 break;
457
458 case GIMPLE_TERNARY_RHS:
459 result = fold_ternary_loc (loc, subcode,
460 TREE_TYPE (gimple_assign_lhs (stmt)),
461 gimple_assign_rhs1 (stmt),
462 gimple_assign_rhs2 (stmt),
463 gimple_assign_rhs3 (stmt));
464
465 if (result)
466 {
467 STRIP_USELESS_TYPE_CONVERSION (result);
468 if (valid_gimple_rhs_p (result))
469 return result;
470 }
471 break;
472
473 case GIMPLE_INVALID_RHS:
474 gcc_unreachable ();
475 }
476
477 return NULL_TREE;
478 }
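/* Illustrative example of the OBJ_TYPE_REF folding above (the GIMPLE
   shapes and names are assumed, not taken from a real dump): with
   -fdevirtualize, a virtual function address such as

     fn_5 = OBJ_TYPE_REF (...);

   whose single possible target is known to be Foo::bar folds to the
   direct address of Foo::bar, and an empty target set folds to a null
   constant because __builtin_unreachable cannot have its address
   taken.  */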
479
480
481 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
482 adjusting the replacement stmts' location and virtual operands.
483 If the statement has an lhs the last stmt in the sequence is expected
484 to assign to that lhs. */
485
486 static void
487 gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
488 {
489 gimple *stmt = gsi_stmt (*si_p);
490
491 if (gimple_has_location (stmt))
492 annotate_all_with_location (stmts, gimple_location (stmt));
493
494 /* First iterate over the replacement statements backward, assigning
495 virtual operands to their defining statements. */
496 gimple *laststore = NULL;
497 for (gimple_stmt_iterator i = gsi_last (stmts);
498 !gsi_end_p (i); gsi_prev (&i))
499 {
500 gimple *new_stmt = gsi_stmt (i);
501 if ((gimple_assign_single_p (new_stmt)
502 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
503 || (is_gimple_call (new_stmt)
504 && (gimple_call_flags (new_stmt)
505 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
506 {
507 tree vdef;
508 if (!laststore)
509 vdef = gimple_vdef (stmt);
510 else
511 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
512 gimple_set_vdef (new_stmt, vdef);
513 if (vdef && TREE_CODE (vdef) == SSA_NAME)
514 SSA_NAME_DEF_STMT (vdef) = new_stmt;
515 laststore = new_stmt;
516 }
517 }
518
519 /* Second iterate over the statements forward, assigning virtual
520 operands to their uses. */
521 tree reaching_vuse = gimple_vuse (stmt);
522 for (gimple_stmt_iterator i = gsi_start (stmts);
523 !gsi_end_p (i); gsi_next (&i))
524 {
525 gimple *new_stmt = gsi_stmt (i);
526 /* If the new statement possibly has a VUSE, update it with the exact
527 SSA name we know will reach this one. */
528 if (gimple_has_mem_ops (new_stmt))
529 gimple_set_vuse (new_stmt, reaching_vuse);
530 gimple_set_modified (new_stmt, true);
531 if (gimple_vdef (new_stmt))
532 reaching_vuse = gimple_vdef (new_stmt);
533 }
534
535 /* If the new sequence does not do a store, release the virtual
536 definition of the original statement. */
537 if (reaching_vuse
538 && reaching_vuse == gimple_vuse (stmt))
539 {
540 tree vdef = gimple_vdef (stmt);
541 if (vdef
542 && TREE_CODE (vdef) == SSA_NAME)
543 {
544 unlink_stmt_vdef (stmt);
545 release_ssa_name (vdef);
546 }
547 }
548
549 /* Finally replace the original statement with the sequence. */
550 gsi_replace_with_seq (si_p, stmts, false);
551 }
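/* A sketch of the virtual operand rewiring performed above (the SSA
   names are hypothetical): replacing a statement carrying VUSE .MEM_3
   and VDEF .MEM_4 by two stores yields

     # .MEM_7 = VDEF <.MEM_3>   <- first store uses the original VUSE
     *p_1 = ...;
     # .MEM_4 = VDEF <.MEM_7>   <- last store keeps the original VDEF
     *q_2 = ...;

   so statements downstream of .MEM_4 need no updating.  */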
552
553 /* Convert EXPR into a GIMPLE value suitable for substitution on the
554 RHS of an assignment. Insert the necessary statements before
555 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL,
556 is replaced. If the call is expected to produce a result, then it
557 is replaced by an assignment of the new RHS to the result variable.
558 If the result is to be ignored, then the call is replaced by a
559 GIMPLE_NOP. A proper VDEF chain is retained by making the first
560 VUSE and the last VDEF of the whole sequence be the same as the replaced
561 statement and using new SSA names for stores in between. */
562
563 void
564 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
565 {
566 tree lhs;
567 gimple *stmt, *new_stmt;
568 gimple_stmt_iterator i;
569 gimple_seq stmts = NULL;
570
571 stmt = gsi_stmt (*si_p);
572
573 gcc_assert (is_gimple_call (stmt));
574
575 push_gimplify_context (gimple_in_ssa_p (cfun));
576
577 lhs = gimple_call_lhs (stmt);
578 if (lhs == NULL_TREE)
579 {
580 gimplify_and_add (expr, &stmts);
581 /* We can end up folding a memcpy of an empty class assignment
582 which gets optimized away by C++ gimplification. */
583 if (gimple_seq_empty_p (stmts))
584 {
585 pop_gimplify_context (NULL);
586 if (gimple_in_ssa_p (cfun))
587 {
588 unlink_stmt_vdef (stmt);
589 release_defs (stmt);
590 }
591 gsi_replace (si_p, gimple_build_nop (), false);
592 return;
593 }
594 }
595 else
596 {
597 tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
598 new_stmt = gimple_build_assign (lhs, tmp);
599 i = gsi_last (stmts);
600 gsi_insert_after_without_update (&i, new_stmt,
601 GSI_CONTINUE_LINKING);
602 }
603
604 pop_gimplify_context (NULL);
605
606 gsi_replace_with_seq_vops (si_p, stmts);
607 }
608
609
610 /* Replace the call at *GSI with the gimple value VAL. */
611
612 void
613 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
614 {
615 gimple *stmt = gsi_stmt (*gsi);
616 tree lhs = gimple_call_lhs (stmt);
617 gimple *repl;
618 if (lhs)
619 {
620 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
621 val = fold_convert (TREE_TYPE (lhs), val);
622 repl = gimple_build_assign (lhs, val);
623 }
624 else
625 repl = gimple_build_nop ();
626 tree vdef = gimple_vdef (stmt);
627 if (vdef && TREE_CODE (vdef) == SSA_NAME)
628 {
629 unlink_stmt_vdef (stmt);
630 release_ssa_name (vdef);
631 }
632 gsi_replace (gsi, repl, false);
633 }
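/* For example (hypothetical GIMPLE), replace_call_with_value turns

     x_1 = strlen (s_2);

   into "x_1 = 3;" when VAL is the constant 3, converting VAL to the
   type of the lhs first if needed; a call without an lhs simply
   becomes a GIMPLE_NOP and its virtual definition is released.  */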
634
635 /* Replace the call at *GSI with the new call REPL and fold that
636 again. */
637
638 static void
639 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
640 {
641 gimple *stmt = gsi_stmt (*gsi);
642 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
643 gimple_set_location (repl, gimple_location (stmt));
644 gimple_move_vops (repl, stmt);
645 gsi_replace (gsi, repl, false);
646 fold_stmt (gsi);
647 }
648
649 /* Return true if VAR is a VAR_DECL or a component thereof. */
650
651 static bool
652 var_decl_component_p (tree var)
653 {
654 tree inner = var;
655 while (handled_component_p (inner))
656 inner = TREE_OPERAND (inner, 0);
657 return (DECL_P (inner)
658 || (TREE_CODE (inner) == MEM_REF
659 && TREE_CODE (TREE_OPERAND (inner, 0)) == ADDR_EXPR));
660 }
661
662 /* Return TRUE if the SIZE argument, representing the size of an
663 object, is in a range of values of which zero is the only valid one. */
664
665 static bool
666 size_must_be_zero_p (tree size)
667 {
668 if (integer_zerop (size))
669 return true;
670
671 if (TREE_CODE (size) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (size)))
672 return false;
673
674 tree type = TREE_TYPE (size);
675 int prec = TYPE_PRECISION (type);
676
677 /* Compute the value of SSIZE_MAX, the largest positive value that
678 can be stored in ssize_t, the signed counterpart of size_t. */
679 wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
680 value_range_base valid_range (VR_RANGE,
681 build_int_cst (type, 0),
682 wide_int_to_tree (type, ssize_max));
683 value_range_base vr;
684 get_range_info (size, vr);
685 vr.intersect (&valid_range);
686 return vr.zero_p ();
687 }
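/* Worked example (assuming a 64-bit size_t): ssize_max is computed as
   (1 << 63) - 1 == 0x7fffffffffffffff, so the valid range is
   [0, 0x7fffffffffffffff].  If SIZE is, say, (size_t) n with n known
   to lie in [-5, 0], its range is either 0 or wraps above SSIZE_MAX;
   the intersection with the valid range collapses to [0, 0] and the
   function returns true.  */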
688
689 /* Fold function call to builtin mem{{,p}cpy,move}. Try to detect and
690 diagnose (otherwise undefined) overlapping copies without preventing
691 folding. When folded, GCC guarantees that overlapping memcpy has
692 the same semantics as memmove. A call to the library memcpy need not
693 provide the same guarantee. Return false if no simplification can
694 be made. */
695
696 static bool
697 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
698 tree dest, tree src, enum built_in_function code)
699 {
700 gimple *stmt = gsi_stmt (*gsi);
701 tree lhs = gimple_call_lhs (stmt);
702 tree len = gimple_call_arg (stmt, 2);
703 tree destvar, srcvar;
704 location_t loc = gimple_location (stmt);
705
706 /* If the LEN parameter is a constant zero or in a range where
707 the only valid value is zero, return DEST. */
708 if (size_must_be_zero_p (len))
709 {
710 gimple *repl;
711 if (gimple_call_lhs (stmt))
712 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
713 else
714 repl = gimple_build_nop ();
715 tree vdef = gimple_vdef (stmt);
716 if (vdef && TREE_CODE (vdef) == SSA_NAME)
717 {
718 unlink_stmt_vdef (stmt);
719 release_ssa_name (vdef);
720 }
721 gsi_replace (gsi, repl, false);
722 return true;
723 }
724
725 /* If SRC and DEST are the same (and not volatile), return
726 DEST{,+LEN,+LEN-1}. */
727 if (operand_equal_p (src, dest, 0))
728 {
729 /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
730 It's safe and may even be emitted by GCC itself (see bug
731 32667). */
732 unlink_stmt_vdef (stmt);
733 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
734 release_ssa_name (gimple_vdef (stmt));
735 if (!lhs)
736 {
737 gsi_replace (gsi, gimple_build_nop (), false);
738 return true;
739 }
740 goto done;
741 }
742 else
743 {
744 tree srctype, desttype;
745 unsigned int src_align, dest_align;
746 tree off0;
747 const char *tmp_str;
748 unsigned HOST_WIDE_INT tmp_len;
749
750 /* Build accesses at offset zero with a ref-all character type. */
751 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
752 ptr_mode, true), 0);
753
754 /* If we can perform the copy efficiently by first doing all loads
755 and then all stores, inline it that way. Currently efficiently
756 means that we can load all the memory into a single integer
757 register which is what MOVE_MAX gives us. */
758 src_align = get_pointer_alignment (src);
759 dest_align = get_pointer_alignment (dest);
760 if (tree_fits_uhwi_p (len)
761 && compare_tree_int (len, MOVE_MAX) <= 0
762 /* FIXME: Don't transform copies from strings with known length.
763 Until GCC 9 this prevented a case in gcc.dg/strlenopt-8.c
764 from being handled, and the case was XFAILed for that reason.
765 Now that it is handled and the XFAIL removed, as soon as other
766 strlenopt tests that rely on it for passing are adjusted, this
767 hack can be removed. */
768 && !c_strlen (src, 1)
769 && !((tmp_str = c_getstr (src, &tmp_len)) != NULL
770 && memchr (tmp_str, 0, tmp_len) == NULL))
771 {
772 unsigned ilen = tree_to_uhwi (len);
773 if (pow2p_hwi (ilen))
774 {
775 /* Detect out-of-bounds accesses without issuing warnings.
776 Avoid folding out-of-bounds copies, but to avoid false
777 positives for unreachable code defer the warning until after
778 DCE has worked its magic.
779 -Wrestrict is still diagnosed. */
780 if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
781 dest, src, len, len,
782 false, false))
783 if (warning != OPT_Wrestrict)
784 return false;
785
786 scalar_int_mode mode;
787 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
788 if (type
789 && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
790 && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
791 /* If the destination pointer is not aligned we must be able
792 to emit an unaligned store. */
793 && (dest_align >= GET_MODE_ALIGNMENT (mode)
794 || !targetm.slow_unaligned_access (mode, dest_align)
795 || (optab_handler (movmisalign_optab, mode)
796 != CODE_FOR_nothing)))
797 {
798 tree srctype = type;
799 tree desttype = type;
800 if (src_align < GET_MODE_ALIGNMENT (mode))
801 srctype = build_aligned_type (type, src_align);
802 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
803 tree tem = fold_const_aggregate_ref (srcmem);
804 if (tem)
805 srcmem = tem;
806 else if (src_align < GET_MODE_ALIGNMENT (mode)
807 && targetm.slow_unaligned_access (mode, src_align)
808 && (optab_handler (movmisalign_optab, mode)
809 == CODE_FOR_nothing))
810 srcmem = NULL_TREE;
811 if (srcmem)
812 {
813 gimple *new_stmt;
814 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
815 {
816 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
817 srcmem
818 = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
819 new_stmt);
820 gimple_assign_set_lhs (new_stmt, srcmem);
821 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
822 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
823 }
824 if (dest_align < GET_MODE_ALIGNMENT (mode))
825 desttype = build_aligned_type (type, dest_align);
826 new_stmt
827 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
828 dest, off0),
829 srcmem);
830 gimple_move_vops (new_stmt, stmt);
831 if (!lhs)
832 {
833 gsi_replace (gsi, new_stmt, false);
834 return true;
835 }
836 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
837 goto done;
838 }
839 }
840 }
841 }
842
843 if (code == BUILT_IN_MEMMOVE)
844 {
845 /* Both DEST and SRC must be pointer types.
846 ??? This is what old code did. Is the testing for pointer types
847 really mandatory?
848
849 If either SRC is readonly or length is 1, we can use memcpy. */
850 if (!dest_align || !src_align)
851 return false;
852 if (readonly_data_expr (src)
853 || (tree_fits_uhwi_p (len)
854 && (MIN (src_align, dest_align) / BITS_PER_UNIT
855 >= tree_to_uhwi (len))))
856 {
857 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
858 if (!fn)
859 return false;
860 gimple_call_set_fndecl (stmt, fn);
861 gimple_call_set_arg (stmt, 0, dest);
862 gimple_call_set_arg (stmt, 1, src);
863 fold_stmt (gsi);
864 return true;
865 }
866
867 /* If *src and *dest can't overlap, optimize into memcpy as well. */
868 if (TREE_CODE (src) == ADDR_EXPR
869 && TREE_CODE (dest) == ADDR_EXPR)
870 {
871 tree src_base, dest_base, fn;
872 poly_int64 src_offset = 0, dest_offset = 0;
873 poly_uint64 maxsize;
874
875 srcvar = TREE_OPERAND (src, 0);
876 src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
877 if (src_base == NULL)
878 src_base = srcvar;
879 destvar = TREE_OPERAND (dest, 0);
880 dest_base = get_addr_base_and_unit_offset (destvar,
881 &dest_offset);
882 if (dest_base == NULL)
883 dest_base = destvar;
884 if (!poly_int_tree_p (len, &maxsize))
885 maxsize = -1;
886 if (SSA_VAR_P (src_base)
887 && SSA_VAR_P (dest_base))
888 {
889 if (operand_equal_p (src_base, dest_base, 0)
890 && ranges_maybe_overlap_p (src_offset, maxsize,
891 dest_offset, maxsize))
892 return false;
893 }
894 else if (TREE_CODE (src_base) == MEM_REF
895 && TREE_CODE (dest_base) == MEM_REF)
896 {
897 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
898 TREE_OPERAND (dest_base, 0), 0))
899 return false;
900 poly_offset_int full_src_offset
901 = mem_ref_offset (src_base) + src_offset;
902 poly_offset_int full_dest_offset
903 = mem_ref_offset (dest_base) + dest_offset;
904 if (ranges_maybe_overlap_p (full_src_offset, maxsize,
905 full_dest_offset, maxsize))
906 return false;
907 }
908 else
909 return false;
910
911 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
912 if (!fn)
913 return false;
914 gimple_call_set_fndecl (stmt, fn);
915 gimple_call_set_arg (stmt, 0, dest);
916 gimple_call_set_arg (stmt, 1, src);
917 fold_stmt (gsi);
918 return true;
919 }
920
921 /* If the destination and source do not alias, optimize into
922 memcpy as well. */
923 if ((is_gimple_min_invariant (dest)
924 || TREE_CODE (dest) == SSA_NAME)
925 && (is_gimple_min_invariant (src)
926 || TREE_CODE (src) == SSA_NAME))
927 {
928 ao_ref destr, srcr;
929 ao_ref_init_from_ptr_and_size (&destr, dest, len);
930 ao_ref_init_from_ptr_and_size (&srcr, src, len);
931 if (!refs_may_alias_p_1 (&destr, &srcr, false))
932 {
933 tree fn;
934 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
935 if (!fn)
936 return false;
937 gimple_call_set_fndecl (stmt, fn);
938 gimple_call_set_arg (stmt, 0, dest);
939 gimple_call_set_arg (stmt, 1, src);
940 fold_stmt (gsi);
941 return true;
942 }
943 }
944
945 return false;
946 }
947
948 if (!tree_fits_shwi_p (len))
949 return false;
950 if (!POINTER_TYPE_P (TREE_TYPE (src))
951 || !POINTER_TYPE_P (TREE_TYPE (dest)))
952 return false;
953 /* In the following try to find a type that is most natural to be
954 used for the memcpy source and destination and that allows
955 the most optimization when memcpy is turned into a plain assignment
956 using that type. In theory we could always use a char[len] type
957 but that only gains us that the destination and source will
958 possibly no longer have their address taken. */
959 srctype = TREE_TYPE (TREE_TYPE (src));
960 if (TREE_CODE (srctype) == ARRAY_TYPE
961 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
962 srctype = TREE_TYPE (srctype);
963 desttype = TREE_TYPE (TREE_TYPE (dest));
964 if (TREE_CODE (desttype) == ARRAY_TYPE
965 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
966 desttype = TREE_TYPE (desttype);
967 if (TREE_ADDRESSABLE (srctype)
968 || TREE_ADDRESSABLE (desttype))
969 return false;
970
971 /* Make sure we are not copying using a floating-point mode or
972 a type whose size possibly does not match its precision. */
973 if (FLOAT_MODE_P (TYPE_MODE (desttype))
974 || TREE_CODE (desttype) == BOOLEAN_TYPE
975 || TREE_CODE (desttype) == ENUMERAL_TYPE)
976 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
977 if (FLOAT_MODE_P (TYPE_MODE (srctype))
978 || TREE_CODE (srctype) == BOOLEAN_TYPE
979 || TREE_CODE (srctype) == ENUMERAL_TYPE)
980 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
981 if (!srctype)
982 srctype = desttype;
983 if (!desttype)
984 desttype = srctype;
985 if (!srctype)
986 return false;
987
988 src_align = get_pointer_alignment (src);
989 dest_align = get_pointer_alignment (dest);
990 if (dest_align < TYPE_ALIGN (desttype)
991 || src_align < TYPE_ALIGN (srctype))
992 return false;
993
994 destvar = NULL_TREE;
995 if (TREE_CODE (dest) == ADDR_EXPR
996 && var_decl_component_p (TREE_OPERAND (dest, 0))
997 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
998 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
999
1000 srcvar = NULL_TREE;
1001 if (TREE_CODE (src) == ADDR_EXPR
1002 && var_decl_component_p (TREE_OPERAND (src, 0))
1003 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1004 {
1005 if (!destvar
1006 || src_align >= TYPE_ALIGN (desttype))
1007 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
1008 src, off0);
1009 else if (!STRICT_ALIGNMENT)
1010 {
1011 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1012 src_align);
1013 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1014 }
1015 }
1016
1017 if (srcvar == NULL_TREE && destvar == NULL_TREE)
1018 return false;
1019
1020 if (srcvar == NULL_TREE)
1021 {
1022 if (src_align >= TYPE_ALIGN (desttype))
1023 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1024 else
1025 {
1026 if (STRICT_ALIGNMENT)
1027 return false;
1028 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1029 src_align);
1030 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1031 }
1032 }
1033 else if (destvar == NULL_TREE)
1034 {
1035 if (dest_align >= TYPE_ALIGN (srctype))
1036 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1037 else
1038 {
1039 if (STRICT_ALIGNMENT)
1040 return false;
1041 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1042 dest_align);
1043 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1044 }
1045 }
1046
1047 /* Same as above, detect out-of-bounds accesses without issuing
1048 warnings. Avoid folding out-of-bounds copies, but to avoid
1049 false positives for unreachable code defer the warning until
1050 after DCE has worked its magic.
1051 -Wrestrict is still diagnosed. */
1052 if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
1053 dest, src, len, len,
1054 false, false))
1055 if (warning != OPT_Wrestrict)
1056 return false;
1057
1058 gimple *new_stmt;
1059 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1060 {
1061 tree tem = fold_const_aggregate_ref (srcvar);
1062 if (tem)
1063 srcvar = tem;
1064 if (! is_gimple_min_invariant (srcvar))
1065 {
1066 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1067 srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
1068 new_stmt);
1069 gimple_assign_set_lhs (new_stmt, srcvar);
1070 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1071 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1072 }
1073 new_stmt = gimple_build_assign (destvar, srcvar);
1074 goto set_vop_and_replace;
1075 }
1076
1077 /* We get an aggregate copy. Use an unsigned char[] type to
1078 perform the copying to preserve padding and to avoid any issues
1079 with TREE_ADDRESSABLE types or float modes behavior on copying. */
1080 desttype = build_array_type_nelts (unsigned_char_type_node,
1081 tree_to_uhwi (len));
1082 srctype = desttype;
1083 if (src_align > TYPE_ALIGN (srctype))
1084 srctype = build_aligned_type (srctype, src_align);
1085 if (dest_align > TYPE_ALIGN (desttype))
1086 desttype = build_aligned_type (desttype, dest_align);
1087 new_stmt
1088 = gimple_build_assign (fold_build2 (MEM_REF, desttype, dest, off0),
1089 fold_build2 (MEM_REF, srctype, src, off0));
1090 set_vop_and_replace:
1091 gimple_move_vops (new_stmt, stmt);
1092 if (!lhs)
1093 {
1094 gsi_replace (gsi, new_stmt, false);
1095 return true;
1096 }
1097 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1098 }
1099
1100 done:
1101 gimple_seq stmts = NULL;
1102 if (code == BUILT_IN_MEMCPY || code == BUILT_IN_MEMMOVE)
1103 len = NULL_TREE;
1104 else if (code == BUILT_IN_MEMPCPY)
1105 {
1106 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1107 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1108 TREE_TYPE (dest), dest, len);
1109 }
1110 else
1111 gcc_unreachable ();
1112
1113 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1114 gimple *repl = gimple_build_assign (lhs, dest);
1115 gsi_replace (gsi, repl, false);
1116 return true;
1117 }
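/* End-to-end illustration of the folding above (a sketch; the exact
   result depends on target, alignment and MOVE_MAX): for

     struct S { int i; } a, b;
     memcpy (&a, &b, sizeof a);

   the 4-byte power-of-two copy becomes a single load/store pair
   through a 32-bit integer type, while "res = mempcpy (d, s, n)"
   additionally materializes "res = d p+ n" at the "done" label.  */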
1118
1119 /* Transform a call to built-in bcmp(a, b, len) at *GSI into one
1120 to built-in memcmp (a, b, len). */
1121
1122 static bool
1123 gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
1124 {
1125 tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);
1126
1127 if (!fn)
1128 return false;
1129
1130 /* Transform bcmp (a, b, len) into memcmp (a, b, len). */
1131
1132 gimple *stmt = gsi_stmt (*gsi);
1133 tree a = gimple_call_arg (stmt, 0);
1134 tree b = gimple_call_arg (stmt, 1);
1135 tree len = gimple_call_arg (stmt, 2);
1136
1137 gimple *repl = gimple_build_call (fn, 3, a, b, len);
1138 replace_call_with_call_and_fold (gsi, repl);
1139
1140 return true;
1141 }
1142
1143 /* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
1144 to built-in memmove (dest, src, len). */
1145
1146 static bool
1147 gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
1148 {
1149 tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);
1150
1151 if (!fn)
1152 return false;
1153
1154 /* bcopy has been removed from POSIX in Issue 7, but Issue 6 specifies
1155 it's equivalent to memmove (not memcpy). Transform bcopy (src, dest,
1156 len) into memmove (dest, src, len). */
1157
1158 gimple *stmt = gsi_stmt (*gsi);
1159 tree src = gimple_call_arg (stmt, 0);
1160 tree dest = gimple_call_arg (stmt, 1);
1161 tree len = gimple_call_arg (stmt, 2);
1162
1163 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1164 gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
1165 replace_call_with_call_and_fold (gsi, repl);
1166
1167 return true;
1168 }
1169
1170 /* Transform a call to built-in bzero (dest, len) at *GSI into one
1171 to built-in memset (dest, 0, len). */
1172
1173 static bool
1174 gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
1175 {
1176 tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
1177
1178 if (!fn)
1179 return false;
1180
1181 /* Transform bzero (dest, len) into memset (dest, 0, len). */
1182
1183 gimple *stmt = gsi_stmt (*gsi);
1184 tree dest = gimple_call_arg (stmt, 0);
1185 tree len = gimple_call_arg (stmt, 1);
1186
1187 gimple_seq seq = NULL;
1188 gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
1189 gimple_seq_add_stmt_without_update (&seq, repl);
1190 gsi_replace_with_seq_vops (gsi, seq);
1191 fold_stmt (gsi);
1192
1193 return true;
1194 }
1195
1196 /* Fold a function call to builtin memset or bzero at *GSI setting the
1197 memory of size LEN to the value C. Return whether a simplification was made. */
1198
1199 static bool
1200 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1201 {
1202 gimple *stmt = gsi_stmt (*gsi);
1203 tree etype;
1204 unsigned HOST_WIDE_INT length, cval;
1205
1206 /* If the LEN parameter is zero, return DEST. */
1207 if (integer_zerop (len))
1208 {
1209 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1210 return true;
1211 }
1212
1213 if (! tree_fits_uhwi_p (len))
1214 return false;
1215
1216 if (TREE_CODE (c) != INTEGER_CST)
1217 return false;
1218
1219 tree dest = gimple_call_arg (stmt, 0);
1220 tree var = dest;
1221 if (TREE_CODE (var) != ADDR_EXPR)
1222 return false;
1223
1224 var = TREE_OPERAND (var, 0);
1225 if (TREE_THIS_VOLATILE (var))
1226 return false;
1227
1228 etype = TREE_TYPE (var);
1229 if (TREE_CODE (etype) == ARRAY_TYPE)
1230 etype = TREE_TYPE (etype);
1231
1232 if (!INTEGRAL_TYPE_P (etype)
1233 && !POINTER_TYPE_P (etype))
1234 return false;
1235
1236 if (! var_decl_component_p (var))
1237 return false;
1238
1239 length = tree_to_uhwi (len);
1240 if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
1241 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1242 return false;
1243
1244 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1245 return false;
1246
1247 if (integer_zerop (c))
1248 cval = 0;
1249 else
1250 {
1251 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1252 return false;
1253
1254 cval = TREE_INT_CST_LOW (c);
1255 cval &= 0xff;
1256 cval |= cval << 8;
1257 cval |= cval << 16;
1258 cval |= (cval << 31) << 1;
1259 }
1260
1261 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1262 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1263 gimple_move_vops (store, stmt);
1264 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1265 if (gimple_call_lhs (stmt))
1266 {
1267 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1268 gsi_replace (gsi, asgn, false);
1269 }
1270 else
1271 {
1272 gimple_stmt_iterator gsi2 = *gsi;
1273 gsi_prev (gsi);
1274 gsi_remove (&gsi2, true);
1275 }
1276
1277 return true;
1278 }
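/* Worked example of the byte replication above (illustrative values):
   for memset (&x, 0xA5, 4) with a 4-byte integral x, cval grows as
   0xA5 -> 0xA5A5 -> 0xA5A5A5A5 through the successive shifts, and the
   call is replaced by the single store "x = 0xA5A5A5A5" (plus an
   assignment of &x to the lhs when the memset result is used).  */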
1279
1280 /* Helper of get_range_strlen for ARG that is not an SSA_NAME. */
1281
1282 static bool
1283 get_range_strlen_tree (tree arg, bitmap *visited, strlen_range_kind rkind,
1284 c_strlen_data *pdata, unsigned eltsize)
1285 {
1286 gcc_assert (TREE_CODE (arg) != SSA_NAME);
1287
1288 /* The length computed by this invocation of the function. */
1289 tree val = NULL_TREE;
1290
1291 /* True if VAL is an optimistic (tight) bound determined from
1292 the size of the character array in which the string may be
1293 stored. In that case, the computed VAL is used to set
1294 PDATA->MAXBOUND. */
1295 bool tight_bound = false;
1296
1297 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1298 if (TREE_CODE (arg) == ADDR_EXPR
1299 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
1300 {
1301 tree op = TREE_OPERAND (arg, 0);
1302 if (integer_zerop (TREE_OPERAND (op, 1)))
1303 {
1304 tree aop0 = TREE_OPERAND (op, 0);
1305 if (TREE_CODE (aop0) == INDIRECT_REF
1306 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1307 return get_range_strlen (TREE_OPERAND (aop0, 0), visited, rkind,
1308 pdata, eltsize);
1309 }
1310 else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF
1311 && rkind == SRK_LENRANGE)
1312 {
1313 /* Fail if an array is the last member of a struct object
1314 since it could be treated as a (fake) flexible array
1315 member. */
1316 tree idx = TREE_OPERAND (op, 1);
1317
1318 arg = TREE_OPERAND (op, 0);
1319 tree optype = TREE_TYPE (arg);
1320 if (tree dom = TYPE_DOMAIN (optype))
1321 if (tree bound = TYPE_MAX_VALUE (dom))
1322 if (TREE_CODE (bound) == INTEGER_CST
1323 && TREE_CODE (idx) == INTEGER_CST
1324 && tree_int_cst_lt (bound, idx))
1325 return false;
1326 }
1327 }
1328
1329 if (rkind == SRK_INT_VALUE)
1330 {
1331 /* We are computing the maximum value (not string length). */
1332 val = arg;
1333 if (TREE_CODE (val) != INTEGER_CST
1334 || tree_int_cst_sgn (val) < 0)
1335 return false;
1336 }
1337 else
1338 {
1339 c_strlen_data lendata = { };
1340 val = c_strlen (arg, 1, &lendata, eltsize);
1341
1342 if (!val && lendata.decl)
1343 {
1344 /* ARG refers to an unterminated const character array
1345 LENDATA.DECL with size LENDATA.LEN. */
1346 val = lendata.minlen;
1347 pdata->decl = lendata.decl;
1348 }
1349 }
1350
1351 if (!val && rkind == SRK_LENRANGE)
1352 {
1353 if (TREE_CODE (arg) == ADDR_EXPR)
1354 return get_range_strlen (TREE_OPERAND (arg, 0), visited, rkind,
1355 pdata, eltsize);
1356
1357 if (TREE_CODE (arg) == ARRAY_REF)
1358 {
1359 tree optype = TREE_TYPE (TREE_OPERAND (arg, 0));
1360
1361 /* Determine the "innermost" array type. */
1362 while (TREE_CODE (optype) == ARRAY_TYPE
1363 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
1364 optype = TREE_TYPE (optype);
1365
1366 /* Avoid arrays of pointers. */
1367 tree eltype = TREE_TYPE (optype);
1368 if (TREE_CODE (optype) != ARRAY_TYPE
1369 || !INTEGRAL_TYPE_P (eltype))
1370 return false;
1371
1372 /* Fail when the array bound is unknown or zero. */
1373 val = TYPE_SIZE_UNIT (optype);
1374 if (!val || integer_zerop (val))
1375 return false;
1376
1377 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1378 integer_one_node);
1379
1380 /* Set the minimum size to zero since the string in
1381 the array could have zero length. */
1382 pdata->minlen = ssize_int (0);
1383
1384 tight_bound = true;
1385 }
1386 else if (TREE_CODE (arg) == COMPONENT_REF
1387 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
1388 == ARRAY_TYPE))
1389 {
1390 /* Use the type of the member array to determine the upper
1391 bound on the length of the array. This may be overly
1392 optimistic if the array itself isn't NUL-terminated and
1393 the caller relies on the subsequent member to contain
1394 the NUL but that would only be considered valid if
1395 the array were the last member of a struct. */
1396
1397 tree fld = TREE_OPERAND (arg, 1);
1398
1399 tree optype = TREE_TYPE (fld);
1400
1401 /* Determine the "innermost" array type. */
1402 while (TREE_CODE (optype) == ARRAY_TYPE
1403 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
1404 optype = TREE_TYPE (optype);
1405
1406 /* Fail when the array bound is unknown or zero. */
1407 val = TYPE_SIZE_UNIT (optype);
1408 if (!val || integer_zerop (val))
1409 return false;
1410 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1411 integer_one_node);
1412
1413 /* Set the minimum size to zero since the string in
1414 the array could have zero length. */
1415 pdata->minlen = ssize_int (0);
1416
1417 /* The array size determined above is an optimistic bound
1418 on the length. If the array isn't nul-terminated the
1419 length computed by the library function would be greater.
1420 Even though using strlen to cross the subobject boundary
1421 is undefined, avoid drawing conclusions from the member
1422 type about the length here. */
1423 tight_bound = true;
1424 }
1425 else if (VAR_P (arg))
1426 {
1427 /* Avoid handling pointers to arrays. GCC might misuse
1428 a pointer to an array of one bound to point to an array
1429 object of a greater bound. */
1430 tree argtype = TREE_TYPE (arg);
1431 if (TREE_CODE (argtype) == ARRAY_TYPE)
1432 {
1433 val = TYPE_SIZE_UNIT (argtype);
1434 if (!val
1435 || TREE_CODE (val) != INTEGER_CST
1436 || integer_zerop (val))
1437 return false;
1438 val = wide_int_to_tree (TREE_TYPE (val),
1439 wi::sub (wi::to_wide (val), 1));
1440
1441 /* Set the minimum size to zero since the string in
1442 the array could have zero length. */
1443 pdata->minlen = ssize_int (0);
1444 }
1445 }
1446 }
1447
1448 if (!val)
1449 return false;
1450
1451 /* Adjust the lower bound on the string length as necessary. */
1452 if (!pdata->minlen
1453 || (rkind != SRK_STRLEN
1454 && TREE_CODE (pdata->minlen) == INTEGER_CST
1455 && TREE_CODE (val) == INTEGER_CST
1456 && tree_int_cst_lt (val, pdata->minlen)))
1457 pdata->minlen = val;
1458
1459 if (pdata->maxbound)
1460 {
1461 /* Adjust the tighter (more optimistic) string length bound
1462 if necessary and proceed to adjust the more conservative
1463 bound. */
1464 if (TREE_CODE (val) == INTEGER_CST)
1465 {
1466 if (TREE_CODE (pdata->maxbound) == INTEGER_CST)
1467 {
1468 if (tree_int_cst_lt (pdata->maxbound, val))
1469 pdata->maxbound = val;
1470 }
1471 else
1472 pdata->maxbound = build_all_ones_cst (size_type_node);
1473 }
1474 else
1475 pdata->maxbound = val;
1476 }
1477 else
1478 pdata->maxbound = val;
1479
1480 if (tight_bound)
1481 {
1482 /* VAL computed above represents an optimistically tight bound
1483 on the length of the string based on the referenced object's
1484 or subobject's type. Determine the conservative upper bound
1485 based on the enclosing object's size if possible. */
1486 if (rkind == SRK_LENRANGE)
1487 {
1488 poly_int64 offset;
1489 tree base = get_addr_base_and_unit_offset (arg, &offset);
1490 if (!base)
1491 {
1492 /* When the call above fails due to a non-constant offset
1493 assume the offset is zero and use the size of the whole
1494 enclosing object instead. */
1495 base = get_base_address (arg);
1496 offset = 0;
1497 }
1498 /* If the base object is a pointer, no upper bound on the length
1499 can be determined. Otherwise the maximum length is equal to
1500 the size of the enclosing object minus the offset of
1501 the referenced subobject minus 1 (for the terminating nul). */
1502 tree type = TREE_TYPE (base);
1503 if (TREE_CODE (type) == POINTER_TYPE
1504 || !VAR_P (base) || !(val = DECL_SIZE_UNIT (base)))
1505 val = build_all_ones_cst (size_type_node);
1506 else
1507 {
1508 val = DECL_SIZE_UNIT (base);
1509 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1510 size_int (offset + 1));
1511 }
1512 }
1513 else
1514 return false;
1515 }
1516
1517 if (pdata->maxlen)
1518 {
1519 /* Adjust the more conservative bound if possible/necessary
1520 and fail otherwise. */
1521 if (rkind != SRK_STRLEN)
1522 {
1523 if (TREE_CODE (pdata->maxlen) != INTEGER_CST
1524 || TREE_CODE (val) != INTEGER_CST)
1525 return false;
1526
1527 if (tree_int_cst_lt (pdata->maxlen, val))
1528 pdata->maxlen = val;
1529 return true;
1530 }
1531 else if (simple_cst_equal (val, pdata->maxlen) != 1)
1532 {
1533 /* Fail if the length of this ARG is different from that
1534 previously determined from another ARG. */
1535 return false;
1536 }
1537 }
1538
1539 pdata->maxlen = val;
1540 return rkind == SRK_LENRANGE || !integer_all_onesp (val);
1541 }
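/* Illustration of the tight vs. conservative bounds above (a sketch,
   assuming SRK_LENRANGE): for

     struct A { char a[4]; char b[8]; } x;

   a query on x.a first sets the optimistic MAXBOUND to 3 (the member
   array size minus 1 for the nul), then widens MAXLEN using the size
   of the enclosing object: sizeof (x) minus the subobject offset
   minus 1, i.e. 11 here, since a strlen that crosses into the next
   member is invalid but must still be bounded conservatively.  */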
1542
1543 /* For an ARG referencing one or more strings, try to obtain the range
1544 of their lengths, or the size of the largest array ARG refers to if
1545 the range of lengths cannot be determined, and store all in *PDATA.
1546 For an integer ARG (when RKIND == SRK_INT_VALUE), try to determine
1547 the maximum constant value.
1548 If ARG is an SSA_NAME, follow its use-def chains. When RKIND ==
1549 SRK_STRLEN, then if PDATA->MAXLEN is not equal to the determined
1550 length or if we are unable to determine the length, return false.
1551 VISITED is a bitmap of visited variables.
1552 RKIND determines the kind of value or range to obtain (see
1553 strlen_range_kind).
1554 Set PDATA->DECL if ARG refers to an unterminated constant array.
1555 On input, set ELTSIZE to 1 for normal single byte character strings,
1556 and either 2 or 4 for wide character strings (the size of wchar_t).
1557 Return true if *PDATA was successfully populated and false otherwise. */
1558
1559 static bool
1560 get_range_strlen (tree arg, bitmap *visited,
1561 strlen_range_kind rkind,
1562 c_strlen_data *pdata, unsigned eltsize)
1563 {
1564
1565 if (TREE_CODE (arg) != SSA_NAME)
1566 return get_range_strlen_tree (arg, visited, rkind, pdata, eltsize);
1567
1568 /* If ARG is registered for SSA update we cannot look at its defining
1569 statement. */
1570 if (name_registered_for_update_p (arg))
1571 return false;
1572
1573 /* If we were already here, break the infinite cycle. */
1574 if (!*visited)
1575 *visited = BITMAP_ALLOC (NULL);
1576 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
1577 return true;
1578
1579 tree var = arg;
1580 gimple *def_stmt = SSA_NAME_DEF_STMT (var);
1581
1582 switch (gimple_code (def_stmt))
1583 {
1584 case GIMPLE_ASSIGN:
1585 /* The RHS of the statement defining VAR must either have a
1586 constant length or come from another SSA_NAME with a constant
1587 length. */
1588 if (gimple_assign_single_p (def_stmt)
1589 || gimple_assign_unary_nop_p (def_stmt))
1590 {
1591 tree rhs = gimple_assign_rhs1 (def_stmt);
1592 return get_range_strlen (rhs, visited, rkind, pdata, eltsize);
1593 }
1594 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1595 {
1596 tree ops[2] = { gimple_assign_rhs2 (def_stmt),
1597 gimple_assign_rhs3 (def_stmt) };
1598
1599 for (unsigned int i = 0; i < 2; i++)
1600 if (!get_range_strlen (ops[i], visited, rkind, pdata, eltsize))
1601 {
1602 if (rkind != SRK_LENRANGE)
1603 return false;
1604 /* Set the upper bound to the maximum to prevent
1605 it from being adjusted in the next iteration but
1606 leave MINLEN and the more conservative MAXBOUND
1607 determined so far alone (or leave them null if
1608 they haven't been set yet). That the MINLEN is
1609 in fact zero can be determined from MAXLEN being
1610 unbounded but the discovered minimum is used for
1611 diagnostics. */
1612 pdata->maxlen = build_all_ones_cst (size_type_node);
1613 }
1614 return true;
1615 }
1616 return false;
1617
1618 case GIMPLE_PHI:
1619 /* Unless RKIND == SRK_LENRANGE, all arguments of the PHI node
1620 must have a constant length. */
1621 for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
1622 {
1623 tree arg = gimple_phi_arg (def_stmt, i)->def;
1624
1625 /* If this PHI has itself as an argument, we cannot
1626 determine the string length of this argument. However,
1627 if we can find a constant string length for the other
1628 PHI args then we can still be sure that this is a
1629 constant string length. So be optimistic and just
1630 continue with the next argument. */
1631 if (arg == gimple_phi_result (def_stmt))
1632 continue;
1633
1634 if (!get_range_strlen (arg, visited, rkind, pdata, eltsize))
1635 {
1636 if (rkind != SRK_LENRANGE)
1637 return false;
1638 /* Set the upper bound to the maximum to prevent
1639 it from being adjusted in the next iteration but
1640 leave MINLEN and the more conservative MAXBOUND
1641 determined so far alone (or leave them null if
1642 they haven't been set yet). That the MINLEN is
1643 in fact zero can be determined from MAXLEN being
1644 unbounded but the discovered minimum is used for
1645 diagnostics. */
1646 pdata->maxlen = build_all_ones_cst (size_type_node);
1647 }
1648 }
1649 return true;
1650
1651 default:
1652 return false;
1653 }
1654 }
1655
1656 /* Try to obtain the range of the lengths of the string(s) referenced
1657 by ARG, or the size of the largest array ARG refers to if the range
1658 of lengths cannot be determined, and store all in *PDATA. ELTSIZE
1659 is the expected size of the string element in bytes: 1 for char and
1660 some power of 2 for wide characters.
1661 Return true if the range [PDATA->MINLEN, PDATA->MAXLEN] is suitable
1662 for optimization. Returning false means that a nonzero PDATA->MINLEN
1663 doesn't reflect the true lower bound of the range when PDATA->MAXLEN
1664 is -1 (in that case, the actual range is indeterminate, i.e.,
1665 [0, PTRDIFF_MAX - 2]). */
1666
1667 bool
1668 get_range_strlen (tree arg, c_strlen_data *pdata, unsigned eltsize)
1669 {
1670 bitmap visited = NULL;
1671
1672 if (!get_range_strlen (arg, &visited, SRK_LENRANGE, pdata, eltsize))
1673 {
1674 /* On failure extend the length range to an impossible maximum
1675 (a valid MAXLEN must be less than PTRDIFF_MAX - 1). Other
1676 members can stay unchanged regardless. */
1677 pdata->minlen = ssize_int (0);
1678 pdata->maxlen = build_all_ones_cst (size_type_node);
1679 }
1680 else if (!pdata->minlen)
1681 pdata->minlen = ssize_int (0);
1682
1683 /* Unless it's null, leave the more conservative MAXBOUND unchanged. */
1684 if (!pdata->maxbound)
1685 pdata->maxbound = pdata->maxlen;
1686
1687 if (visited)
1688 BITMAP_FREE (visited);
1689
1690 return !integer_all_onesp (pdata->maxlen);
1691 }
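/* A minimal usage sketch of the entry point above (the caller and the
   helpers use_range and handle_unknown are hypothetical):

     c_strlen_data lendata = { };
     if (get_range_strlen (arg, &lendata, 1))
       use_range (lendata.minlen, lendata.maxlen);
     else
       handle_unknown ();

   On failure MAXLEN is all-ones and the actual range is the
   indeterminate [0, PTRDIFF_MAX - 2].  */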
1692
1693 /* Return the maximum value for ARG given RKIND (see strlen_range_kind).
1694 For ARG of pointer types, NONSTR indicates if the caller is prepared
1695 to handle unterminated strings. For integer ARG and when RKIND ==
1696 SRK_INT_VALUE, NONSTR must be null.
1697
1698 If an unterminated array is discovered and our caller handles
1699 unterminated arrays, then bubble up the offending DECL and
1700 return the maximum size. Otherwise return NULL. */
1701
1702 static tree
1703 get_maxval_strlen (tree arg, strlen_range_kind rkind, tree *nonstr = NULL)
1704 {
1705 /* A non-null NONSTR is meaningless when determining the maximum
1706 value of an integer ARG. */
1707 gcc_assert (rkind != SRK_INT_VALUE || nonstr == NULL);
1708 /* ARG must have an integral type when RKIND says so. */
1709 gcc_assert (rkind != SRK_INT_VALUE || INTEGRAL_TYPE_P (TREE_TYPE (arg)));
1710
1711 bitmap visited = NULL;
1712
1713 /* Reset DATA.MAXLEN if the call fails or when DATA.MAXLEN
1714 is unbounded. */
1715 c_strlen_data lendata = { };
1716 if (!get_range_strlen (arg, &visited, rkind, &lendata, /* eltsize = */1))
1717 lendata.maxlen = NULL_TREE;
1718 else if (lendata.maxlen && integer_all_onesp (lendata.maxlen))
1719 lendata.maxlen = NULL_TREE;
1720
1721 if (visited)
1722 BITMAP_FREE (visited);
1723
1724 if (nonstr)
1725 {
1726 /* For callers prepared to handle unterminated arrays set
1727 *NONSTR to point to the declaration of the array and return
1728 the maximum length/size. */
1729 *nonstr = lendata.decl;
1730 return lendata.maxlen;
1731 }
1732
1733 /* Fail if the constant array isn't nul-terminated. */
1734 return lendata.decl ? NULL_TREE : lendata.maxlen;
1735 }
1736
1737
1738 /* Fold a function call to builtin strcpy with arguments DEST and SRC.
1739 Return false if no simplification can be made. */
1741
1742 static bool
1743 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
1744 tree dest, tree src)
1745 {
1746 gimple *stmt = gsi_stmt (*gsi);
1747 location_t loc = gimple_location (stmt);
1748 tree fn;
1749
1750 /* If SRC and DEST are the same (and not volatile), return DEST. */
1751 if (operand_equal_p (src, dest, 0))
1752 {
1753 /* Issue -Wrestrict unless the pointers are null (those do
1754 not point to objects and so do not indicate an overlap;
1755 such calls could be the result of sanitization and jump
1756 threading). */
1757 if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
1758 {
1759 tree func = gimple_call_fndecl (stmt);
1760
1761 warning_at (loc, OPT_Wrestrict,
1762 "%qD source argument is the same as destination",
1763 func);
1764 }
1765
1766 replace_call_with_value (gsi, dest);
1767 return true;
1768 }
1769
1770 if (optimize_function_for_size_p (cfun))
1771 return false;
1772
1773 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1774 if (!fn)
1775 return false;
1776
1777 /* Set to non-null if SRC refers to an unterminated array. */
1778 tree nonstr = NULL;
1779 tree len = get_maxval_strlen (src, SRK_STRLEN, &nonstr);
1780
1781 if (nonstr)
1782 {
1783 /* Avoid folding calls with unterminated arrays. */
1784 if (!gimple_no_warning_p (stmt))
1785 warn_string_no_nul (loc, "strcpy", src, nonstr);
1786 gimple_set_no_warning (stmt, true);
1787 return false;
1788 }
1789
1790 if (!len)
1791 return false;
1792
1793 len = fold_convert_loc (loc, size_type_node, len);
1794 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1795 len = force_gimple_operand_gsi (gsi, len, true,
1796 NULL_TREE, true, GSI_SAME_STMT);
1797 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1798 replace_call_with_call_and_fold (gsi, repl);
1799 return true;
1800 }
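
/* Illustrative sketch (not from the original sources): when the source
   length is a known constant, the folding above performs the
   source-level rewrite

     strcpy (dest, "abc");   =>   memcpy (dest, "abc", 4);

   copying strlen ("abc") + 1 bytes so the terminating nul is included.
   The transformation is skipped when optimizing for size.  */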
1801
1802 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1803 Replace the call with an equivalent memcpy when the constant bound
1804 covers the source string. Return false if no simplification can be made. */
1805
1806 static bool
1807 gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1808 tree dest, tree src, tree len)
1809 {
1810 gimple *stmt = gsi_stmt (*gsi);
1811 location_t loc = gimple_location (stmt);
1812 bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;
1813
1814 /* If the LEN parameter is zero, return DEST. */
1815 if (integer_zerop (len))
1816 {
1817 /* Avoid warning if the destination refers to an array/pointer
1818 decorated with attribute nonstring. */
1819 if (!nonstring)
1820 {
1821 tree fndecl = gimple_call_fndecl (stmt);
1822
1823 /* Warn about the lack of nul termination: the result is not
1824 a (nul-terminated) string. */
1825 tree slen = get_maxval_strlen (src, SRK_STRLEN);
1826 if (slen && !integer_zerop (slen))
1827 warning_at (loc, OPT_Wstringop_truncation,
1828 "%G%qD destination unchanged after copying no bytes "
1829 "from a string of length %E",
1830 stmt, fndecl, slen);
1831 else
1832 warning_at (loc, OPT_Wstringop_truncation,
1833 "%G%qD destination unchanged after copying no bytes",
1834 stmt, fndecl);
1835 }
1836
1837 replace_call_with_value (gsi, dest);
1838 return true;
1839 }
1840
1841 /* We can't compare slen with len as constants below if len is not a
1842 constant. */
1843 if (TREE_CODE (len) != INTEGER_CST)
1844 return false;
1845
1846 /* Now, we must be passed a constant src ptr parameter. */
1847 tree slen = get_maxval_strlen (src, SRK_STRLEN);
1848 if (!slen || TREE_CODE (slen) != INTEGER_CST)
1849 return false;
1850
1851 /* The size of the source string including the terminating nul. */
1852 tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1853
1854 /* We do not support simplification of this case, though we do
1855 support it when expanding trees into RTL. */
1856 /* FIXME: generate a call to __builtin_memset. */
1857 if (tree_int_cst_lt (ssize, len))
1858 return false;
1859
1860 /* Diagnose truncation that leaves the copy unterminated. */
1861 maybe_diag_stxncpy_trunc (*gsi, src, len);
1862
1863 /* OK, transform into builtin memcpy. */
1864 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1865 if (!fn)
1866 return false;
1867
1868 len = fold_convert_loc (loc, size_type_node, len);
1869 len = force_gimple_operand_gsi (gsi, len, true,
1870 NULL_TREE, true, GSI_SAME_STMT);
1871 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1872 replace_call_with_call_and_fold (gsi, repl);
1873
1874 return true;
1875 }
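
/* Illustrative sketch (not from the original sources): with a constant
   bound no larger than the source size including its nul, e.g.

     strncpy (dest, "ab", 3);   =>   memcpy (dest, "ab", 3);

   the copy is exact and no zero-padding is required.  A bound larger
   than the source size is punted on, since expressing the padding
   would also require a memset (see the FIXME above).  */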
1876
1877 /* Fold function call to builtin strchr or strrchr.
1878 If both arguments are constant, evaluate and fold the result,
1879 otherwise simplify str(r)chr (str, 0) into str + strlen (str).
1880 In general strlen is significantly faster than strchr
1881 due to being a simpler operation. */
1882 static bool
1883 gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
1884 {
1885 gimple *stmt = gsi_stmt (*gsi);
1886 tree str = gimple_call_arg (stmt, 0);
1887 tree c = gimple_call_arg (stmt, 1);
1888 location_t loc = gimple_location (stmt);
1889 const char *p;
1890 char ch;
1891
1892 if (!gimple_call_lhs (stmt))
1893 return false;
1894
1895 if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
1896 {
1897 const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);
1898
1899 if (p1 == NULL)
1900 {
1901 replace_call_with_value (gsi, integer_zero_node);
1902 return true;
1903 }
1904
1905 tree len = build_int_cst (size_type_node, p1 - p);
1906 gimple_seq stmts = NULL;
1907 gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1908 POINTER_PLUS_EXPR, str, len);
1909 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1910 gsi_replace_with_seq_vops (gsi, stmts);
1911 return true;
1912 }
1913
1914 if (!integer_zerop (c))
1915 return false;
1916
1917 /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size. */
1918 if (is_strrchr && optimize_function_for_size_p (cfun))
1919 {
1920 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1921
1922 if (strchr_fn)
1923 {
1924 gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
1925 replace_call_with_call_and_fold (gsi, repl);
1926 return true;
1927 }
1928
1929 return false;
1930 }
1931
1932 tree len;
1933 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1934
1935 if (!strlen_fn)
1936 return false;
1937
1938 /* Create newstr = strlen (str). */
1939 gimple_seq stmts = NULL;
1940 gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
1941 gimple_set_location (new_stmt, loc);
1942 len = create_tmp_reg_or_ssa_name (size_type_node);
1943 gimple_call_set_lhs (new_stmt, len);
1944 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1945
1946 /* Create (str p+ strlen (str)). */
1947 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1948 POINTER_PLUS_EXPR, str, len);
1949 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1950 gsi_replace_with_seq_vops (gsi, stmts);
1951 /* gsi now points at the assignment to the lhs, get a
1952 stmt iterator to the strlen.
1953 ??? We can't use gsi_for_stmt as that doesn't work when the
1954 CFG isn't built yet. */
1955 gimple_stmt_iterator gsi2 = *gsi;
1956 gsi_prev (&gsi2);
1957 fold_stmt (&gsi2);
1958 return true;
1959 }
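
/* Illustrative sketch (not from the original sources) of the strchr
   folds above:

     strchr ("hello", 'l')   =>   "hello" + 2      // both args constant
     strchr (s, 0)           =>   s + strlen (s)   // locate the nul
     strrchr (s, 0)          =>   strchr (s, 0)    // only at -Os

   A constant search that fails folds to a null pointer.  */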
1960
1961 /* Fold function call to builtin strstr.
1962 If both arguments are constant, evaluate and fold the result,
1963 additionally fold strstr (x, "") into x and strstr (x, "c")
1964 into strchr (x, 'c'). */
1965 static bool
1966 gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
1967 {
1968 gimple *stmt = gsi_stmt (*gsi);
1969 tree haystack = gimple_call_arg (stmt, 0);
1970 tree needle = gimple_call_arg (stmt, 1);
1971 const char *p, *q;
1972
1973 if (!gimple_call_lhs (stmt))
1974 return false;
1975
1976 q = c_getstr (needle);
1977 if (q == NULL)
1978 return false;
1979
1980 if ((p = c_getstr (haystack)))
1981 {
1982 const char *r = strstr (p, q);
1983
1984 if (r == NULL)
1985 {
1986 replace_call_with_value (gsi, integer_zero_node);
1987 return true;
1988 }
1989
1990 tree len = build_int_cst (size_type_node, r - p);
1991 gimple_seq stmts = NULL;
1992 gimple *new_stmt
1993 = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
1994 haystack, len);
1995 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1996 gsi_replace_with_seq_vops (gsi, stmts);
1997 return true;
1998 }
1999
2000 /* For strstr (x, "") return x. */
2001 if (q[0] == '\0')
2002 {
2003 replace_call_with_value (gsi, haystack);
2004 return true;
2005 }
2006
2007 /* Transform strstr (x, "c") into strchr (x, 'c'). */
2008 if (q[1] == '\0')
2009 {
2010 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
2011 if (strchr_fn)
2012 {
2013 tree c = build_int_cst (integer_type_node, q[0]);
2014 gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
2015 replace_call_with_call_and_fold (gsi, repl);
2016 return true;
2017 }
2018 }
2019
2020 return false;
2021 }
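
/* Illustrative sketch (not from the original sources) of the strstr
   folds above:

     strstr ("hayneedle", "need")   =>   "hayneedle" + 3
     strstr (x, "")                 =>   x
     strstr (x, "c")                =>   strchr (x, 'c')

   A constant search that fails folds to a null pointer.  */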
2022
2023 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
2024 to the call.
2025
2026 When the length of SRC is a known constant, split the call into a
2027 strlen of DST followed by a memcpy of SRC (including its terminating
2028 nul) to DST + strlen (DST). Return true if the call was simplified,
2029 false otherwise. */
2040
2041 static bool
2042 gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
2043 {
2044 gimple *stmt = gsi_stmt (*gsi);
2045 location_t loc = gimple_location (stmt);
2046
2047 const char *p = c_getstr (src);
2048
2049 /* If the string length is zero, return the dst parameter. */
2050 if (p && *p == '\0')
2051 {
2052 replace_call_with_value (gsi, dst);
2053 return true;
2054 }
2055
2056 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
2057 return false;
2058
2059 /* See if we can store by pieces into (dst + strlen(dst)). */
2060 tree newdst;
2061 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
2062 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2063
2064 if (!strlen_fn || !memcpy_fn)
2065 return false;
2066
2067 /* If the length of the source string isn't computable don't
2068 split strcat into strlen and memcpy. */
2069 tree len = get_maxval_strlen (src, SRK_STRLEN);
2070 if (! len)
2071 return false;
2072
2073 /* Create strlen (dst). */
2074 gimple_seq stmts = NULL, stmts2;
2075 gimple *repl = gimple_build_call (strlen_fn, 1, dst);
2076 gimple_set_location (repl, loc);
2077 newdst = create_tmp_reg_or_ssa_name (size_type_node);
2078 gimple_call_set_lhs (repl, newdst);
2079 gimple_seq_add_stmt_without_update (&stmts, repl);
2080
2081 /* Create (dst p+ strlen (dst)). */
2082 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
2083 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
2084 gimple_seq_add_seq_without_update (&stmts, stmts2);
2085
2086 len = fold_convert_loc (loc, size_type_node, len);
2087 len = size_binop_loc (loc, PLUS_EXPR, len,
2088 build_int_cst (size_type_node, 1));
2089 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
2090 gimple_seq_add_seq_without_update (&stmts, stmts2);
2091
2092 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
2093 gimple_seq_add_stmt_without_update (&stmts, repl);
2094 if (gimple_call_lhs (stmt))
2095 {
2096 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
2097 gimple_seq_add_stmt_without_update (&stmts, repl);
2098 gsi_replace_with_seq_vops (gsi, stmts);
2099 /* gsi now points at the assignment to the lhs, get a
2100 stmt iterator to the memcpy call.
2101 ??? We can't use gsi_for_stmt as that doesn't work when the
2102 CFG isn't built yet. */
2103 gimple_stmt_iterator gsi2 = *gsi;
2104 gsi_prev (&gsi2);
2105 fold_stmt (&gsi2);
2106 }
2107 else
2108 {
2109 gsi_replace_with_seq_vops (gsi, stmts);
2110 fold_stmt (gsi);
2111 }
2112 return true;
2113 }
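
/* Illustrative sketch (not from the original sources): when strlen (SRC)
   is a known constant, the strcat above is split as

     strcat (dst, "ab");
       =>
     tmp = strlen (dst);
     memcpy (dst + tmp, "ab", 3);   // source length + 1, nul included

   This is done only in blocks optimized for speed.  */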
2114
2115 /* Fold a call to the __strcat_chk builtin. DEST, SRC, and SIZE
2116 are the arguments to the call. */
2117
2118 static bool
2119 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
2120 {
2121 gimple *stmt = gsi_stmt (*gsi);
2122 tree dest = gimple_call_arg (stmt, 0);
2123 tree src = gimple_call_arg (stmt, 1);
2124 tree size = gimple_call_arg (stmt, 2);
2125 tree fn;
2126 const char *p;
2127
2128
2129 p = c_getstr (src);
2130 /* If the SRC parameter is "", return DEST. */
2131 if (p && *p == '\0')
2132 {
2133 replace_call_with_value (gsi, dest);
2134 return true;
2135 }
2136
2137 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
2138 return false;
2139
2140 /* If __builtin_strcat_chk is used, assume strcat is available. */
2141 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
2142 if (!fn)
2143 return false;
2144
2145 gimple *repl = gimple_build_call (fn, 2, dest, src);
2146 replace_call_with_call_and_fold (gsi, repl);
2147 return true;
2148 }
2149
2150 /* Simplify a call to the strncat builtin. */
2151
2152 static bool
2153 gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
2154 {
2155 gimple *stmt = gsi_stmt (*gsi);
2156 tree dst = gimple_call_arg (stmt, 0);
2157 tree src = gimple_call_arg (stmt, 1);
2158 tree len = gimple_call_arg (stmt, 2);
2159
2160 const char *p = c_getstr (src);
2161
2162 /* If the requested length is zero, or the src parameter string
2163 length is zero, return the dst parameter. */
2164 if (integer_zerop (len) || (p && *p == '\0'))
2165 {
2166 replace_call_with_value (gsi, dst);
2167 return true;
2168 }
2169
2170 if (TREE_CODE (len) != INTEGER_CST || !p)
2171 return false;
2172
2173 unsigned srclen = strlen (p);
2174
2175 int cmpsrc = compare_tree_int (len, srclen);
2176
2177 /* Return early if the requested len is less than the string length.
2178 Warnings will be issued elsewhere later. */
2179 if (cmpsrc < 0)
2180 return false;
2181
2182 unsigned HOST_WIDE_INT dstsize;
2183
2184 bool nowarn = gimple_no_warning_p (stmt);
2185
2186 if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
2187 {
2188 int cmpdst = compare_tree_int (len, dstsize);
2189
2190 if (cmpdst >= 0)
2191 {
2192 tree fndecl = gimple_call_fndecl (stmt);
2193
2194 /* Strncat copies (at most) LEN bytes and always appends
2195 the terminating NUL so the specified bound should never
2196 be equal to (or greater than) the size of the destination.
2197 If it is, the copy could overflow. */
2198 location_t loc = gimple_location (stmt);
2199 nowarn = warning_at (loc, OPT_Wstringop_overflow_,
2200 cmpdst == 0
2201 ? G_("%G%qD specified bound %E equals "
2202 "destination size")
2203 : G_("%G%qD specified bound %E exceeds "
2204 "destination size %wu"),
2205 stmt, fndecl, len, dstsize);
2206 if (nowarn)
2207 gimple_set_no_warning (stmt, true);
2208 }
2209 }
2210
2211 if (!nowarn && cmpsrc == 0)
2212 {
2213 tree fndecl = gimple_call_fndecl (stmt);
2214 location_t loc = gimple_location (stmt);
2215
2216 /* To avoid possible overflow the specified bound should also
2217 not be equal to the length of the source, even when the size
2218 of the destination is unknown (it's not an uncommon mistake
2219 to pass as the bound to strncat the length of the source). */
2220 if (warning_at (loc, OPT_Wstringop_overflow_,
2221 "%G%qD specified bound %E equals source length",
2222 stmt, fndecl, len))
2223 gimple_set_no_warning (stmt, true);
2224 }
2225
2226 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
2227
2228 /* If the replacement _DECL isn't initialized, don't do the
2229 transformation. */
2230 if (!fn)
2231 return false;
2232
2233 /* Otherwise, emit a call to strcat. */
2234 gcall *repl = gimple_build_call (fn, 2, dst, src);
2235 replace_call_with_call_and_fold (gsi, repl);
2236 return true;
2237 }
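
/* Illustrative sketch (not from the original sources): with a constant
   bound no smaller than the source length,

     strncat (dst, "ab", 5)   =>   strcat (dst, "ab")

   since strncat never copies more than strlen (src) characters anyway.
   Bounds equal to the source length or to the destination size are
   diagnosed above before the transformation.  */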
2238
2239 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2240 LEN, and SIZE. */
2241
2242 static bool
2243 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
2244 {
2245 gimple *stmt = gsi_stmt (*gsi);
2246 tree dest = gimple_call_arg (stmt, 0);
2247 tree src = gimple_call_arg (stmt, 1);
2248 tree len = gimple_call_arg (stmt, 2);
2249 tree size = gimple_call_arg (stmt, 3);
2250 tree fn;
2251 const char *p;
2252
2253 p = c_getstr (src);
2254 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2255 if ((p && *p == '\0')
2256 || integer_zerop (len))
2257 {
2258 replace_call_with_value (gsi, dest);
2259 return true;
2260 }
2261
2262 if (! tree_fits_uhwi_p (size))
2263 return false;
2264
2265 if (! integer_all_onesp (size))
2266 {
2267 tree src_len = c_strlen (src, 1);
2268 if (src_len
2269 && tree_fits_uhwi_p (src_len)
2270 && tree_fits_uhwi_p (len)
2271 && ! tree_int_cst_lt (len, src_len))
2272 {
2273 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2274 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
2275 if (!fn)
2276 return false;
2277
2278 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2279 replace_call_with_call_and_fold (gsi, repl);
2280 return true;
2281 }
2282 return false;
2283 }
2284
2285 /* If __builtin_strncat_chk is used, assume strncat is available. */
2286 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2287 if (!fn)
2288 return false;
2289
2290 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2291 replace_call_with_call_and_fold (gsi, repl);
2292 return true;
2293 }
2294
2295 /* Build and append gimple statements to STMTS that load the first
2296 character of the memory location identified by STR. LOC is the
2297 location of the statement. */
2298
2299 static tree
2300 gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2301 {
2302 tree var;
2303
2304 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2305 tree cst_uchar_ptr_node
2306 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2307 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2308
2309 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2310 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2311 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2312
2313 gimple_assign_set_lhs (stmt, var);
2314 gimple_seq_add_stmt_without_update (stmts, stmt);
2315
2316 return var;
2317 }
2318
2319 /* Fold a call to the str{n}{case}cmp builtin pointed to by the GSI
2320 iterator. FCODE is the BUILT_IN_* code of the builtin. */
2321
2322 static bool
2323 gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
2324 {
2325 gimple *stmt = gsi_stmt (*gsi);
2326 tree callee = gimple_call_fndecl (stmt);
2327 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2328
2329 tree type = integer_type_node;
2330 tree str1 = gimple_call_arg (stmt, 0);
2331 tree str2 = gimple_call_arg (stmt, 1);
2332 tree lhs = gimple_call_lhs (stmt);
2333 HOST_WIDE_INT length = -1;
2334
2335 /* Handle strncmp and strncasecmp functions. */
2336 if (gimple_call_num_args (stmt) == 3)
2337 {
2338 tree len = gimple_call_arg (stmt, 2);
2339 if (tree_fits_uhwi_p (len))
2340 length = tree_to_uhwi (len);
2341 }
2342
2343 /* If the LEN parameter is zero, return zero. */
2344 if (length == 0)
2345 {
2346 replace_call_with_value (gsi, integer_zero_node);
2347 return true;
2348 }
2349
2350 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
2351 if (operand_equal_p (str1, str2, 0))
2352 {
2353 replace_call_with_value (gsi, integer_zero_node);
2354 return true;
2355 }
2356
2357 const char *p1 = c_getstr (str1);
2358 const char *p2 = c_getstr (str2);
2359
2360 /* For known strings, return an immediate value. */
2361 if (p1 && p2)
2362 {
2363 int r = 0;
2364 bool known_result = false;
2365
2366 switch (fcode)
2367 {
2368 case BUILT_IN_STRCMP:
2369 case BUILT_IN_STRCMP_EQ:
2370 {
2371 r = strcmp (p1, p2);
2372 known_result = true;
2373 break;
2374 }
2375 case BUILT_IN_STRNCMP:
2376 case BUILT_IN_STRNCMP_EQ:
2377 {
2378 if (length == -1)
2379 break;
2380 r = strncmp (p1, p2, length);
2381 known_result = true;
2382 break;
2383 }
2384 /* The only case we can handle is when the strings are equal
2385 (result 0), already covered by the operand_equal_p case above. */
2386 case BUILT_IN_STRCASECMP:
2387 break;
2388 case BUILT_IN_STRNCASECMP:
2389 {
2390 if (length == -1)
2391 break;
2392 r = strncmp (p1, p2, length);
2393 if (r == 0)
2394 known_result = true;
2395 break;
2396 }
2397 default:
2398 gcc_unreachable ();
2399 }
2400
2401 if (known_result)
2402 {
2403 replace_call_with_value (gsi, build_cmp_result (type, r));
2404 return true;
2405 }
2406 }
2407
2408 bool nonzero_length = length >= 1
2409 || fcode == BUILT_IN_STRCMP
2410 || fcode == BUILT_IN_STRCMP_EQ
2411 || fcode == BUILT_IN_STRCASECMP;
2412
2413 location_t loc = gimple_location (stmt);
2414
2415 /* If the second arg is "", return *(const unsigned char*)arg1. */
2416 if (p2 && *p2 == '\0' && nonzero_length)
2417 {
2418 gimple_seq stmts = NULL;
2419 tree var = gimple_load_first_char (loc, str1, &stmts);
2420 if (lhs)
2421 {
2422 stmt = gimple_build_assign (lhs, NOP_EXPR, var);
2423 gimple_seq_add_stmt_without_update (&stmts, stmt);
2424 }
2425
2426 gsi_replace_with_seq_vops (gsi, stmts);
2427 return true;
2428 }
2429
2430 /* If the first arg is "", return -*(const unsigned char*)arg2. */
2431 if (p1 && *p1 == '\0' && nonzero_length)
2432 {
2433 gimple_seq stmts = NULL;
2434 tree var = gimple_load_first_char (loc, str2, &stmts);
2435
2436 if (lhs)
2437 {
2438 tree c = create_tmp_reg_or_ssa_name (integer_type_node);
2439 stmt = gimple_build_assign (c, NOP_EXPR, var);
2440 gimple_seq_add_stmt_without_update (&stmts, stmt);
2441
2442 stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
2443 gimple_seq_add_stmt_without_update (&stmts, stmt);
2444 }
2445
2446 gsi_replace_with_seq_vops (gsi, stmts);
2447 return true;
2448 }
2449
2450 /* If the len parameter is one, return an expression corresponding to
2451 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
2452 if (fcode == BUILT_IN_STRNCMP && length == 1)
2453 {
2454 gimple_seq stmts = NULL;
2455 tree temp1 = gimple_load_first_char (loc, str1, &stmts);
2456 tree temp2 = gimple_load_first_char (loc, str2, &stmts);
2457
2458 if (lhs)
2459 {
2460 tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
2461 gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
2462 gimple_seq_add_stmt_without_update (&stmts, convert1);
2463
2464 tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
2465 gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
2466 gimple_seq_add_stmt_without_update (&stmts, convert2);
2467
2468 stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
2469 gimple_seq_add_stmt_without_update (&stmts, stmt);
2470 }
2471
2472 gsi_replace_with_seq_vops (gsi, stmts);
2473 return true;
2474 }
2475
2476 /* If the bound is larger than the length of one of the constant
2477 strings, replace strncmp with the equivalent strcmp. */
2478 if (fcode == BUILT_IN_STRNCMP
2479 && length > 0
2480 && ((p2 && (size_t) length > strlen (p2))
2481 || (p1 && (size_t) length > strlen (p1))))
2482 {
2483 tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
2484 if (!fn)
2485 return false;
2486 gimple *repl = gimple_build_call (fn, 2, str1, str2);
2487 replace_call_with_call_and_fold (gsi, repl);
2488 return true;
2489 }
2490
2491 return false;
2492 }
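
/* Illustrative sketch (not from the original sources) of the main
   str{n}cmp folds above:

     strcmp (s, s)          =>   0
     strcmp (s, "")         =>   *(const unsigned char *) s
     strcmp ("", s)         =>   -*(const unsigned char *) s
     strncmp (s1, s2, 1)    =>   *(const unsigned char *) s1
                                 - *(const unsigned char *) s2
     strncmp (s1, "ab", 9)  =>   strcmp (s1, "ab")  // bound > strlen

   Two constant arguments are compared at compile time instead.  */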
2493
2494 /* Fold a call to the memchr builtin pointed to by the GSI iterator. */
2495
2496 static bool
2497 gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
2498 {
2499 gimple *stmt = gsi_stmt (*gsi);
2500 tree lhs = gimple_call_lhs (stmt);
2501 tree arg1 = gimple_call_arg (stmt, 0);
2502 tree arg2 = gimple_call_arg (stmt, 1);
2503 tree len = gimple_call_arg (stmt, 2);
2504
2505 /* If the LEN parameter is zero, return zero. */
2506 if (integer_zerop (len))
2507 {
2508 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2509 return true;
2510 }
2511
2512 char c;
2513 if (TREE_CODE (arg2) != INTEGER_CST
2514 || !tree_fits_uhwi_p (len)
2515 || !target_char_cst_p (arg2, &c))
2516 return false;
2517
2518 unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
2519 unsigned HOST_WIDE_INT string_length;
2520 const char *p1 = c_getstr (arg1, &string_length);
2521
2522 if (p1)
2523 {
2524 const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
2525 if (r == NULL)
2526 {
2527 tree mem_size, offset_node;
2528 string_constant (arg1, &offset_node, &mem_size, NULL);
2529 unsigned HOST_WIDE_INT offset = (offset_node == NULL_TREE)
2530 ? 0 : tree_to_uhwi (offset_node);
2531 /* MEM_SIZE is the size of the array the string literal
2532 is stored in. */
2533 unsigned HOST_WIDE_INT string_size = tree_to_uhwi (mem_size) - offset;
2534 gcc_checking_assert (string_length <= string_size);
2535 if (length <= string_size)
2536 {
2537 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2538 return true;
2539 }
2540 }
2541 else
2542 {
2543 unsigned HOST_WIDE_INT offset = r - p1;
2544 gimple_seq stmts = NULL;
2545 if (lhs != NULL_TREE)
2546 {
2547 tree offset_cst = build_int_cst (TREE_TYPE (len), offset);
2548 gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
2549 arg1, offset_cst);
2550 gimple_seq_add_stmt_without_update (&stmts, stmt);
2551 }
2552 else
2553 gimple_seq_add_stmt_without_update (&stmts,
2554 gimple_build_nop ());
2555
2556 gsi_replace_with_seq_vops (gsi, stmts);
2557 return true;
2558 }
2559 }
2560
2561 return false;
2562 }
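
/* Illustrative sketch (not from the original sources): with a constant
   string and bound, e.g.

     memchr ("hello", 'l', 5)   =>   "hello" + 2

   and a search that provably cannot succeed within the object folds
   to a null pointer.  */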
2563
2564 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
2565 to the call. UNLOCKED is true if this is actually a call to
2566 fputs_unlocked. The transformation is only done when the return
2567 value of the call is unused, and only when the length of the
2568 string can be determined. Return false if no simplification
2569 was possible. */
2570
2571 static bool
2572 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
2573 tree arg0, tree arg1,
2574 bool unlocked)
2575 {
2576 gimple *stmt = gsi_stmt (*gsi);
2577
2578 /* If we're using an unlocked function, assume the other unlocked
2579 functions exist explicitly. */
2580 tree const fn_fputc = (unlocked
2581 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
2582 : builtin_decl_implicit (BUILT_IN_FPUTC));
2583 tree const fn_fwrite = (unlocked
2584 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
2585 : builtin_decl_implicit (BUILT_IN_FWRITE));
2586
2587 /* If the return value is used, don't do the transformation. */
2588 if (gimple_call_lhs (stmt))
2589 return false;
2590
2591 /* Get the length of the string passed to fputs. If the length
2592 can't be determined, punt. */
2593 tree len = get_maxval_strlen (arg0, SRK_STRLEN);
2594 if (!len
2595 || TREE_CODE (len) != INTEGER_CST)
2596 return false;
2597
2598 switch (compare_tree_int (len, 1))
2599 {
2600 case -1: /* length is 0, delete the call entirely. */
2601 replace_call_with_value (gsi, integer_zero_node);
2602 return true;
2603
2604 case 0: /* length is 1, call fputc. */
2605 {
2606 const char *p = c_getstr (arg0);
2607 if (p != NULL)
2608 {
2609 if (!fn_fputc)
2610 return false;
2611
2612 gimple *repl = gimple_build_call (fn_fputc, 2,
2613 build_int_cst
2614 (integer_type_node, p[0]), arg1);
2615 replace_call_with_call_and_fold (gsi, repl);
2616 return true;
2617 }
2618 }
2619 /* FALLTHROUGH */
2620 case 1: /* length is greater than 1, call fwrite. */
2621 {
2622 /* If optimizing for size keep fputs. */
2623 if (optimize_function_for_size_p (cfun))
2624 return false;
2625 /* New argument list transforming fputs(string, stream) to
2626 fwrite(string, 1, len, stream). */
2627 if (!fn_fwrite)
2628 return false;
2629
2630 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
2631 size_one_node, len, arg1);
2632 replace_call_with_call_and_fold (gsi, repl);
2633 return true;
2634 }
2635 default:
2636 gcc_unreachable ();
2637 }
2638 return false;
2639 }
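
/* Illustrative sketch (not from the original sources) of the fputs
   folds above, all assuming the return value is unused:

     fputs ("", f)     =>   (removed)
     fputs ("a", f)    =>   fputc ('a', f)
     fputs ("ab", f)   =>   fwrite ("ab", 1, 2, f)  // not at -Os
*/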
2640
2641 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
2642 DEST, SRC, LEN, and SIZE are the arguments to the call. FCODE is
2643 the BUILT_IN_* code of the builtin. The call is replaced with the
2644 unchecked variant when SIZE is known to be at least as large as
2645 the (maximum) value of LEN. */
2646
2647 static bool
2648 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
2649 tree dest, tree src, tree len, tree size,
2650 enum built_in_function fcode)
2651 {
2652 gimple *stmt = gsi_stmt (*gsi);
2653 location_t loc = gimple_location (stmt);
2654 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2655 tree fn;
2656
2657 /* If SRC and DEST are the same (and not volatile), return DEST
2658 (resp. DEST+LEN for __mempcpy_chk). */
2659 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
2660 {
2661 if (fcode != BUILT_IN_MEMPCPY_CHK)
2662 {
2663 replace_call_with_value (gsi, dest);
2664 return true;
2665 }
2666 else
2667 {
2668 gimple_seq stmts = NULL;
2669 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
2670 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
2671 TREE_TYPE (dest), dest, len);
2672 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2673 replace_call_with_value (gsi, temp);
2674 return true;
2675 }
2676 }
2677
2678 if (! tree_fits_uhwi_p (size))
2679 return false;
2680
2681 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
2682 if (! integer_all_onesp (size))
2683 {
2684 if (! tree_fits_uhwi_p (len))
2685 {
2686 /* If LEN is not constant, try MAXLEN too.
2687 For MAXLEN only allow optimizing into non-_ocs function
2688 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2689 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2690 {
2691 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
2692 {
2693 /* (void) __mempcpy_chk () can be optimized into
2694 (void) __memcpy_chk (). */
2695 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2696 if (!fn)
2697 return false;
2698
2699 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2700 replace_call_with_call_and_fold (gsi, repl);
2701 return true;
2702 }
2703 return false;
2704 }
2705 }
2706 else
2707 maxlen = len;
2708
2709 if (tree_int_cst_lt (size, maxlen))
2710 return false;
2711 }
2712
2713 fn = NULL_TREE;
2714 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
2715 mem{cpy,pcpy,move,set} is available. */
2716 switch (fcode)
2717 {
2718 case BUILT_IN_MEMCPY_CHK:
2719 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
2720 break;
2721 case BUILT_IN_MEMPCPY_CHK:
2722 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
2723 break;
2724 case BUILT_IN_MEMMOVE_CHK:
2725 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
2726 break;
2727 case BUILT_IN_MEMSET_CHK:
2728 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
2729 break;
2730 default:
2731 break;
2732 }
2733
2734 if (!fn)
2735 return false;
2736
2737 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2738 replace_call_with_call_and_fold (gsi, repl);
2739 return true;
2740 }
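
/* Illustrative sketch (not from the original sources): a checked call
   whose destination provably covers the copy drops the check, e.g.

     __builtin___memcpy_chk (d, s, 16, 32)   =>   memcpy (d, s, 16)

   and, as noted above, (void) __mempcpy_chk (...) with an unknown
   bound is demoted to (void) __memcpy_chk (...).  */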
2741
2742 /* Fold a call to the __st[rp]cpy_chk builtin.
2743 DEST, SRC, and SIZE are the arguments to the call. FCODE is the
2744 BUILT_IN_* code of the builtin. The check is dropped in favor of
2745 the unchecked st{r,p}cpy when the (maximum) length of SRC is known
2746 to be smaller than SIZE. */
2747
2748 static bool
2749 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
2750 tree dest,
2751 tree src, tree size,
2752 enum built_in_function fcode)
2753 {
2754 gimple *stmt = gsi_stmt (*gsi);
2755 location_t loc = gimple_location (stmt);
2756 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2757 tree len, fn;
2758
2759 /* If SRC and DEST are the same (and not volatile), return DEST. */
2760 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
2761 {
2762 /* Issue -Wrestrict unless the pointers are null (those do
2763 not point to objects and so do not indicate an overlap;
2764 such calls could be the result of sanitization and jump
2765 threading). */
2766 if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
2767 {
2768 tree func = gimple_call_fndecl (stmt);
2769
2770 warning_at (loc, OPT_Wrestrict,
2771 "%qD source argument is the same as destination",
2772 func);
2773 }
2774
2775 replace_call_with_value (gsi, dest);
2776 return true;
2777 }
2778
2779 if (! tree_fits_uhwi_p (size))
2780 return false;
2781
2782 tree maxlen = get_maxval_strlen (src, SRK_STRLENMAX);
2783 if (! integer_all_onesp (size))
2784 {
2785 len = c_strlen (src, 1);
2786 if (! len || ! tree_fits_uhwi_p (len))
2787 {
2788 /* If LEN is not constant, try MAXLEN too.
2789 For MAXLEN only allow optimizing into non-_ocs function
2790 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2791 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2792 {
2793 if (fcode == BUILT_IN_STPCPY_CHK)
2794 {
2795 if (! ignore)
2796 return false;
2797
2798 /* If return value of __stpcpy_chk is ignored,
2799 optimize into __strcpy_chk. */
2800 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
2801 if (!fn)
2802 return false;
2803
2804 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2805 replace_call_with_call_and_fold (gsi, repl);
2806 return true;
2807 }
2808
2809 if (! len || TREE_SIDE_EFFECTS (len))
2810 return false;
2811
2812 /* If c_strlen returned something, but not a constant,
2813 transform __strcpy_chk into __memcpy_chk. */
2814 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2815 if (!fn)
2816 return false;
2817
2818 gimple_seq stmts = NULL;
2819 len = force_gimple_operand (len, &stmts, true, NULL_TREE);
2820 len = gimple_convert (&stmts, loc, size_type_node, len);
2821 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
2822 build_int_cst (size_type_node, 1));
2823 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2824 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2825 replace_call_with_call_and_fold (gsi, repl);
2826 return true;
2827 }
2828 }
2829 else
2830 maxlen = len;
2831
2832 if (! tree_int_cst_lt (maxlen, size))
2833 return false;
2834 }
2835
2836 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
2837 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
2838 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
2839 if (!fn)
2840 return false;
2841
2842 gimple *repl = gimple_build_call (fn, 2, dest, src);
2843 replace_call_with_call_and_fold (gsi, repl);
2844 return true;
2845 }
2846
2847 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
2848 are the arguments to the call. FCODE is the BUILT_IN_* code of the
2849 builtin. The check is dropped when SIZE is known to be at least as
2850 large as the (maximum) value of LEN. */
2851
2852 static bool
2853 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
2854 tree dest, tree src,
2855 tree len, tree size,
2856 enum built_in_function fcode)
2857 {
2858 gimple *stmt = gsi_stmt (*gsi);
2859 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2860 tree fn;
2861
2862 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
2863 {
2864 /* If return value of __stpncpy_chk is ignored,
2865 optimize into __strncpy_chk. */
2866 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
2867 if (fn)
2868 {
2869 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2870 replace_call_with_call_and_fold (gsi, repl);
2871 return true;
2872 }
2873 }
2874
2875 if (! tree_fits_uhwi_p (size))
2876 return false;
2877
2878 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
2879 if (! integer_all_onesp (size))
2880 {
2881 if (! tree_fits_uhwi_p (len))
2882 {
2883 /* If LEN is not constant, try MAXLEN too.
2884 For MAXLEN only allow optimizing into non-_ocs function
2885 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2886 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2887 return false;
2888 }
2889 else
2890 maxlen = len;
2891
2892 if (tree_int_cst_lt (size, maxlen))
2893 return false;
2894 }
2895
2896 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2897 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
2898 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
2899 if (!fn)
2900 return false;
2901
2902 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2903 replace_call_with_call_and_fold (gsi, repl);
2904 return true;
2905 }
2906
2907 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
2908 Return false if no simplification can be made. */
2909
2910 static bool
2911 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
2912 {
2913 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2914 location_t loc = gimple_location (stmt);
2915 tree dest = gimple_call_arg (stmt, 0);
2916 tree src = gimple_call_arg (stmt, 1);
2917 tree fn, lenp1;
2918
2919 /* If the result is unused, replace stpcpy with strcpy. */
2920 if (gimple_call_lhs (stmt) == NULL_TREE)
2921 {
2922 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2923 if (!fn)
2924 return false;
2925 gimple_call_set_fndecl (stmt, fn);
2926 fold_stmt (gsi);
2927 return true;
2928 }
2929
2930 /* Set to non-null if SRC refers to an unterminated array. */
2931 c_strlen_data data = { };
2932 tree len = c_strlen (src, 1, &data, 1);
2933 if (!len
2934 || TREE_CODE (len) != INTEGER_CST)
2935 {
2936 data.decl = unterminated_array (src);
2937 if (!data.decl)
2938 return false;
2939 }
2940
2941 if (data.decl)
2942 {
2943 /* Avoid folding calls with unterminated arrays. */
2944 if (!gimple_no_warning_p (stmt))
2945 warn_string_no_nul (loc, "stpcpy", src, data.decl);
2946 gimple_set_no_warning (stmt, true);
2947 return false;
2948 }
2949
2950 if (optimize_function_for_size_p (cfun)
2951 /* If length is zero it's small enough. */
2952 && !integer_zerop (len))
2953 return false;
2954
2955 /* If the source has a known length replace stpcpy with memcpy. */
2956 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2957 if (!fn)
2958 return false;
2959
2960 gimple_seq stmts = NULL;
2961 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
2962 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
2963 tem, build_int_cst (size_type_node, 1));
2964 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2965 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
2966 gimple_move_vops (repl, stmt);
2967 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
2968 /* Replace the result with dest + len. */
2969 stmts = NULL;
2970 tem = gimple_convert (&stmts, loc, sizetype, len);
2971 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2972 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
2973 POINTER_PLUS_EXPR, dest, tem);
2974 gsi_replace (gsi, ret, false);
2975 /* Finally fold the memcpy call. */
2976 gimple_stmt_iterator gsi2 = *gsi;
2977 gsi_prev (&gsi2);
2978 fold_stmt (&gsi2);
2979 return true;
2980 }
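
/* Illustrative sketch (not from the original sources): with a known
   source length, the stpcpy above becomes

     r = stpcpy (d, "abc");
       =>
     memcpy (d, "abc", 4);
     r = d + 3;              // stpcpy returns a pointer to the nul

   while an unused result simply demotes stpcpy to strcpy.  */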
2981
2982 /* Fold a call to __{,v}snprintf_chk pointed to by the GSI iterator.
2983 FCODE is either BUILT_IN_SNPRINTF_CHK or BUILT_IN_VSNPRINTF_CHK.
2984 The call is converted to the corresponding {,v}snprintf when the
2985 object size check can be proved redundant. Return false if a
2986 normal call should be emitted rather than simplifying. */
2987
2988 static bool
2989 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
2990 enum built_in_function fcode)
2991 {
2992 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2993 tree dest, size, len, fn, fmt, flag;
2994 const char *fmt_str;
2995
2996 /* Verify the required arguments in the original call. */
2997 if (gimple_call_num_args (stmt) < 5)
2998 return false;
2999
3000 dest = gimple_call_arg (stmt, 0);
3001 len = gimple_call_arg (stmt, 1);
3002 flag = gimple_call_arg (stmt, 2);
3003 size = gimple_call_arg (stmt, 3);
3004 fmt = gimple_call_arg (stmt, 4);
3005
3006 if (! tree_fits_uhwi_p (size))
3007 return false;
3008
3009 if (! integer_all_onesp (size))
3010 {
3011 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
3012 if (! tree_fits_uhwi_p (len))
3013 {
3014 /* If LEN is not constant, try MAXLEN too.
3015 For MAXLEN only allow optimizing into non-_ocs function
3016 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
3017 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
3018 return false;
3019 }
3020 else
3021 maxlen = len;
3022
3023 if (tree_int_cst_lt (size, maxlen))
3024 return false;
3025 }
3026
3027 if (!init_target_chars ())
3028 return false;
3029
3030 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
3031 or if format doesn't contain % chars or is "%s". */
3032 if (! integer_zerop (flag))
3033 {
3034 fmt_str = c_getstr (fmt);
3035 if (fmt_str == NULL)
3036 return false;
3037 if (strchr (fmt_str, target_percent) != NULL
3038 && strcmp (fmt_str, target_percent_s))
3039 return false;
3040 }
3041
3042 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
3043 available. */
3044 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
3045 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
3046 if (!fn)
3047 return false;
3048
3049 /* Replace the called function and the first 5 arguments with 3,
3050 retaining the trailing varargs. */
3051 gimple_call_set_fndecl (stmt, fn);
3052 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
3053 gimple_call_set_arg (stmt, 0, dest);
3054 gimple_call_set_arg (stmt, 1, len);
3055 gimple_call_set_arg (stmt, 2, fmt);
3056 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
3057 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
3058 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
3059 fold_stmt (gsi);
3060 return true;
3061 }
3062
3063 /* Fold a call to __{,v}sprintf_chk pointed to by the GSI iterator.
3064 FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.
3065 Return false if a normal call should be emitted rather than
3066 simplifying. */
3067
3068 static bool
3069 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
3070 enum built_in_function fcode)
3071 {
3072 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3073 tree dest, size, len, fn, fmt, flag;
3074 const char *fmt_str;
3075 unsigned nargs = gimple_call_num_args (stmt);
3076
3077 /* Verify the required arguments in the original call. */
3078 if (nargs < 4)
3079 return false;
3080 dest = gimple_call_arg (stmt, 0);
3081 flag = gimple_call_arg (stmt, 1);
3082 size = gimple_call_arg (stmt, 2);
3083 fmt = gimple_call_arg (stmt, 3);
3084
3085 if (! tree_fits_uhwi_p (size))
3086 return false;
3087
3088 len = NULL_TREE;
3089
3090 if (!init_target_chars ())
3091 return false;
3092
3093 /* Check whether the format is a literal string constant. */
3094 fmt_str = c_getstr (fmt);
3095 if (fmt_str != NULL)
3096 {
3097 /* If the format doesn't contain % args or %%, we know the size. */
3098 if (strchr (fmt_str, target_percent) == 0)
3099 {
3100 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
3101 len = build_int_cstu (size_type_node, strlen (fmt_str));
3102 }
3103 /* If the format is "%s" and first ... argument is a string literal,
3104 we know the size too. */
3105 else if (fcode == BUILT_IN_SPRINTF_CHK
3106 && strcmp (fmt_str, target_percent_s) == 0)
3107 {
3108 tree arg;
3109
3110 if (nargs == 5)
3111 {
3112 arg = gimple_call_arg (stmt, 4);
3113 if (POINTER_TYPE_P (TREE_TYPE (arg)))
3114 {
3115 len = c_strlen (arg, 1);
3116 if (! len || ! tree_fits_uhwi_p (len))
3117 len = NULL_TREE;
3118 }
3119 }
3120 }
3121 }
3122
3123 if (! integer_all_onesp (size))
3124 {
3125 if (! len || ! tree_int_cst_lt (len, size))
3126 return false;
3127 }
3128
3129 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
3130 or if format doesn't contain % chars or is "%s". */
3131 if (! integer_zerop (flag))
3132 {
3133 if (fmt_str == NULL)
3134 return false;
3135 if (strchr (fmt_str, target_percent) != NULL
3136 && strcmp (fmt_str, target_percent_s))
3137 return false;
3138 }
3139
3140 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
3141 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
3142 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
3143 if (!fn)
3144 return false;
3145
3146 /* Replace the called function and the first 4 arguments with 2,
3147 retaining the trailing varargs. */
3148 gimple_call_set_fndecl (stmt, fn);
3149 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
3150 gimple_call_set_arg (stmt, 0, dest);
3151 gimple_call_set_arg (stmt, 1, fmt);
3152 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
3153 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
3154 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
3155 fold_stmt (gsi);
3156 return true;
3157 }
3158
3159 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
3160 ORIG may be null if this is a 2-argument call. We don't attempt to
3161 simplify calls with more than 3 arguments.
3162
3163 Return true if simplification was possible, otherwise false. */
3164
3165 bool
3166 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
3167 {
3168 gimple *stmt = gsi_stmt (*gsi);
3169 tree dest = gimple_call_arg (stmt, 0);
3170 tree fmt = gimple_call_arg (stmt, 1);
3171 tree orig = NULL_TREE;
3172 const char *fmt_str = NULL;
3173
3174 /* Verify the required arguments in the original call. We deal with two
3175 types of sprintf() calls: 'sprintf (dest, fmt)' and
3176 'sprintf (dest, "%s", orig)'. */
3177 if (gimple_call_num_args (stmt) > 3)
3178 return false;
3179
3180 if (gimple_call_num_args (stmt) == 3)
3181 orig = gimple_call_arg (stmt, 2);
3182
3183 /* Check whether the format is a literal string constant. */
3184 fmt_str = c_getstr (fmt);
3185 if (fmt_str == NULL)
3186 return false;
3187
3188 if (!init_target_chars ())
3189 return false;
3190
3191 /* If the format doesn't contain % args or %%, use strcpy. */
3192 if (strchr (fmt_str, target_percent) == NULL)
3193 {
3194 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3195
3196 if (!fn)
3197 return false;
3198
3199 /* Don't optimize sprintf (buf, "abc", ptr++). */
3200 if (orig)
3201 return false;
3202
3203 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
3204 'format' is known to contain no % formats. */
3205 gimple_seq stmts = NULL;
3206 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3207
3208 /* Propagate the NO_WARNING bit to avoid issuing the same
3209 warning more than once. */
3210 if (gimple_no_warning_p (stmt))
3211 gimple_set_no_warning (repl, true);
3212
3213 gimple_seq_add_stmt_without_update (&stmts, repl);
3214 if (tree lhs = gimple_call_lhs (stmt))
3215 {
3216 repl = gimple_build_assign (lhs, build_int_cst (TREE_TYPE (lhs),
3217 strlen (fmt_str)));
3218 gimple_seq_add_stmt_without_update (&stmts, repl);
3219 gsi_replace_with_seq_vops (gsi, stmts);
3220 /* gsi now points at the assignment to the lhs, get a
3221 stmt iterator to the strcpy call.
3222 ??? We can't use gsi_for_stmt as that doesn't work when the
3223 CFG isn't built yet. */
3224 gimple_stmt_iterator gsi2 = *gsi;
3225 gsi_prev (&gsi2);
3226 fold_stmt (&gsi2);
3227 }
3228 else
3229 {
3230 gsi_replace_with_seq_vops (gsi, stmts);
3231 fold_stmt (gsi);
3232 }
3233 return true;
3234 }
3235
3236 /* If the format is "%s", use strcpy; a used result requires ORIG's length. */
3237 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3238 {
3239 tree fn;
3240 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3241
3242 if (!fn)
3243 return false;
3244
3245 /* Don't crash on sprintf (str1, "%s"). */
3246 if (!orig)
3247 return false;
3248
3249 tree orig_len = NULL_TREE;
3250 if (gimple_call_lhs (stmt))
3251 {
3252 orig_len = get_maxval_strlen (orig, SRK_STRLEN);
3253 if (!orig_len)
3254 return false;
3255 }
3256
3257 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
3258 gimple_seq stmts = NULL;
3259 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3260
3261 /* Propagate the NO_WARNING bit to avoid issuing the same
3262 warning more than once. */
3263 if (gimple_no_warning_p (stmt))
3264 gimple_set_no_warning (repl, true);
3265
3266 gimple_seq_add_stmt_without_update (&stmts, repl);
3267 if (tree lhs = gimple_call_lhs (stmt))
3268 {
3269 if (!useless_type_conversion_p (TREE_TYPE (lhs),
3270 TREE_TYPE (orig_len)))
3271 orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
3272 repl = gimple_build_assign (lhs, orig_len);
3273 gimple_seq_add_stmt_without_update (&stmts, repl);
3274 gsi_replace_with_seq_vops (gsi, stmts);
3275 /* gsi now points at the assignment to the lhs, get a
3276 stmt iterator to the strcpy call.
3277 ??? We can't use gsi_for_stmt as that doesn't work when the
3278 CFG isn't built yet. */
3279 gimple_stmt_iterator gsi2 = *gsi;
3280 gsi_prev (&gsi2);
3281 fold_stmt (&gsi2);
3282 }
3283 else
3284 {
3285 gsi_replace_with_seq_vops (gsi, stmts);
3286 fold_stmt (gsi);
3287 }
3288 return true;
3289 }
3290 return false;
3291 }
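
/* Illustrative sketch (not from the original sources) of the sprintf
   folds above:

     n = sprintf (d, "abc");     =>   strcpy (d, "abc");  n = 3;
     n = sprintf (d, "%s", s);   =>   strcpy (d, s);      n = strlen (s);

   where the "%s" form requires strlen (s) to be known whenever N is
   actually used.  */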
3292
3293 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
3294 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
3295 attempt to simplify calls with more than 4 arguments.
3296
3297 Return true if simplification was possible, otherwise false. */
3298
3299 bool
3300 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
3301 {
3302 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3303 tree dest = gimple_call_arg (stmt, 0);
3304 tree destsize = gimple_call_arg (stmt, 1);
3305 tree fmt = gimple_call_arg (stmt, 2);
3306 tree orig = NULL_TREE;
3307 const char *fmt_str = NULL;
3308
3309 if (gimple_call_num_args (stmt) > 4)
3310 return false;
3311
3312 if (gimple_call_num_args (stmt) == 4)
3313 orig = gimple_call_arg (stmt, 3);
3314
3315 if (!tree_fits_uhwi_p (destsize))
3316 return false;
3317 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
3318
3319 /* Check whether the format is a literal string constant. */
3320 fmt_str = c_getstr (fmt);
3321 if (fmt_str == NULL)
3322 return false;
3323
3324 if (!init_target_chars ())
3325 return false;
3326
3327 /* If the format doesn't contain % args or %%, use strcpy. */
3328 if (strchr (fmt_str, target_percent) == NULL)
3329 {
3330 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3331 if (!fn)
3332 return false;
3333
3334 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
3335 if (orig)
3336 return false;
3337
3338 /* We could expand this as
3339 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
3340 or to
3341 memcpy (str, fmt_with_nul_at_cstm1, cst);
3342 but in the former case that might increase code size
3343 and in the latter case grow .rodata section too much.
3344 So punt for now. */
3345 size_t len = strlen (fmt_str);
3346 if (len >= destlen)
3347 return false;
3348
3349 gimple_seq stmts = NULL;
3350 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3351 gimple_seq_add_stmt_without_update (&stmts, repl);
3352 if (tree lhs = gimple_call_lhs (stmt))
3353 {
3354 repl = gimple_build_assign (lhs,
3355 build_int_cst (TREE_TYPE (lhs), len));
3356 gimple_seq_add_stmt_without_update (&stmts, repl);
3357 gsi_replace_with_seq_vops (gsi, stmts);
3358 /* gsi now points at the assignment to the lhs, get a
3359 stmt iterator to the strcpy call.
3360 ??? We can't use gsi_for_stmt as that doesn't work when the
3361 CFG isn't built yet. */
3362 gimple_stmt_iterator gsi2 = *gsi;
3363 gsi_prev (&gsi2);
3364 fold_stmt (&gsi2);
3365 }
3366 else
3367 {
3368 gsi_replace_with_seq_vops (gsi, stmts);
3369 fold_stmt (gsi);
3370 }
3371 return true;
3372 }
3373
3374 /* If the format is "%s" and ORIG's length is known, use strcpy. */
3375 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3376 {
3377 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3378 if (!fn)
3379 return false;
3380
3381 /* Don't crash on snprintf (str1, cst, "%s"). */
3382 if (!orig)
3383 return false;
3384
3385 tree orig_len = get_maxval_strlen (orig, SRK_STRLEN);
3386 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
3387 return false;
3388
3389 /* We could expand this as
3390 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
3391 or to
3392 memcpy (str1, str2_with_nul_at_cstm1, cst);
3393 but in the former case that might increase code size
3394 and in the latter case grow .rodata section too much.
3395 So punt for now. */
3396 if (compare_tree_int (orig_len, destlen) >= 0)
3397 return false;
3398
3399 /* Convert snprintf (str1, cst, "%s", str2) into
3400 strcpy (str1, str2) if strlen (str2) < cst. */
3401 gimple_seq stmts = NULL;
3402 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3403 gimple_seq_add_stmt_without_update (&stmts, repl);
3404 if (tree lhs = gimple_call_lhs (stmt))
3405 {
3406 if (!useless_type_conversion_p (TREE_TYPE (lhs),
3407 TREE_TYPE (orig_len)))
3408 orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
3409 repl = gimple_build_assign (lhs, orig_len);
3410 gimple_seq_add_stmt_without_update (&stmts, repl);
3411 gsi_replace_with_seq_vops (gsi, stmts);
3412 /* gsi now points at the assignment to the lhs, get a
3413 stmt iterator to the strcpy call.
3414 ??? We can't use gsi_for_stmt as that doesn't work when the
3415 CFG isn't built yet. */
3416 gimple_stmt_iterator gsi2 = *gsi;
3417 gsi_prev (&gsi2);
3418 fold_stmt (&gsi2);
3419 }
3420 else
3421 {
3422 gsi_replace_with_seq_vops (gsi, stmts);
3423 fold_stmt (gsi);
3424 }
3425 return true;
3426 }
3427 return false;
3428 }
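
/* Illustrative sketch (not from the original sources): the snprintf
   folds above fire only when the result provably fits, e.g. with a
   destination size of 8:

     snprintf (d, 8, "abc");        =>   strcpy (d, "abc");
     snprintf (d, 8, "%s", "abc");  =>   strcpy (d, "abc");

   A length greater than or equal to the destination size punts, for
   the code-size reasons explained above.  */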
3429
3430 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
3431 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
3432 more than 3 arguments, and ARG may be null in the 2-argument case.
3433
3434 Return false if no simplification was possible, otherwise return
3435 true. FCODE is the BUILT_IN_* code of the function to be
3436 simplified. */
3437
3438 static bool
3439 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
3440 tree fp, tree fmt, tree arg,
3441 enum built_in_function fcode)
3442 {
3443 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3444 tree fn_fputc, fn_fputs;
3445 const char *fmt_str = NULL;
3446
3447 /* If the return value is used, don't do the transformation. */
3448 if (gimple_call_lhs (stmt) != NULL_TREE)
3449 return false;
3450
3451 /* Check whether the format is a literal string constant. */
3452 fmt_str = c_getstr (fmt);
3453 if (fmt_str == NULL)
3454 return false;
3455
3456 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
3457 {
3458 /* If we're using an unlocked function, assume the other
3459 unlocked functions exist explicitly. */
3460 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
3461 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
3462 }
3463 else
3464 {
3465 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
3466 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
3467 }
3468
3469 if (!init_target_chars ())
3470 return false;
3471
3472 /* If the format doesn't contain % args or %%, use fputs. */
3473 if (strchr (fmt_str, target_percent) == NULL)
3474 {
3475 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
3476 && arg)
3477 return false;
3478
3479 /* If the format specifier was "", fprintf does nothing. */
3480 if (fmt_str[0] == '\0')
3481 {
3482 replace_call_with_value (gsi, NULL_TREE);
3483 return true;
3484 }
3485
3486 /* When "string" doesn't contain %, replace all cases of
3487 fprintf (fp, string) with fputs (string, fp). The fputs
3488 builtin will take care of special cases like length == 1. */
3489 if (fn_fputs)
3490 {
3491 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
3492 replace_call_with_call_and_fold (gsi, repl);
3493 return true;
3494 }
3495 }
3496
3497 /* The other optimizations can be done only on the non-va_list variants. */
3498 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
3499 return false;
3500
3501 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3502 else if (strcmp (fmt_str, target_percent_s) == 0)
3503 {
3504 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3505 return false;
3506 if (fn_fputs)
3507 {
3508 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
3509 replace_call_with_call_and_fold (gsi, repl);
3510 return true;
3511 }
3512 }
3513
3514 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3515 else if (strcmp (fmt_str, target_percent_c) == 0)
3516 {
3517 if (!arg
3518 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
3519 return false;
3520 if (fn_fputc)
3521 {
3522 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
3523 replace_call_with_call_and_fold (gsi, repl);
3524 return true;
3525 }
3526 }
3527
3528 return false;
3529 }
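
/* Illustrative sketch (not from the original sources) of the fprintf
   folds above, all assuming the return value is unused:

     fprintf (f, "")        =>   (removed)
     fprintf (f, "hi")      =>   fputs ("hi", f)
     fprintf (f, "%s", s)   =>   fputs (s, f)
     fprintf (f, "%c", c)   =>   fputc (c, f)
*/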
3530
3531 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3532 FMT and ARG are the arguments to the call; we don't fold cases with
3533 more than 2 arguments, and ARG may be null if this is a 1-argument case.
3534
3535 Return false if no simplification was possible, otherwise return
3536 true. FCODE is the BUILT_IN_* code of the function to be
3537 simplified. */
3538
3539 static bool
3540 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
3541 tree arg, enum built_in_function fcode)
3542 {
3543 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3544 tree fn_putchar, fn_puts, newarg;
3545 const char *fmt_str = NULL;
3546
3547 /* If the return value is used, don't do the transformation. */
3548 if (gimple_call_lhs (stmt) != NULL_TREE)
3549 return false;
3550
3551 /* Check whether the format is a literal string constant. */
3552 fmt_str = c_getstr (fmt);
3553 if (fmt_str == NULL)
3554 return false;
3555
3556 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
3557 {
3558 /* If we're using an unlocked function, assume the other
3559 unlocked functions exist explicitly. */
3560 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
3561 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
3562 }
3563 else
3564 {
3565 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
3566 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
3567 }
3568
3569 if (!init_target_chars ())
3570 return false;
3571
3572 if (strcmp (fmt_str, target_percent_s) == 0
3573 || strchr (fmt_str, target_percent) == NULL)
3574 {
3575 const char *str;
3576
3577 if (strcmp (fmt_str, target_percent_s) == 0)
3578 {
3579 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3580 return false;
3581
3582 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3583 return false;
3584
3585 str = c_getstr (arg);
3586 if (str == NULL)
3587 return false;
3588 }
3589 else
3590 {
3591 /* The format specifier doesn't contain any '%' characters. */
3592 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
3593 && arg)
3594 return false;
3595 str = fmt_str;
3596 }
3597
3598 /* If the string was "", printf does nothing. */
3599 if (str[0] == '\0')
3600 {
3601 replace_call_with_value (gsi, NULL_TREE);
3602 return true;
3603 }
3604
3605 /* If the string has length of 1, call putchar. */
3606 if (str[1] == '\0')
3607 {
3608 /* Given printf ("c"), where c is any single character,
3609 convert "c"[0] to an int and pass that to the replacement
3610 function. */
3611 newarg = build_int_cst (integer_type_node, str[0]);
3612 if (fn_putchar)
3613 {
3614 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
3615 replace_call_with_call_and_fold (gsi, repl);
3616 return true;
3617 }
3618 }
3619 else
3620 {
3621 /* If the string was "string\n", call puts("string"). */
3622 size_t len = strlen (str);
3623 if ((unsigned char)str[len - 1] == target_newline
3624 && (size_t) (int) len == len
3625 && (int) len > 0)
3626 {
3627 char *newstr;
3628
3629 /* Create a NUL-terminated string that's one char shorter
3630 than the original, stripping off the trailing '\n'. */
3631 newstr = xstrdup (str);
3632 newstr[len - 1] = '\0';
3633 newarg = build_string_literal (len, newstr);
3634 free (newstr);
3635 if (fn_puts)
3636 {
3637 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
3638 replace_call_with_call_and_fold (gsi, repl);
3639 return true;
3640 }
3641 }
3642 else
3643 /* We'd like to arrange to call fputs(string,stdout) here,
3644 but we need stdout and don't have a way to get it yet. */
3645 return false;
3646 }
3647 }
3648
3649 /* The other optimizations can be done only on the non-va_list variants. */
3650 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3651 return false;
3652
3653 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
3654 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
3655 {
3656 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3657 return false;
3658 if (fn_puts)
3659 {
3660 gcall *repl = gimple_build_call (fn_puts, 1, arg);
3661 replace_call_with_call_and_fold (gsi, repl);
3662 return true;
3663 }
3664 }
3665
3666 /* If the format specifier was "%c", call __builtin_putchar(arg). */
3667 else if (strcmp (fmt_str, target_percent_c) == 0)
3668 {
3669 if (!arg || ! useless_type_conversion_p (integer_type_node,
3670 TREE_TYPE (arg)))
3671 return false;
3672 if (fn_putchar)
3673 {
3674 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
3675 replace_call_with_call_and_fold (gsi, repl);
3676 return true;
3677 }
3678 }
3679
3680 return false;
3681 }
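
/* An illustrative sketch of the printf foldings above, again assuming
   the call's return value is unused:

     printf ("");         =>  removed entirely
     printf ("x");        =>  putchar ('x');
     printf ("foo\n");    =>  puts ("foo");
     printf ("%s\n", s);  =>  puts (s);
     printf ("%c", c);    =>  putchar (c);

   printf ("foo") with no trailing newline is deliberately left alone:
   the equivalent fputs ("foo", stdout) would need a handle on stdout.  */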
3682
3683
3684
3685 /* Fold a call to __builtin_strlen to a constant when possible, and set the range of its result otherwise. */
3686
3687 static bool
3688 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
3689 {
3690 gimple *stmt = gsi_stmt (*gsi);
3691 tree arg = gimple_call_arg (stmt, 0);
3692
3693 wide_int minlen;
3694 wide_int maxlen;
3695
3696 c_strlen_data lendata = { };
3697 if (get_range_strlen (arg, &lendata, /* eltsize = */ 1)
3698 && !lendata.decl
3699 && lendata.minlen && TREE_CODE (lendata.minlen) == INTEGER_CST
3700 && lendata.maxlen && TREE_CODE (lendata.maxlen) == INTEGER_CST)
3701 {
3702 /* The range of lengths refers to either a single constant
3703 string or to the longest and shortest constant string
3704 referenced by the argument of the strlen() call, or to
3705 the strings that can possibly be stored in the arrays
3706 the argument refers to. */
3707 minlen = wi::to_wide (lendata.minlen);
3708 maxlen = wi::to_wide (lendata.maxlen);
3709 }
3710 else
3711 {
3712 unsigned prec = TYPE_PRECISION (sizetype);
3713
3714 minlen = wi::shwi (0, prec);
3715 maxlen = wi::to_wide (max_object_size (), prec) - 2;
3716 }
3717
3718 if (minlen == maxlen)
3719 {
3720 /* Fold the strlen call to a constant. */
3721 tree type = TREE_TYPE (lendata.minlen);
3722 tree len = force_gimple_operand_gsi (gsi,
3723 wide_int_to_tree (type, minlen),
3724 true, NULL, true, GSI_SAME_STMT);
3725 replace_call_with_value (gsi, len);
3726 return true;
3727 }
3728
3729 /* Set the strlen() range to [MINLEN, MAXLEN]. */
3730 if (tree lhs = gimple_call_lhs (stmt))
3731 set_strlen_range (lhs, minlen, maxlen);
3732
3733 return false;
3734 }
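
/* For example (an illustrative sketch): strlen ("abc") folds to the
   constant 3 because the minimum and maximum lengths coincide, whereas
   for

     char buf[8];
     ...
     n = strlen (buf);

   no constant is known, but N can be given the range [0, 7] since the
   string must fit within BUF.  */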
3735
3736 /* Fold a call to __builtin_acc_on_device. */
3737
3738 static bool
3739 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
3740 {
3741 /* Defer folding until we know which compiler we're in. */
3742 if (symtab->state != EXPANSION)
3743 return false;
3744
3745 unsigned val_host = GOMP_DEVICE_HOST;
3746 unsigned val_dev = GOMP_DEVICE_NONE;
3747
3748 #ifdef ACCEL_COMPILER
3749 val_host = GOMP_DEVICE_NOT_HOST;
3750 val_dev = ACCEL_COMPILER_acc_device;
3751 #endif
3752
3753 location_t loc = gimple_location (gsi_stmt (*gsi));
3754
3755 tree host_eq = make_ssa_name (boolean_type_node);
3756 gimple *host_ass = gimple_build_assign
3757 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
3758 gimple_set_location (host_ass, loc);
3759 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
3760
3761 tree dev_eq = make_ssa_name (boolean_type_node);
3762 gimple *dev_ass = gimple_build_assign
3763 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
3764 gimple_set_location (dev_ass, loc);
3765 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
3766
3767 tree result = make_ssa_name (boolean_type_node);
3768 gimple *result_ass = gimple_build_assign
3769 (result, BIT_IOR_EXPR, host_eq, dev_eq);
3770 gimple_set_location (result_ass, loc);
3771 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
3772
3773 replace_call_with_value (gsi, result);
3774
3775 return true;
3776 }
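
/* A sketch of the sequence built above, in GIMPLE form (the SSA names
   are illustrative):

     _1 = arg0 == VAL_HOST;
     _2 = arg0 == VAL_DEV;
     _3 = _1 | _2;

   where VAL_HOST/VAL_DEV are GOMP_DEVICE_HOST/GOMP_DEVICE_NONE when
   compiling for the host, and GOMP_DEVICE_NOT_HOST plus the device's
   own code under ACCEL_COMPILER.  */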
3777
3778 /* Fold realloc (0, n) -> malloc (n). */
3779
3780 static bool
3781 gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
3782 {
3783 gimple *stmt = gsi_stmt (*gsi);
3784 tree arg = gimple_call_arg (stmt, 0);
3785 tree size = gimple_call_arg (stmt, 1);
3786
3787 if (operand_equal_p (arg, null_pointer_node, 0))
3788 {
3789 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
3790 if (fn_malloc)
3791 {
3792 gcall *repl = gimple_build_call (fn_malloc, 1, size);
3793 replace_call_with_call_and_fold (gsi, repl);
3794 return true;
3795 }
3796 }
3797 return false;
3798 }
3799
3800 /* Fold the non-target builtin at *GSI and return whether any simplification
3801 was made. */
3802
3803 static bool
3804 gimple_fold_builtin (gimple_stmt_iterator *gsi)
3805 {
3806 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
3807 tree callee = gimple_call_fndecl (stmt);
3808
3809 /* Give up for always_inline inline builtins until they are
3810 inlined. */
3811 if (avoid_folding_inline_builtin (callee))
3812 return false;
3813
3814 unsigned n = gimple_call_num_args (stmt);
3815 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
3816 switch (fcode)
3817 {
3818 case BUILT_IN_BCMP:
3819 return gimple_fold_builtin_bcmp (gsi);
3820 case BUILT_IN_BCOPY:
3821 return gimple_fold_builtin_bcopy (gsi);
3822 case BUILT_IN_BZERO:
3823 return gimple_fold_builtin_bzero (gsi);
3824
3825 case BUILT_IN_MEMSET:
3826 return gimple_fold_builtin_memset (gsi,
3827 gimple_call_arg (stmt, 1),
3828 gimple_call_arg (stmt, 2));
3829 case BUILT_IN_MEMCPY:
3830 case BUILT_IN_MEMPCPY:
3831 case BUILT_IN_MEMMOVE:
3832 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3833 gimple_call_arg (stmt, 1), fcode);
3834 case BUILT_IN_SPRINTF_CHK:
3835 case BUILT_IN_VSPRINTF_CHK:
3836 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
3837 case BUILT_IN_STRCAT_CHK:
3838 return gimple_fold_builtin_strcat_chk (gsi);
3839 case BUILT_IN_STRNCAT_CHK:
3840 return gimple_fold_builtin_strncat_chk (gsi);
3841 case BUILT_IN_STRLEN:
3842 return gimple_fold_builtin_strlen (gsi);
3843 case BUILT_IN_STRCPY:
3844 return gimple_fold_builtin_strcpy (gsi,
3845 gimple_call_arg (stmt, 0),
3846 gimple_call_arg (stmt, 1));
3847 case BUILT_IN_STRNCPY:
3848 return gimple_fold_builtin_strncpy (gsi,
3849 gimple_call_arg (stmt, 0),
3850 gimple_call_arg (stmt, 1),
3851 gimple_call_arg (stmt, 2));
3852 case BUILT_IN_STRCAT:
3853 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
3854 gimple_call_arg (stmt, 1));
3855 case BUILT_IN_STRNCAT:
3856 return gimple_fold_builtin_strncat (gsi);
3857 case BUILT_IN_INDEX:
3858 case BUILT_IN_STRCHR:
3859 return gimple_fold_builtin_strchr (gsi, false);
3860 case BUILT_IN_RINDEX:
3861 case BUILT_IN_STRRCHR:
3862 return gimple_fold_builtin_strchr (gsi, true);
3863 case BUILT_IN_STRSTR:
3864 return gimple_fold_builtin_strstr (gsi);
3865 case BUILT_IN_STRCMP:
3866 case BUILT_IN_STRCMP_EQ:
3867 case BUILT_IN_STRCASECMP:
3868 case BUILT_IN_STRNCMP:
3869 case BUILT_IN_STRNCMP_EQ:
3870 case BUILT_IN_STRNCASECMP:
3871 return gimple_fold_builtin_string_compare (gsi);
3872 case BUILT_IN_MEMCHR:
3873 return gimple_fold_builtin_memchr (gsi);
3874 case BUILT_IN_FPUTS:
3875 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3876 gimple_call_arg (stmt, 1), false);
3877 case BUILT_IN_FPUTS_UNLOCKED:
3878 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3879 gimple_call_arg (stmt, 1), true);
3880 case BUILT_IN_MEMCPY_CHK:
3881 case BUILT_IN_MEMPCPY_CHK:
3882 case BUILT_IN_MEMMOVE_CHK:
3883 case BUILT_IN_MEMSET_CHK:
3884 return gimple_fold_builtin_memory_chk (gsi,
3885 gimple_call_arg (stmt, 0),
3886 gimple_call_arg (stmt, 1),
3887 gimple_call_arg (stmt, 2),
3888 gimple_call_arg (stmt, 3),
3889 fcode);
3890 case BUILT_IN_STPCPY:
3891 return gimple_fold_builtin_stpcpy (gsi);
3892 case BUILT_IN_STRCPY_CHK:
3893 case BUILT_IN_STPCPY_CHK:
3894 return gimple_fold_builtin_stxcpy_chk (gsi,
3895 gimple_call_arg (stmt, 0),
3896 gimple_call_arg (stmt, 1),
3897 gimple_call_arg (stmt, 2),
3898 fcode);
3899 case BUILT_IN_STRNCPY_CHK:
3900 case BUILT_IN_STPNCPY_CHK:
3901 return gimple_fold_builtin_stxncpy_chk (gsi,
3902 gimple_call_arg (stmt, 0),
3903 gimple_call_arg (stmt, 1),
3904 gimple_call_arg (stmt, 2),
3905 gimple_call_arg (stmt, 3),
3906 fcode);
3907 case BUILT_IN_SNPRINTF_CHK:
3908 case BUILT_IN_VSNPRINTF_CHK:
3909 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
3910
3911 case BUILT_IN_FPRINTF:
3912 case BUILT_IN_FPRINTF_UNLOCKED:
3913 case BUILT_IN_VFPRINTF:
3914 if (n == 2 || n == 3)
3915 return gimple_fold_builtin_fprintf (gsi,
3916 gimple_call_arg (stmt, 0),
3917 gimple_call_arg (stmt, 1),
3918 n == 3
3919 ? gimple_call_arg (stmt, 2)
3920 : NULL_TREE,
3921 fcode);
3922 break;
3923 case BUILT_IN_FPRINTF_CHK:
3924 case BUILT_IN_VFPRINTF_CHK:
3925 if (n == 3 || n == 4)
3926 return gimple_fold_builtin_fprintf (gsi,
3927 gimple_call_arg (stmt, 0),
3928 gimple_call_arg (stmt, 2),
3929 n == 4
3930 ? gimple_call_arg (stmt, 3)
3931 : NULL_TREE,
3932 fcode);
3933 break;
3934 case BUILT_IN_PRINTF:
3935 case BUILT_IN_PRINTF_UNLOCKED:
3936 case BUILT_IN_VPRINTF:
3937 if (n == 1 || n == 2)
3938 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
3939 n == 2
3940 ? gimple_call_arg (stmt, 1)
3941 : NULL_TREE, fcode);
3942 break;
3943 case BUILT_IN_PRINTF_CHK:
3944 case BUILT_IN_VPRINTF_CHK:
3945 if (n == 2 || n == 3)
3946 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
3947 n == 3
3948 ? gimple_call_arg (stmt, 2)
3949 : NULL_TREE, fcode);
3950 break;
3951 case BUILT_IN_ACC_ON_DEVICE:
3952 return gimple_fold_builtin_acc_on_device (gsi,
3953 gimple_call_arg (stmt, 0));
3954 case BUILT_IN_REALLOC:
3955 return gimple_fold_builtin_realloc (gsi);
3956
3957 default:;
3958 }
3959
3960 /* Try the generic builtin folder. */
3961 bool ignore = (gimple_call_lhs (stmt) == NULL);
3962 tree result = fold_call_stmt (stmt, ignore);
3963 if (result)
3964 {
3965 if (ignore)
3966 STRIP_NOPS (result);
3967 else
3968 result = fold_convert (gimple_call_return_type (stmt), result);
3969 if (!update_call_from_tree (gsi, result))
3970 gimplify_and_update_call_from_tree (gsi, result);
3971 return true;
3972 }
3973
3974 return false;
3975 }
3976
3977 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
3978 function calls to constants, where possible. */
3979
3980 static tree
3981 fold_internal_goacc_dim (const gimple *call)
3982 {
3983 int axis = oacc_get_ifn_dim_arg (call);
3984 int size = oacc_get_fn_dim_size (current_function_decl, axis);
3985 tree result = NULL_TREE;
3986 tree type = TREE_TYPE (gimple_call_lhs (call));
3987
3988 switch (gimple_call_internal_fn (call))
3989 {
3990 case IFN_GOACC_DIM_POS:
3991 /* If the size is 1, we know the answer. */
3992 if (size == 1)
3993 result = build_int_cst (type, 0);
3994 break;
3995 case IFN_GOACC_DIM_SIZE:
3996 /* If the size is not dynamic, we know the answer. */
3997 if (size)
3998 result = build_int_cst (type, size);
3999 break;
4000 default:
4001 break;
4002 }
4003
4004 return result;
4005 }
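
/* For example (illustrative): in an OpenACC region compiled with a
   known vector_length (32), IFN_GOACC_DIM_SIZE on the vector axis
   folds to 32, and on any axis of size 1, IFN_GOACC_DIM_POS must be
   0 and folds accordingly.  */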
4006
4007 /* Return true if STMT is an __atomic_compare_exchange_N call that is
4008 suitable for conversion into ATOMIC_COMPARE_EXCHANGE when its second
4009 argument is &var and var is addressable only because of such calls. */
4010
4011 bool
4012 optimize_atomic_compare_exchange_p (gimple *stmt)
4013 {
4014 if (gimple_call_num_args (stmt) != 6
4015 || !flag_inline_atomics
4016 || !optimize
4017 || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
4018 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
4019 || !gimple_vdef (stmt)
4020 || !gimple_vuse (stmt))
4021 return false;
4022
4023 tree fndecl = gimple_call_fndecl (stmt);
4024 switch (DECL_FUNCTION_CODE (fndecl))
4025 {
4026 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
4027 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
4028 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
4029 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
4030 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
4031 break;
4032 default:
4033 return false;
4034 }
4035
4036 tree expected = gimple_call_arg (stmt, 1);
4037 if (TREE_CODE (expected) != ADDR_EXPR
4038 || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
4039 return false;
4040
4041 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
4042 if (!is_gimple_reg_type (etype)
4043 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
4044 || TREE_THIS_VOLATILE (etype)
4045 || VECTOR_TYPE_P (etype)
4046 || TREE_CODE (etype) == COMPLEX_TYPE
4047 /* Don't optimize floating-point expected vars; VIEW_CONVERT_EXPRs
4048 might not preserve all the bits. See PR71716. */
4049 || SCALAR_FLOAT_TYPE_P (etype)
4050 || maybe_ne (TYPE_PRECISION (etype),
4051 GET_MODE_BITSIZE (TYPE_MODE (etype))))
4052 return false;
4053
4054 tree weak = gimple_call_arg (stmt, 3);
4055 if (!integer_zerop (weak) && !integer_onep (weak))
4056 return false;
4057
4058 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
4059 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
4060 machine_mode mode = TYPE_MODE (itype);
4061
4062 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
4063 == CODE_FOR_nothing
4064 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
4065 return false;
4066
4067 if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
4068 return false;
4069
4070 return true;
4071 }
4072
4073 /* Fold
4074 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
4075 into
4076 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
4077 i = IMAGPART_EXPR <t>;
4078 r = (_Bool) i;
4079 e = REALPART_EXPR <t>; */
4080
4081 void
4082 fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
4083 {
4084 gimple *stmt = gsi_stmt (*gsi);
4085 tree fndecl = gimple_call_fndecl (stmt);
4086 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
4087 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
4088 tree ctype = build_complex_type (itype);
4089 tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
4090 bool throws = false;
4091 edge e = NULL;
4092 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
4093 expected);
4094 gsi_insert_before (gsi, g, GSI_SAME_STMT);
4095 gimple_stmt_iterator gsiret = gsi_for_stmt (g);
4096 if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
4097 {
4098 g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
4099 build1 (VIEW_CONVERT_EXPR, itype,
4100 gimple_assign_lhs (g)));
4101 gsi_insert_before (gsi, g, GSI_SAME_STMT);
4102 }
4103 int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
4104 + int_size_in_bytes (itype);
4105 g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
4106 gimple_call_arg (stmt, 0),
4107 gimple_assign_lhs (g),
4108 gimple_call_arg (stmt, 2),
4109 build_int_cst (integer_type_node, flag),
4110 gimple_call_arg (stmt, 4),
4111 gimple_call_arg (stmt, 5));
4112 tree lhs = make_ssa_name (ctype);
4113 gimple_call_set_lhs (g, lhs);
4114 gimple_move_vops (g, stmt);
4115 tree oldlhs = gimple_call_lhs (stmt);
4116 if (stmt_can_throw_internal (cfun, stmt))
4117 {
4118 throws = true;
4119 e = find_fallthru_edge (gsi_bb (*gsi)->succs);
4120 }
4121 gimple_call_set_nothrow (as_a <gcall *> (g),
4122 gimple_call_nothrow_p (as_a <gcall *> (stmt)));
4123 gimple_call_set_lhs (stmt, NULL_TREE);
4124 gsi_replace (gsi, g, true);
4125 if (oldlhs)
4126 {
4127 g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
4128 build1 (IMAGPART_EXPR, itype, lhs));
4129 if (throws)
4130 {
4131 gsi_insert_on_edge_immediate (e, g);
4132 *gsi = gsi_for_stmt (g);
4133 }
4134 else
4135 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4136 g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
4137 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4138 }
4139 g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
4140 build1 (REALPART_EXPR, itype, lhs));
4141 if (throws && oldlhs == NULL_TREE)
4142 {
4143 gsi_insert_on_edge_immediate (e, g);
4144 *gsi = gsi_for_stmt (g);
4145 }
4146 else
4147 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4148 if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
4149 {
4150 g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
4151 VIEW_CONVERT_EXPR,
4152 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
4153 gimple_assign_lhs (g)));
4154 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4155 }
4156 g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
4157 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4158 *gsi = gsiret;
4159 }
4160
4161 /* Return true if the result of ARG0 CODE ARG1, computed in infinite
4162 signed precision, doesn't fit into TYPE. The overflow test is done
4163 regardless of -fwrapv, and also for unsigned types. */
4164
4165 bool
4166 arith_overflowed_p (enum tree_code code, const_tree type,
4167 const_tree arg0, const_tree arg1)
4168 {
4169 widest2_int warg0 = widest2_int_cst (arg0);
4170 widest2_int warg1 = widest2_int_cst (arg1);
4171 widest2_int wres;
4172 switch (code)
4173 {
4174 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
4175 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
4176 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
4177 default: gcc_unreachable ();
4178 }
4179 signop sign = TYPE_SIGN (type);
4180 if (sign == UNSIGNED && wi::neg_p (wres))
4181 return true;
4182 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
4183 }
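
/* A worked example of the check above: for TYPE == signed char,
   100 + 100 evaluated in the double-width widest2_int gives 200, and
   wi::min_precision (200, SIGNED) == 9 > 8 == TYPE_PRECISION (TYPE),
   so the addition overflows.  For TYPE == unsigned char, 3 - 5 gives
   -2, which is negative, so the UNSIGNED early return fires.  */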
4184
4185 /* If IFN_MASK_LOAD/STORE call CALL is unconditional, return a MEM_REF
4186 for the memory it references, otherwise return null. VECTYPE is the
4187 type of the memory vector. */
4188
4189 static tree
4190 gimple_fold_mask_load_store_mem_ref (gcall *call, tree vectype)
4191 {
4192 tree ptr = gimple_call_arg (call, 0);
4193 tree alias_align = gimple_call_arg (call, 1);
4194 tree mask = gimple_call_arg (call, 2);
4195 if (!tree_fits_uhwi_p (alias_align) || !integer_all_onesp (mask))
4196 return NULL_TREE;
4197
4198 unsigned HOST_WIDE_INT align = tree_to_uhwi (alias_align) * BITS_PER_UNIT;
4199 if (TYPE_ALIGN (vectype) != align)
4200 vectype = build_aligned_type (vectype, align);
4201 tree offset = build_zero_cst (TREE_TYPE (alias_align));
4202 return fold_build2 (MEM_REF, vectype, ptr, offset);
4203 }
4204
4205 /* Try to fold IFN_MASK_LOAD call CALL. Return true on success. */
4206
4207 static bool
4208 gimple_fold_mask_load (gimple_stmt_iterator *gsi, gcall *call)
4209 {
4210 tree lhs = gimple_call_lhs (call);
4211 if (!lhs)
4212 return false;
4213
4214 if (tree rhs = gimple_fold_mask_load_store_mem_ref (call, TREE_TYPE (lhs)))
4215 {
4216 gassign *new_stmt = gimple_build_assign (lhs, rhs);
4217 gimple_set_location (new_stmt, gimple_location (call));
4218 gimple_move_vops (new_stmt, call);
4219 gsi_replace (gsi, new_stmt, false);
4220 return true;
4221 }
4222 return false;
4223 }
4224
4225 /* Try to fold IFN_MASK_STORE call CALL. Return true on success. */
4226
4227 static bool
4228 gimple_fold_mask_store (gimple_stmt_iterator *gsi, gcall *call)
4229 {
4230 tree rhs = gimple_call_arg (call, 3);
4231 if (tree lhs = gimple_fold_mask_load_store_mem_ref (call, TREE_TYPE (rhs)))
4232 {
4233 gassign *new_stmt = gimple_build_assign (lhs, rhs);
4234 gimple_set_location (new_stmt, gimple_location (call));
4235 gimple_move_vops (new_stmt, call);
4236 gsi_replace (gsi, new_stmt, false);
4237 return true;
4238 }
4239 return false;
4240 }
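
/* An illustrative sketch: a vectorized access such as

     vect_1 = .MASK_LOAD (ptr, align, { -1, -1, -1, -1 });

   has an all-ones mask, so it is unconditional and can become a plain
   aligned vector load

     vect_1 = MEM <vector(4) int> [(int *) ptr];

   and likewise for .MASK_STORE.  The dump syntax here is approximate,
   not authoritative.  */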
4241
4242 /* Attempt to fold a call statement referenced by the statement iterator GSI.
4243 The statement may be replaced by another statement, e.g., if the call
4244 simplifies to a constant value. Return true if any changes were made.
4245 It is assumed that the operands have been previously folded. */
4246
4247 static bool
4248 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
4249 {
4250 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
4251 tree callee;
4252 bool changed = false;
4253 unsigned i;
4254
4255 /* Fold *& in call arguments. */
4256 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4257 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
4258 {
4259 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
4260 if (tmp)
4261 {
4262 gimple_call_set_arg (stmt, i, tmp);
4263 changed = true;
4264 }
4265 }
4266
4267 /* Check for virtual calls that became direct calls. */
4268 callee = gimple_call_fn (stmt);
4269 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
4270 {
4271 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
4272 {
4273 if (dump_file && virtual_method_call_p (callee)
4274 && !possible_polymorphic_call_target_p
4275 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
4276 (OBJ_TYPE_REF_EXPR (callee)))))
4277 {
4278 fprintf (dump_file,
4279 "Type inheritance inconsistent devirtualization of ");
4280 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
4281 fprintf (dump_file, " to ");
4282 print_generic_expr (dump_file, callee, TDF_SLIM);
4283 fprintf (dump_file, "\n");
4284 }
4285
4286 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
4287 changed = true;
4288 }
4289 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
4290 {
4291 bool final;
4292 vec <cgraph_node *>targets
4293 = possible_polymorphic_call_targets (callee, stmt, &final);
4294 if (final && targets.length () <= 1 && dbg_cnt (devirt))
4295 {
4296 tree lhs = gimple_call_lhs (stmt);
4297 if (dump_enabled_p ())
4298 {
4299 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
4300 "folding virtual function call to %s\n",
4301 targets.length () == 1
4302 ? targets[0]->name ()
4303 : "__builtin_unreachable");
4304 }
4305 if (targets.length () == 1)
4306 {
4307 tree fndecl = targets[0]->decl;
4308 gimple_call_set_fndecl (stmt, fndecl);
4309 changed = true;
4310 /* If changing the call to __cxa_pure_virtual
4311 or similar noreturn function, adjust gimple_call_fntype
4312 too. */
4313 if (gimple_call_noreturn_p (stmt)
4314 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
4315 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
4316 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
4317 == void_type_node))
4318 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
4319 /* If the call becomes noreturn, remove the lhs. */
4320 if (lhs
4321 && gimple_call_noreturn_p (stmt)
4322 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
4323 || should_remove_lhs_p (lhs)))
4324 {
4325 if (TREE_CODE (lhs) == SSA_NAME)
4326 {
4327 tree var = create_tmp_var (TREE_TYPE (lhs));
4328 tree def = get_or_create_ssa_default_def (cfun, var);
4329 gimple *new_stmt = gimple_build_assign (lhs, def);
4330 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
4331 }
4332 gimple_call_set_lhs (stmt, NULL_TREE);
4333 }
4334 maybe_remove_unused_call_args (cfun, stmt);
4335 }
4336 else
4337 {
4338 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
4339 gimple *new_stmt = gimple_build_call (fndecl, 0);
4340 gimple_set_location (new_stmt, gimple_location (stmt));
4341 /* If the call had an SSA name as lhs, morph that into
4342 an uninitialized value. */
4343 if (lhs && TREE_CODE (lhs) == SSA_NAME)
4344 {
4345 tree var = create_tmp_var (TREE_TYPE (lhs));
4346 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
4347 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
4348 set_ssa_default_def (cfun, var, lhs);
4349 }
4350 gimple_move_vops (new_stmt, stmt);
4351 gsi_replace (gsi, new_stmt, false);
4352 return true;
4353 }
4354 }
4355 }
4356 }
4357
4358 /* Check for indirect calls that became direct calls, and then
4359 no longer require a static chain. */
4360 if (gimple_call_chain (stmt))
4361 {
4362 tree fn = gimple_call_fndecl (stmt);
4363 if (fn && !DECL_STATIC_CHAIN (fn))
4364 {
4365 gimple_call_set_chain (stmt, NULL);
4366 changed = true;
4367 }
4368 else
4369 {
4370 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
4371 if (tmp)
4372 {
4373 gimple_call_set_chain (stmt, tmp);
4374 changed = true;
4375 }
4376 }
4377 }
4378
4379 if (inplace)
4380 return changed;
4381
4382 /* Check for builtins that CCP can handle using information not
4383 available in the generic fold routines. */
4384 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4385 {
4386 if (gimple_fold_builtin (gsi))
4387 changed = true;
4388 }
4389 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
4390 {
4391 changed |= targetm.gimple_fold_builtin (gsi);
4392 }
4393 else if (gimple_call_internal_p (stmt))
4394 {
4395 enum tree_code subcode = ERROR_MARK;
4396 tree result = NULL_TREE;
4397 bool cplx_result = false;
4398 tree overflow = NULL_TREE;
4399 switch (gimple_call_internal_fn (stmt))
4400 {
4401 case IFN_BUILTIN_EXPECT:
4402 result = fold_builtin_expect (gimple_location (stmt),
4403 gimple_call_arg (stmt, 0),
4404 gimple_call_arg (stmt, 1),
4405 gimple_call_arg (stmt, 2),
4406 NULL_TREE);
4407 break;
4408 case IFN_UBSAN_OBJECT_SIZE:
4409 {
4410 tree offset = gimple_call_arg (stmt, 1);
4411 tree objsize = gimple_call_arg (stmt, 2);
4412 if (integer_all_onesp (objsize)
4413 || (TREE_CODE (offset) == INTEGER_CST
4414 && TREE_CODE (objsize) == INTEGER_CST
4415 && tree_int_cst_le (offset, objsize)))
4416 {
4417 replace_call_with_value (gsi, NULL_TREE);
4418 return true;
4419 }
4420 }
4421 break;
4422 case IFN_UBSAN_PTR:
4423 if (integer_zerop (gimple_call_arg (stmt, 1)))
4424 {
4425 replace_call_with_value (gsi, NULL_TREE);
4426 return true;
4427 }
4428 break;
4429 case IFN_UBSAN_BOUNDS:
4430 {
4431 tree index = gimple_call_arg (stmt, 1);
4432 tree bound = gimple_call_arg (stmt, 2);
4433 if (TREE_CODE (index) == INTEGER_CST
4434 && TREE_CODE (bound) == INTEGER_CST)
4435 {
4436 index = fold_convert (TREE_TYPE (bound), index);
4437 if (TREE_CODE (index) == INTEGER_CST
4438 && tree_int_cst_le (index, bound))
4439 {
4440 replace_call_with_value (gsi, NULL_TREE);
4441 return true;
4442 }
4443 }
4444 }
4445 break;
4446 case IFN_GOACC_DIM_SIZE:
4447 case IFN_GOACC_DIM_POS:
4448 result = fold_internal_goacc_dim (stmt);
4449 break;
4450 case IFN_UBSAN_CHECK_ADD:
4451 subcode = PLUS_EXPR;
4452 break;
4453 case IFN_UBSAN_CHECK_SUB:
4454 subcode = MINUS_EXPR;
4455 break;
4456 case IFN_UBSAN_CHECK_MUL:
4457 subcode = MULT_EXPR;
4458 break;
4459 case IFN_ADD_OVERFLOW:
4460 subcode = PLUS_EXPR;
4461 cplx_result = true;
4462 break;
4463 case IFN_SUB_OVERFLOW:
4464 subcode = MINUS_EXPR;
4465 cplx_result = true;
4466 break;
4467 case IFN_MUL_OVERFLOW:
4468 subcode = MULT_EXPR;
4469 cplx_result = true;
4470 break;
4471 case IFN_MASK_LOAD:
4472 changed |= gimple_fold_mask_load (gsi, stmt);
4473 break;
4474 case IFN_MASK_STORE:
4475 changed |= gimple_fold_mask_store (gsi, stmt);
4476 break;
4477 default:
4478 break;
4479 }
4480 if (subcode != ERROR_MARK)
4481 {
4482 tree arg0 = gimple_call_arg (stmt, 0);
4483 tree arg1 = gimple_call_arg (stmt, 1);
4484 tree type = TREE_TYPE (arg0);
4485 if (cplx_result)
4486 {
4487 tree lhs = gimple_call_lhs (stmt);
4488 if (lhs == NULL_TREE)
4489 type = NULL_TREE;
4490 else
4491 type = TREE_TYPE (TREE_TYPE (lhs));
4492 }
4493 if (type == NULL_TREE)
4494 ;
4495 /* x = y + 0; x = y - 0; x = y * 0; */
4496 else if (integer_zerop (arg1))
4497 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
4498 /* x = 0 + y; x = 0 * y; */
4499 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
4500 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
4501 /* x = y - y; */
4502 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
4503 result = integer_zero_node;
4504 /* x = y * 1; x = 1 * y; */
4505 else if (subcode == MULT_EXPR && integer_onep (arg1))
4506 result = arg0;
4507 else if (subcode == MULT_EXPR && integer_onep (arg0))
4508 result = arg1;
4509 else if (TREE_CODE (arg0) == INTEGER_CST
4510 && TREE_CODE (arg1) == INTEGER_CST)
4511 {
4512 if (cplx_result)
4513 result = int_const_binop (subcode, fold_convert (type, arg0),
4514 fold_convert (type, arg1));
4515 else
4516 result = int_const_binop (subcode, arg0, arg1);
4517 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
4518 {
4519 if (cplx_result)
4520 overflow = build_one_cst (type);
4521 else
4522 result = NULL_TREE;
4523 }
4524 }
4525 if (result)
4526 {
4527 if (result == integer_zero_node)
4528 result = build_zero_cst (type);
4529 else if (cplx_result && TREE_TYPE (result) != type)
4530 {
4531 if (TREE_CODE (result) == INTEGER_CST)
4532 {
4533 if (arith_overflowed_p (PLUS_EXPR, type, result,
4534 integer_zero_node))
4535 overflow = build_one_cst (type);
4536 }
4537 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
4538 && TYPE_UNSIGNED (type))
4539 || (TYPE_PRECISION (type)
4540 < (TYPE_PRECISION (TREE_TYPE (result))
4541 + (TYPE_UNSIGNED (TREE_TYPE (result))
4542 && !TYPE_UNSIGNED (type)))))
4543 result = NULL_TREE;
4544 if (result)
4545 result = fold_convert (type, result);
4546 }
4547 }
4548 }
4549
4550 if (result)
4551 {
4552 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
4553 result = drop_tree_overflow (result);
4554 if (cplx_result)
4555 {
4556 if (overflow == NULL_TREE)
4557 overflow = build_zero_cst (TREE_TYPE (result));
4558 tree ctype = build_complex_type (TREE_TYPE (result));
4559 if (TREE_CODE (result) == INTEGER_CST
4560 && TREE_CODE (overflow) == INTEGER_CST)
4561 result = build_complex (ctype, result, overflow);
4562 else
4563 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
4564 ctype, result, overflow);
4565 }
4566 if (!update_call_from_tree (gsi, result))
4567 gimplify_and_update_call_from_tree (gsi, result);
4568 changed = true;
4569 }
4570 }
4571
4572 return changed;
4573 }
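
/* A sketch of the overflow-builtin folding above: for

     _5 = .ADD_OVERFLOW (2, 3);

   both operands are INTEGER_CSTs and int_const_binop yields 5 with no
   overflow, so the call is replaced by the complex constant
   COMPLEX_CST <5, 0>; REALPART_EXPR/IMAGPART_EXPR uses of _5 can then
   fold to the sum and to the overflow flag respectively.  */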
4574
4575
4576 /* Return true if NAME has a use on STMT. */
4577
4578 static bool
4579 has_use_on_stmt (tree name, gimple *stmt)
4580 {
4581 imm_use_iterator iter;
4582 use_operand_p use_p;
4583 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
4584 if (USE_STMT (use_p) == stmt)
4585 return true;
4586 return false;
4587 }
4588
4589 /* Worker for fold_stmt_1 dispatch to pattern-based folding with
4590 gimple_simplify.
4591
4592 Replaces *GSI with the simplification result in RES_OP
4593 and the associated statements in *SEQ. Does the replacement
4594 according to INPLACE and returns true if the operation succeeded. */
4595
4596 static bool
4597 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
4598 gimple_match_op *res_op,
4599 gimple_seq *seq, bool inplace)
4600 {
4601 gimple *stmt = gsi_stmt (*gsi);
4602 tree *ops = res_op->ops;
4603 unsigned int num_ops = res_op->num_ops;
4604
4605 /* Play safe and do not allow abnormals to be mentioned in
4606 newly created statements. See also maybe_push_res_to_seq.
4607 As an exception allow such uses if there was a use of the
4608 same SSA name on the old stmt. */
4609 for (unsigned int i = 0; i < num_ops; ++i)
4610 if (TREE_CODE (ops[i]) == SSA_NAME
4611 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])
4612 && !has_use_on_stmt (ops[i], stmt))
4613 return false;
4614
4615 if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
4616 for (unsigned int i = 0; i < 2; ++i)
4617 if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
4618 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))
4619 && !has_use_on_stmt (TREE_OPERAND (ops[0], i), stmt))
4620 return false;
4621
4622 /* Don't insert new statements when INPLACE is true, even if we could
4623 reuse STMT for the final statement. */
4624 if (inplace && !gimple_seq_empty_p (*seq))
4625 return false;
4626
4627 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
4628 {
4629 gcc_assert (res_op->code.is_tree_code ());
4630 if (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison
4631 /* The condition of a GIMPLE_COND may not throw. */
4632 && (!flag_exceptions
4633 || !cfun->can_throw_non_call_exceptions
4634 || !operation_could_trap_p (res_op->code,
4635 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
4636 false, NULL_TREE)))
4637 gimple_cond_set_condition (cond_stmt, res_op->code, ops[0], ops[1]);
4638 else if (res_op->code == SSA_NAME)
4639 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
4640 build_zero_cst (TREE_TYPE (ops[0])));
4641 else if (res_op->code == INTEGER_CST)
4642 {
4643 if (integer_zerop (ops[0]))
4644 gimple_cond_make_false (cond_stmt);
4645 else
4646 gimple_cond_make_true (cond_stmt);
4647 }
4648 else if (!inplace)
4649 {
4650 tree res = maybe_push_res_to_seq (res_op, seq);
4651 if (!res)
4652 return false;
4653 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
4654 build_zero_cst (TREE_TYPE (res)));
4655 }
4656 else
4657 return false;
4658 if (dump_file && (dump_flags & TDF_DETAILS))
4659 {
4660 fprintf (dump_file, "gimple_simplified to ");
4661 if (!gimple_seq_empty_p (*seq))
4662 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4663 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4664 0, TDF_SLIM);
4665 }
4666 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4667 return true;
4668 }
4669 else if (is_gimple_assign (stmt)
4670 && res_op->code.is_tree_code ())
4671 {
4672 if (!inplace
4673 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (res_op->code))
4674 {
4675 maybe_build_generic_op (res_op);
4676 gimple_assign_set_rhs_with_ops (gsi, res_op->code,
4677 res_op->op_or_null (0),
4678 res_op->op_or_null (1),
4679 res_op->op_or_null (2));
4680 if (dump_file && (dump_flags & TDF_DETAILS))
4681 {
4682 fprintf (dump_file, "gimple_simplified to ");
4683 if (!gimple_seq_empty_p (*seq))
4684 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4685 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4686 0, TDF_SLIM);
4687 }
4688 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4689 return true;
4690 }
4691 }
4692 else if (res_op->code.is_fn_code ()
4693 && gimple_call_combined_fn (stmt) == res_op->code)
4694 {
4695 gcc_assert (num_ops == gimple_call_num_args (stmt));
4696 for (unsigned int i = 0; i < num_ops; ++i)
4697 gimple_call_set_arg (stmt, i, ops[i]);
4698 if (dump_file && (dump_flags & TDF_DETAILS))
4699 {
4700 fprintf (dump_file, "gimple_simplified to ");
4701 if (!gimple_seq_empty_p (*seq))
4702 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4703 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
4704 }
4705 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4706 return true;
4707 }
4708 else if (!inplace)
4709 {
4710 if (gimple_has_lhs (stmt))
4711 {
4712 tree lhs = gimple_get_lhs (stmt);
4713 if (!maybe_push_res_to_seq (res_op, seq, lhs))
4714 return false;
4715 if (dump_file && (dump_flags & TDF_DETAILS))
4716 {
4717 fprintf (dump_file, "gimple_simplified to ");
4718 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4719 }
4720 gsi_replace_with_seq_vops (gsi, *seq);
4721 return true;
4722 }
4723 else
4724 gcc_unreachable ();
4725 }
4726
4727 return false;
4728 }
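
/* For instance (illustrative): if gimple_simplify reduces the
   condition of "if (a_1 != a_1)" to the INTEGER_CST 0, the code above
   rewrites the GIMPLE_COND via gimple_cond_make_false; if it instead
   reduces it to an SSA name b_2, the condition becomes "b_2 != 0".  */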
4729
4730 /* Canonicalize MEM_REFs invariant address operand after propagation. */
4731
4732 static bool
4733 maybe_canonicalize_mem_ref_addr (tree *t)
4734 {
4735 bool res = false;
4736
4737 if (TREE_CODE (*t) == ADDR_EXPR)
4738 t = &TREE_OPERAND (*t, 0);
4739
4740 /* The C and C++ frontends use an ARRAY_REF for indexing with their
4741 generic vector extension. The actual vector referenced is
4742 view-converted to an array type for this purpose. If the index
4743 is constant the canonical representation in the middle-end is a
4744 BIT_FIELD_REF, so rewrite the former to the latter here. */
4745 if (TREE_CODE (*t) == ARRAY_REF
4746 && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
4747 && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
4748 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
4749 {
4750 tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
4751 if (VECTOR_TYPE_P (vtype))
4752 {
4753 tree low = array_ref_low_bound (*t);
4754 if (TREE_CODE (low) == INTEGER_CST)
4755 {
4756 if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
4757 {
4758 widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
4759 wi::to_widest (low));
4760 idx = wi::mul (idx, wi::to_widest
4761 (TYPE_SIZE (TREE_TYPE (*t))));
4762 widest_int ext
4763 = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
4764 if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
4765 {
4766 *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
4767 TREE_TYPE (*t),
4768 TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
4769 TYPE_SIZE (TREE_TYPE (*t)),
4770 wide_int_to_tree (bitsizetype, idx));
4771 res = true;
4772 }
4773 }
4774 }
4775 }
4776 }
4777
4778 while (handled_component_p (*t))
4779 t = &TREE_OPERAND (*t, 0);
4780
4781 /* Canonicalize MEM [&foo.bar, 0], which appears after propagation
4782 of invariant addresses into an SSA name MEM_REF address. */
4783 if (TREE_CODE (*t) == MEM_REF
4784 || TREE_CODE (*t) == TARGET_MEM_REF)
4785 {
4786 tree addr = TREE_OPERAND (*t, 0);
4787 if (TREE_CODE (addr) == ADDR_EXPR
4788 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
4789 || handled_component_p (TREE_OPERAND (addr, 0))))
4790 {
4791 tree base;
4792 poly_int64 coffset;
4793 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
4794 &coffset);
4795 if (!base)
4796 gcc_unreachable ();
4797
4798 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
4799 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
4800 TREE_OPERAND (*t, 1),
4801 size_int (coffset));
4802 res = true;
4803 }
4804 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
4805 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
4806 }
4807
4808 /* Canonicalize back MEM_REFs to plain reference trees if the object
4809 accessed is a decl that has the same access semantics as the MEM_REF. */
4810 if (TREE_CODE (*t) == MEM_REF
4811 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
4812 && integer_zerop (TREE_OPERAND (*t, 1))
4813 && MR_DEPENDENCE_CLIQUE (*t) == 0)
4814 {
4815 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4816 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
4817 if (/* Same volatile qualification. */
4818 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
4819 /* Same TBAA behavior with -fstrict-aliasing. */
4820 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
4821 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
4822 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
4823 /* Same alignment. */
4824 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
4825 /* We have to be careful not to drop a required conversion
4826 from the rhs to the lhs if *t appears on the lhs, or vice-versa
4827 if it appears on the rhs. Thus require strict type
4828 compatibility. */
4829 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
4830 {
4831 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4832 res = true;
4833 }
4834 }
4835
4836 /* Canonicalize TARGET_MEM_REF in particular with respect to
4837 the indexes becoming constant. */
4838 else if (TREE_CODE (*t) == TARGET_MEM_REF)
4839 {
4840 tree tem = maybe_fold_tmr (*t);
4841 if (tem)
4842 {
4843 *t = tem;
4844 res = true;
4845 }
4846 }
4847
4848 return res;
4849 }
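
/* Two illustrative examples of the canonicalizations above
   (the dump syntax is approximate):

     VIEW_CONVERT_EXPR<int[4]>(v_1)[2]  =>  BIT_FIELD_REF <v_1, 32, 64>

   for the C/C++ generic vector extension indexed with a constant, and

     MEM[(int *) &a, 0]  =>  a

   when the decl A has the same type, volatility, alignment and TBAA
   behavior as the MEM_REF.  */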
4850
4851 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
4852 distinguishes both cases. */
4853
4854 static bool
4855 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
4856 {
4857 bool changed = false;
4858 gimple *stmt = gsi_stmt (*gsi);
4859 bool nowarning = gimple_no_warning_p (stmt);
4860 unsigned i;
4861 fold_defer_overflow_warnings ();
4862
4863 /* First do required canonicalization of [TARGET_]MEM_REF addresses
4864 after propagation.
4865 ??? This shouldn't be done in generic folding but in the
4866 propagation helpers which also know whether an address was
4867 propagated.
4868 Also canonicalize operand order. */
4869 switch (gimple_code (stmt))
4870 {
4871 case GIMPLE_ASSIGN:
4872 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
4873 {
4874 tree *rhs = gimple_assign_rhs1_ptr (stmt);
4875 if ((REFERENCE_CLASS_P (*rhs)
4876 || TREE_CODE (*rhs) == ADDR_EXPR)
4877 && maybe_canonicalize_mem_ref_addr (rhs))
4878 changed = true;
4879 tree *lhs = gimple_assign_lhs_ptr (stmt);
4880 if (REFERENCE_CLASS_P (*lhs)
4881 && maybe_canonicalize_mem_ref_addr (lhs))
4882 changed = true;
4883 }
4884 else
4885 {
4886 /* Canonicalize operand order. */
4887 enum tree_code code = gimple_assign_rhs_code (stmt);
4888 if (TREE_CODE_CLASS (code) == tcc_comparison
4889 || commutative_tree_code (code)
4890 || commutative_ternary_tree_code (code))
4891 {
4892 tree rhs1 = gimple_assign_rhs1 (stmt);
4893 tree rhs2 = gimple_assign_rhs2 (stmt);
4894 if (tree_swap_operands_p (rhs1, rhs2))
4895 {
4896 gimple_assign_set_rhs1 (stmt, rhs2);
4897 gimple_assign_set_rhs2 (stmt, rhs1);
4898 if (TREE_CODE_CLASS (code) == tcc_comparison)
4899 gimple_assign_set_rhs_code (stmt,
4900 swap_tree_comparison (code));
4901 changed = true;
4902 }
4903 }
4904 }
4905 break;
4906 case GIMPLE_CALL:
4907 {
4908 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4909 {
4910 tree *arg = gimple_call_arg_ptr (stmt, i);
4911 if (REFERENCE_CLASS_P (*arg)
4912 && maybe_canonicalize_mem_ref_addr (arg))
4913 changed = true;
4914 }
4915 tree *lhs = gimple_call_lhs_ptr (stmt);
4916 if (*lhs
4917 && REFERENCE_CLASS_P (*lhs)
4918 && maybe_canonicalize_mem_ref_addr (lhs))
4919 changed = true;
4920 break;
4921 }
4922 case GIMPLE_ASM:
4923 {
4924 gasm *asm_stmt = as_a <gasm *> (stmt);
4925 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
4926 {
4927 tree link = gimple_asm_output_op (asm_stmt, i);
4928 tree op = TREE_VALUE (link);
4929 if (REFERENCE_CLASS_P (op)
4930 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4931 changed = true;
4932 }
4933 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
4934 {
4935 tree link = gimple_asm_input_op (asm_stmt, i);
4936 tree op = TREE_VALUE (link);
4937 if ((REFERENCE_CLASS_P (op)
4938 || TREE_CODE (op) == ADDR_EXPR)
4939 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4940 changed = true;
4941 }
4942 }
4943 break;
4944 case GIMPLE_DEBUG:
4945 if (gimple_debug_bind_p (stmt))
4946 {
4947 tree *val = gimple_debug_bind_get_value_ptr (stmt);
4948 if (*val
4949 && (REFERENCE_CLASS_P (*val)
4950 || TREE_CODE (*val) == ADDR_EXPR)
4951 && maybe_canonicalize_mem_ref_addr (val))
4952 changed = true;
4953 }
4954 break;
4955 case GIMPLE_COND:
4956 {
4957 /* Canonicalize operand order. */
4958 tree lhs = gimple_cond_lhs (stmt);
4959 tree rhs = gimple_cond_rhs (stmt);
4960 if (tree_swap_operands_p (lhs, rhs))
4961 {
4962 gcond *gc = as_a <gcond *> (stmt);
4963 gimple_cond_set_lhs (gc, rhs);
4964 gimple_cond_set_rhs (gc, lhs);
4965 gimple_cond_set_code (gc,
4966 swap_tree_comparison (gimple_cond_code (gc)));
4967 changed = true;
4968 }
4969 }
4970 default:;
4971 }
4972
4973 /* Dispatch to pattern-based folding. */
4974 if (!inplace
4975 || is_gimple_assign (stmt)
4976 || gimple_code (stmt) == GIMPLE_COND)
4977 {
4978 gimple_seq seq = NULL;
4979 gimple_match_op res_op;
4980 if (gimple_simplify (stmt, &res_op, inplace ? NULL : &seq,
4981 valueize, valueize))
4982 {
4983 if (replace_stmt_with_simplification (gsi, &res_op, &seq, inplace))
4984 changed = true;
4985 else
4986 gimple_seq_discard (seq);
4987 }
4988 }
4989
4990 stmt = gsi_stmt (*gsi);
4991
4992 /* Fold the main computation performed by the statement. */
4993 switch (gimple_code (stmt))
4994 {
4995 case GIMPLE_ASSIGN:
4996 {
4997 /* Try to canonicalize for boolean-typed X the comparisons
4998 X == 0, X == 1, X != 0, and X != 1. */
4999 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
5000 || gimple_assign_rhs_code (stmt) == NE_EXPR)
5001 {
5002 tree lhs = gimple_assign_lhs (stmt);
5003 tree op1 = gimple_assign_rhs1 (stmt);
5004 tree op2 = gimple_assign_rhs2 (stmt);
5005 tree type = TREE_TYPE (op1);
5006
5007 /* Check whether the comparison operands are of the same boolean
5008 type as the result type.
5009 Check that the second operand is an integer constant with value
5010 one or zero. */
5011 if (TREE_CODE (op2) == INTEGER_CST
5012 && (integer_zerop (op2) || integer_onep (op2))
5013 && useless_type_conversion_p (TREE_TYPE (lhs), type))
5014 {
5015 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
5016 bool is_logical_not = false;
5017
5018 /* X == 0 and X != 1 is a logical-not of X;
5019 X == 1 and X != 0 is X itself. */
5020 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
5021 || (cmp_code == NE_EXPR && integer_onep (op2)))
5022 is_logical_not = true;
5023
5024 if (is_logical_not == false)
5025 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
5026 /* Only for X of one-bit precision is the transformation
5027 !X -> ~X valid. */
5028 else if (TYPE_PRECISION (type) == 1)
5029 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
5030 /* Otherwise we use !X -> X ^ 1. */
5031 else
5032 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
5033 build_int_cst (type, 1));
5034 changed = true;
5035 break;
5036 }
5037 }
5038
5039 unsigned old_num_ops = gimple_num_ops (stmt);
5040 tree lhs = gimple_assign_lhs (stmt);
5041 tree new_rhs = fold_gimple_assign (gsi);
5042 if (new_rhs
5043 && !useless_type_conversion_p (TREE_TYPE (lhs),
5044 TREE_TYPE (new_rhs)))
5045 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
5046 if (new_rhs
5047 && (!inplace
5048 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
5049 {
5050 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
5051 changed = true;
5052 }
5053 break;
5054 }
5055
5056 case GIMPLE_CALL:
5057 changed |= gimple_fold_call (gsi, inplace);
5058 break;
5059
5060 case GIMPLE_ASM:
5061 /* Fold *& in asm operands. */
5062 {
5063 gasm *asm_stmt = as_a <gasm *> (stmt);
5064 size_t noutputs;
5065 const char **oconstraints;
5066 const char *constraint;
5067 bool allows_mem, allows_reg;
5068
5069 noutputs = gimple_asm_noutputs (asm_stmt);
5070 oconstraints = XALLOCAVEC (const char *, noutputs);
5071
5072 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
5073 {
5074 tree link = gimple_asm_output_op (asm_stmt, i);
5075 tree op = TREE_VALUE (link);
5076 oconstraints[i]
5077 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5078 if (REFERENCE_CLASS_P (op)
5079 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
5080 {
5081 TREE_VALUE (link) = op;
5082 changed = true;
5083 }
5084 }
5085 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
5086 {
5087 tree link = gimple_asm_input_op (asm_stmt, i);
5088 tree op = TREE_VALUE (link);
5089 constraint
5090 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5091 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
5092 oconstraints, &allows_mem, &allows_reg);
5093 if (REFERENCE_CLASS_P (op)
5094 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
5095 != NULL_TREE)
5096 {
5097 TREE_VALUE (link) = op;
5098 changed = true;
5099 }
5100 }
5101 }
5102 break;
5103
5104 case GIMPLE_DEBUG:
5105 if (gimple_debug_bind_p (stmt))
5106 {
5107 tree val = gimple_debug_bind_get_value (stmt);
5108 if (val
5109 && REFERENCE_CLASS_P (val))
5110 {
5111 tree tem = maybe_fold_reference (val, false);
5112 if (tem)
5113 {
5114 gimple_debug_bind_set_value (stmt, tem);
5115 changed = true;
5116 }
5117 }
5118 else if (val
5119 && TREE_CODE (val) == ADDR_EXPR)
5120 {
5121 tree ref = TREE_OPERAND (val, 0);
5122 tree tem = maybe_fold_reference (ref, false);
5123 if (tem)
5124 {
5125 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
5126 gimple_debug_bind_set_value (stmt, tem);
5127 changed = true;
5128 }
5129 }
5130 }
5131 break;
5132
5133 case GIMPLE_RETURN:
5134 {
5135 greturn *ret_stmt = as_a <greturn *> (stmt);
5136 tree ret = gimple_return_retval (ret_stmt);
5137
5138 if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
5139 {
5140 tree val = valueize (ret);
5141 if (val && val != ret
5142 && may_propagate_copy (ret, val))
5143 {
5144 gimple_return_set_retval (ret_stmt, val);
5145 changed = true;
5146 }
5147 }
5148 }
5149 break;
5150
5151 default:;
5152 }
5153
5154 stmt = gsi_stmt (*gsi);
5155
5156 /* Fold *& on the lhs. */
5157 if (gimple_has_lhs (stmt))
5158 {
5159 tree lhs = gimple_get_lhs (stmt);
5160 if (lhs && REFERENCE_CLASS_P (lhs))
5161 {
5162 tree new_lhs = maybe_fold_reference (lhs, true);
5163 if (new_lhs)
5164 {
5165 gimple_set_lhs (stmt, new_lhs);
5166 changed = true;
5167 }
5168 }
5169 }
5170
5171 fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
5172 return changed;
5173 }
5174
5175 /* Valueization callback that ends up not following SSA edges. */
5176
5177 tree
5178 no_follow_ssa_edges (tree)
5179 {
5180 return NULL_TREE;
5181 }
5182
5183 /* Valueization callback that ends up following single-use SSA edges only. */
5184
5185 tree
5186 follow_single_use_edges (tree val)
5187 {
5188 if (TREE_CODE (val) == SSA_NAME
5189 && !has_single_use (val))
5190 return NULL_TREE;
5191 return val;
5192 }
5193
5194 /* Valueization callback that follows all SSA edges. */
5195
5196 tree
5197 follow_all_ssa_edges (tree val)
5198 {
5199 return val;
5200 }
5201
5202 /* Fold the statement pointed to by GSI. In some cases, this function may
5203 replace the whole statement with a new one. Returns true iff folding
5204 makes any changes.
5205 The statement pointed to by GSI should be in valid gimple form but may
5206 be in an unfolded state resulting from, for example, constant
5207 propagation, which can produce *&x = 0. */
5208
5209 bool
5210 fold_stmt (gimple_stmt_iterator *gsi)
5211 {
5212 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
5213 }
5214
5215 bool
5216 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
5217 {
5218 return fold_stmt_1 (gsi, false, valueize);
5219 }
5220
5221 /* Perform the minimal folding on statement *GSI. Only operations like
5222 *&x created by constant propagation are handled. The statement cannot
5223 be replaced with a new one. Return true if the statement was
5224 changed, false otherwise.
5225 The statement *GSI should be in valid gimple form but may
5226 be in an unfolded state resulting from, for example, constant
5227 propagation, which can produce *&x = 0. */
5228
5229 bool
5230 fold_stmt_inplace (gimple_stmt_iterator *gsi)
5231 {
5232 gimple *stmt = gsi_stmt (*gsi);
5233 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
5234 gcc_assert (gsi_stmt (*gsi) == stmt);
5235 return changed;
5236 }
5237
5238 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
5239 if EXPR is null or we don't know how.
5240 If non-null, the result always has boolean type. */
5241
5242 static tree
5243 canonicalize_bool (tree expr, bool invert)
5244 {
5245 if (!expr)
5246 return NULL_TREE;
5247 else if (invert)
5248 {
5249 if (integer_nonzerop (expr))
5250 return boolean_false_node;
5251 else if (integer_zerop (expr))
5252 return boolean_true_node;
5253 else if (TREE_CODE (expr) == SSA_NAME)
5254 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
5255 build_int_cst (TREE_TYPE (expr), 0));
5256 else if (COMPARISON_CLASS_P (expr))
5257 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
5258 boolean_type_node,
5259 TREE_OPERAND (expr, 0),
5260 TREE_OPERAND (expr, 1));
5261 else
5262 return NULL_TREE;
5263 }
5264 else
5265 {
5266 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5267 return expr;
5268 if (integer_nonzerop (expr))
5269 return boolean_true_node;
5270 else if (integer_zerop (expr))
5271 return boolean_false_node;
5272 else if (TREE_CODE (expr) == SSA_NAME)
5273 return fold_build2 (NE_EXPR, boolean_type_node, expr,
5274 build_int_cst (TREE_TYPE (expr), 0));
5275 else if (COMPARISON_CLASS_P (expr))
5276 return fold_build2 (TREE_CODE (expr),
5277 boolean_type_node,
5278 TREE_OPERAND (expr, 0),
5279 TREE_OPERAND (expr, 1));
5280 else
5281 return NULL_TREE;
5282 }
5283 }
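
/* Illustrative behavior of canonicalize_bool: given the comparison
   a_1 < b_2 with INVERT set, it returns a_1 >= b_2; given a plain SSA
   name x_3, it returns x_3 == 0 when inverting and x_3 != 0 otherwise,
   in all cases of boolean type.  */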
5284
5285 /* Check to see if a boolean expression EXPR is logically equivalent to the
5286 comparison (OP1 CODE OP2). Check for various identities involving
5287 SSA_NAMEs. */
5288
5289 static bool
5290 same_bool_comparison_p (const_tree expr, enum tree_code code,
5291 const_tree op1, const_tree op2)
5292 {
5293 gimple *s;
5294
5295 /* The obvious case. */
5296 if (TREE_CODE (expr) == code
5297 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
5298 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
5299 return true;
5300
5301 /* Check for comparing (name, name != 0) and the case where expr
5302 is an SSA_NAME with a definition matching the comparison. */
5303 if (TREE_CODE (expr) == SSA_NAME
5304 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5305 {
5306 if (operand_equal_p (expr, op1, 0))
5307 return ((code == NE_EXPR && integer_zerop (op2))
5308 || (code == EQ_EXPR && integer_nonzerop (op2)));
5309 s = SSA_NAME_DEF_STMT (expr);
5310 if (is_gimple_assign (s)
5311 && gimple_assign_rhs_code (s) == code
5312 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
5313 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
5314 return true;
5315 }
5316
5317 /* If op1 is of the form (name != 0) or (name == 0), and the definition
5318 of name is a comparison, recurse. */
5319 if (TREE_CODE (op1) == SSA_NAME
5320 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
5321 {
5322 s = SSA_NAME_DEF_STMT (op1);
5323 if (is_gimple_assign (s)
5324 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
5325 {
5326 enum tree_code c = gimple_assign_rhs_code (s);
5327 if ((c == NE_EXPR && integer_zerop (op2))
5328 || (c == EQ_EXPR && integer_nonzerop (op2)))
5329 return same_bool_comparison_p (expr, c,
5330 gimple_assign_rhs1 (s),
5331 gimple_assign_rhs2 (s));
5332 if ((c == EQ_EXPR && integer_zerop (op2))
5333 || (c == NE_EXPR && integer_nonzerop (op2)))
5334 return same_bool_comparison_p (expr,
5335 invert_tree_comparison (c, false),
5336 gimple_assign_rhs1 (s),
5337 gimple_assign_rhs2 (s));
5338 }
5339 }
5340 return false;
5341 }
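
/* For example (illustrative): with the definition t_1 = a_2 < b_3,
   the SSA name t_1, the comparison (a_2 < b_3) and the test
   (t_1 != 0) are all recognized here as the same boolean value.  */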
5342
5343 /* Check to see if two boolean expressions OP1 and OP2 are logically
5344 equivalent. */
5345
5346 static bool
5347 same_bool_result_p (const_tree op1, const_tree op2)
5348 {
5349 /* Simple cases first. */
5350 if (operand_equal_p (op1, op2, 0))
5351 return true;
5352
5353 /* Check the cases where at least one of the operands is a comparison.
5354 These are a bit smarter than operand_equal_p in that they apply some
5355 identities on SSA_NAMEs. */
5356 if (COMPARISON_CLASS_P (op2)
5357 && same_bool_comparison_p (op1, TREE_CODE (op2),
5358 TREE_OPERAND (op2, 0),
5359 TREE_OPERAND (op2, 1)))
5360 return true;
5361 if (COMPARISON_CLASS_P (op1)
5362 && same_bool_comparison_p (op2, TREE_CODE (op1),
5363 TREE_OPERAND (op1, 0),
5364 TREE_OPERAND (op1, 1)))
5365 return true;
5366
5367 /* Default case. */
5368 return false;
5369 }
5370
5371 /* Forward declarations for some mutually recursive functions. */
5372
5373 static tree
5374 and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
5375 enum tree_code code2, tree op2a, tree op2b);
5376 static tree
5377 and_var_with_comparison (tree type, tree var, bool invert,
5378 enum tree_code code2, tree op2a, tree op2b);
5379 static tree
5380 and_var_with_comparison_1 (tree type, gimple *stmt,
5381 enum tree_code code2, tree op2a, tree op2b);
5382 static tree
5383 or_comparisons_1 (tree, enum tree_code code1, tree op1a, tree op1b,
5384 enum tree_code code2, tree op2a, tree op2b);
5385 static tree
5386 or_var_with_comparison (tree, tree var, bool invert,
5387 enum tree_code code2, tree op2a, tree op2b);
5388 static tree
5389 or_var_with_comparison_1 (tree, gimple *stmt,
5390 enum tree_code code2, tree op2a, tree op2b);
5391
5392 /* Helper function for and_comparisons_1: try to simplify the AND of the
5393 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5394 If INVERT is true, invert the value of VAR before doing the AND.
5395 Return NULL_TREE if we can't simplify this to a single expression. */
5396
5397 static tree
5398 and_var_with_comparison (tree type, tree var, bool invert,
5399 enum tree_code code2, tree op2a, tree op2b)
5400 {
5401 tree t;
5402 gimple *stmt = SSA_NAME_DEF_STMT (var);
5403
5404 /* We can only deal with variables whose definitions are assignments. */
5405 if (!is_gimple_assign (stmt))
5406 return NULL_TREE;
5407
5408 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5409 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
5410 Then we only have to consider the simpler non-inverted cases. */
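     /* E.g. (illustrative) with CODE2 == LT_EXPR and INVERT true, we
        compute var OR (op2a >= op2b) below and let canonicalize_bool
        undo the inversion at the end.  */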
5411 if (invert)
5412 t = or_var_with_comparison_1 (type, stmt,
5413 invert_tree_comparison (code2, false),
5414 op2a, op2b);
5415 else
5416 t = and_var_with_comparison_1 (type, stmt, code2, op2a, op2b);
5417 return canonicalize_bool (t, invert);
5418 }
5419
5420 /* Try to simplify the AND of the ssa variable defined by the assignment
5421 STMT with the comparison specified by (OP2A CODE2 OP2B).
5422 Return NULL_TREE if we can't simplify this to a single expression. */
5423
5424 static tree
5425 and_var_with_comparison_1 (tree type, gimple *stmt,
5426 enum tree_code code2, tree op2a, tree op2b)
5427 {
5428 tree var = gimple_assign_lhs (stmt);
5429 tree true_test_var = NULL_TREE;
5430 tree false_test_var = NULL_TREE;
5431 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5432
5433 /* Check for identities like (var AND (var == 0)) => false and (var AND (var != 0)) => var. */
5434 if (TREE_CODE (op2a) == SSA_NAME
5435 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5436 {
5437 if ((code2 == NE_EXPR && integer_zerop (op2b))
5438 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5439 {
5440 true_test_var = op2a;
5441 if (var == true_test_var)
5442 return var;
5443 }
5444 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5445 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5446 {
5447 false_test_var = op2a;
5448 if (var == false_test_var)
5449 return boolean_false_node;
5450 }
5451 }
5452
5453 /* If the definition is a comparison, recurse on it. */
5454 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5455 {
5456 tree t = and_comparisons_1 (type, innercode,
5457 gimple_assign_rhs1 (stmt),
5458 gimple_assign_rhs2 (stmt),
5459 code2,
5460 op2a,
5461 op2b);
5462 if (t)
5463 return t;
5464 }
5465
5466 /* If the definition is an AND or OR expression, we may be able to
5467 simplify by reassociating. */
5468 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5469 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
5470 {
5471 tree inner1 = gimple_assign_rhs1 (stmt);
5472 tree inner2 = gimple_assign_rhs2 (stmt);
5473 gimple *s;
5474 tree t;
5475 tree partial = NULL_TREE;
5476 bool is_and = (innercode == BIT_AND_EXPR);
5477
5478 /* Check for boolean identities that don't require recursive examination
5479 of inner1/inner2:
5480 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
5481 inner1 AND (inner1 OR inner2) => inner1
5482 !inner1 AND (inner1 AND inner2) => false
5483 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
5484 Likewise for similar cases involving inner2. */
5485 if (inner1 == true_test_var)
5486 return (is_and ? var : inner1);
5487 else if (inner2 == true_test_var)
5488 return (is_and ? var : inner2);
5489 else if (inner1 == false_test_var)
5490 return (is_and
5491 ? boolean_false_node
5492 : and_var_with_comparison (type, inner2, false, code2, op2a,
5493 op2b));
5494 else if (inner2 == false_test_var)
5495 return (is_and
5496 ? boolean_false_node
5497 : and_var_with_comparison (type, inner1, false, code2, op2a,
5498 op2b));
5499
5500 /* Next, redistribute/reassociate the AND across the inner tests.
5501 Compute the first partial result, (inner1 AND (op2a code op2b)) */
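     /* For example (illustrative), if inner1 is defined as x_1 < 5 and the
        second comparison is x_1 >= 5, the partial result is false, and for
        the AND case below the whole expression folds to boolean_false_node.  */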
5502 if (TREE_CODE (inner1) == SSA_NAME
5503 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5504 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5505 && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
5506 gimple_assign_rhs1 (s),
5507 gimple_assign_rhs2 (s),
5508 code2, op2a, op2b)))
5509 {
5510 /* Handle the AND case, where we are reassociating:
5511 (inner1 AND inner2) AND (op2a code2 op2b)
5512 => (t AND inner2)
5513 If the partial result t is a constant, we win. Otherwise
5514 continue on to try reassociating with the other inner test. */
5515 if (is_and)
5516 {
5517 if (integer_onep (t))
5518 return inner2;
5519 else if (integer_zerop (t))
5520 return boolean_false_node;
5521 }
5522
5523 /* Handle the OR case, where we are redistributing:
5524 (inner1 OR inner2) AND (op2a code2 op2b)
5525 => (t OR (inner2 AND (op2a code2 op2b))) */
5526 else if (integer_onep (t))
5527 return boolean_true_node;
5528
5529 /* Save partial result for later. */
5530 partial = t;
5531 }
5532
5533 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
5534 if (TREE_CODE (inner2) == SSA_NAME
5535 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5536 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5537 && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
5538 gimple_assign_rhs1 (s),
5539 gimple_assign_rhs2 (s),
5540 code2, op2a, op2b)))
5541 {
5542 /* Handle the AND case, where we are reassociating:
5543 (inner1 AND inner2) AND (op2a code2 op2b)
5544 => (inner1 AND t) */
5545 if (is_and)
5546 {
5547 if (integer_onep (t))
5548 return inner1;
5549 else if (integer_zerop (t))
5550 return boolean_false_node;
5551 /* If both are the same, we can apply the identity
5552 (x AND x) == x. */
5553 else if (partial && same_bool_result_p (t, partial))
5554 return t;
5555 }
5556
5557 /* Handle the OR case, where we are redistributing:
5558 (inner1 OR inner2) AND (op2a code2 op2b)
5559 => (t OR (inner1 AND (op2a code2 op2b)))
5560 => (t OR partial) */
5561 else
5562 {
5563 if (integer_onep (t))
5564 return boolean_true_node;
5565 else if (partial)
5566 {
5567 /* We already got a simplification for the other
5568 operand to the redistributed OR expression. The
5569 interesting case is when at least one is false.
5570 Or, if both are the same, we can apply the identity
5571 (x OR x) == x. */
5572 if (integer_zerop (partial))
5573 return t;
5574 else if (integer_zerop (t))
5575 return partial;
5576 else if (same_bool_result_p (t, partial))
5577 return t;
5578 }
5579 }
5580 }
5581 }
5582 return NULL_TREE;
5583 }
5584
5585 /* Try to simplify the AND of two comparisons defined by
5586 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5587 If this can be done without constructing an intermediate value,
5588 return the resulting tree; otherwise NULL_TREE is returned.
5589 This function is deliberately asymmetric as it recurses on SSA_DEFs
5590 in the first comparison but not the second. */
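/* Some folds this implements (illustrative):
     (x == 5) AND (x < 7)  => x == 5   (the constant satisfies the test)
     (x < 3)  AND (x > 5)  => false    (disjoint ranges)
     (x <= 7) AND (x >= 7) => x == 7   (singleton range)
     (x < 3)  AND (x <= 5) => x < 3    (more restrictive bound)  */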
5591
5592 static tree
5593 and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
5594 enum tree_code code2, tree op2a, tree op2b)
5595 {
5596 tree truth_type = truth_type_for (TREE_TYPE (op1a));
5597
5598 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
5599 if (operand_equal_p (op1a, op2a, 0)
5600 && operand_equal_p (op1b, op2b, 0))
5601 {
5602 /* Result will be either NULL_TREE, or a combined comparison. */
5603 tree t = combine_comparisons (UNKNOWN_LOCATION,
5604 TRUTH_ANDIF_EXPR, code1, code2,
5605 truth_type, op1a, op1b);
5606 if (t)
5607 return t;
5608 }
5609
5610 /* Likewise the swapped case of the above. */
5611 if (operand_equal_p (op1a, op2b, 0)
5612 && operand_equal_p (op1b, op2a, 0))
5613 {
5614 /* Result will be either NULL_TREE, or a combined comparison. */
5615 tree t = combine_comparisons (UNKNOWN_LOCATION,
5616 TRUTH_ANDIF_EXPR, code1,
5617 swap_tree_comparison (code2),
5618 truth_type, op1a, op1b);
5619 if (t)
5620 return t;
5621 }
5622
5623 /* If both comparisons are of the same value against constants, we might
5624 be able to merge them. */
5625 if (operand_equal_p (op1a, op2a, 0)
5626 && TREE_CODE (op1b) == INTEGER_CST
5627 && TREE_CODE (op2b) == INTEGER_CST)
5628 {
5629 int cmp = tree_int_cst_compare (op1b, op2b);
5630
5631 /* If we have (op1a == op1b), we should either be able to
5632 return that or FALSE, depending on whether the constant op1b
5633 also satisfies the other comparison against op2b. */
5634 if (code1 == EQ_EXPR)
5635 {
5636 bool done = true;
5637 bool val;
5638 switch (code2)
5639 {
5640 case EQ_EXPR: val = (cmp == 0); break;
5641 case NE_EXPR: val = (cmp != 0); break;
5642 case LT_EXPR: val = (cmp < 0); break;
5643 case GT_EXPR: val = (cmp > 0); break;
5644 case LE_EXPR: val = (cmp <= 0); break;
5645 case GE_EXPR: val = (cmp >= 0); break;
5646 default: done = false;
5647 }
5648 if (done)
5649 {
5650 if (val)
5651 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5652 else
5653 return boolean_false_node;
5654 }
5655 }
5656 /* Likewise if the second comparison is an == comparison. */
5657 else if (code2 == EQ_EXPR)
5658 {
5659 bool done = true;
5660 bool val;
5661 switch (code1)
5662 {
5663 case EQ_EXPR: val = (cmp == 0); break;
5664 case NE_EXPR: val = (cmp != 0); break;
5665 case LT_EXPR: val = (cmp > 0); break;
5666 case GT_EXPR: val = (cmp < 0); break;
5667 case LE_EXPR: val = (cmp >= 0); break;
5668 case GE_EXPR: val = (cmp <= 0); break;
5669 default: done = false;
5670 }
5671 if (done)
5672 {
5673 if (val)
5674 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5675 else
5676 return boolean_false_node;
5677 }
5678 }
5679
5680 /* Same business with inequality tests. */
5681 else if (code1 == NE_EXPR)
5682 {
5683 bool val;
5684 switch (code2)
5685 {
5686 case EQ_EXPR: val = (cmp != 0); break;
5687 case NE_EXPR: val = (cmp == 0); break;
5688 case LT_EXPR: val = (cmp >= 0); break;
5689 case GT_EXPR: val = (cmp <= 0); break;
5690 case LE_EXPR: val = (cmp > 0); break;
5691 case GE_EXPR: val = (cmp < 0); break;
5692 default:
5693 val = false;
5694 }
5695 if (val)
5696 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5697 }
5698 else if (code2 == NE_EXPR)
5699 {
5700 bool val;
5701 switch (code1)
5702 {
5703 case EQ_EXPR: val = (cmp == 0); break;
5704 case NE_EXPR: val = (cmp != 0); break;
5705 case LT_EXPR: val = (cmp <= 0); break;
5706 case GT_EXPR: val = (cmp >= 0); break;
5707 case LE_EXPR: val = (cmp < 0); break;
5708 case GE_EXPR: val = (cmp > 0); break;
5709 default:
5710 val = false;
5711 }
5712 if (val)
5713 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5714 }
5715
5716 /* Choose the more restrictive of two < or <= comparisons. */
5717 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5718 && (code2 == LT_EXPR || code2 == LE_EXPR))
5719 {
5720 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5721 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5722 else
5723 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5724 }
5725
5726 /* Likewise choose the more restrictive of two > or >= comparisons. */
5727 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5728 && (code2 == GT_EXPR || code2 == GE_EXPR))
5729 {
5730 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5731 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5732 else
5733 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5734 }
5735
5736 /* Check for singleton ranges. */
5737 else if (cmp == 0
5738 && ((code1 == LE_EXPR && code2 == GE_EXPR)
5739 || (code1 == GE_EXPR && code2 == LE_EXPR)))
5740 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
5741
5742 /* Check for disjoint ranges. */
5743 else if (cmp <= 0
5744 && (code1 == LT_EXPR || code1 == LE_EXPR)
5745 && (code2 == GT_EXPR || code2 == GE_EXPR))
5746 return boolean_false_node;
5747 else if (cmp >= 0
5748 && (code1 == GT_EXPR || code1 == GE_EXPR)
5749 && (code2 == LT_EXPR || code2 == LE_EXPR))
5750 return boolean_false_node;
5751 }
5752
5753 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5754 NAME's definition is a truth value. See if there are any simplifications
5755 that can be done against the NAME's definition. */
5756 if (TREE_CODE (op1a) == SSA_NAME
5757 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5758 && (integer_zerop (op1b) || integer_onep (op1b)))
5759 {
5760 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5761 || (code1 == NE_EXPR && integer_onep (op1b)));
5762 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
5763 switch (gimple_code (stmt))
5764 {
5765 case GIMPLE_ASSIGN:
5766 /* Try to simplify by copy-propagating the definition. */
5767 return and_var_with_comparison (type, op1a, invert, code2, op2a,
5768 op2b);
5769
5770 case GIMPLE_PHI:
5771 /* If every argument to the PHI produces the same result when
5772 ANDed with the second comparison, we win.
5773 Do not do this unless the type is bool since we need a bool
5774 result here anyway. */
5775 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5776 {
5777 tree result = NULL_TREE;
5778 unsigned i;
5779 for (i = 0; i < gimple_phi_num_args (stmt); i++)
5780 {
5781 tree arg = gimple_phi_arg_def (stmt, i);
5782
5783 /* If this PHI has itself as an argument, ignore it.
5784 If all the other args produce the same result,
5785 we're still OK. */
5786 if (arg == gimple_phi_result (stmt))
5787 continue;
5788 else if (TREE_CODE (arg) == INTEGER_CST)
5789 {
5790 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
5791 {
5792 if (!result)
5793 result = boolean_false_node;
5794 else if (!integer_zerop (result))
5795 return NULL_TREE;
5796 }
5797 else if (!result)
5798 result = fold_build2 (code2, boolean_type_node,
5799 op2a, op2b);
5800 else if (!same_bool_comparison_p (result,
5801 code2, op2a, op2b))
5802 return NULL_TREE;
5803 }
5804 else if (TREE_CODE (arg) == SSA_NAME
5805 && !SSA_NAME_IS_DEFAULT_DEF (arg))
5806 {
5807 tree temp;
5808 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
5809 /* In simple cases we can look through PHI nodes,
5810 but we have to be careful with loops.
5811 See PR49073. */
5812 if (! dom_info_available_p (CDI_DOMINATORS)
5813 || gimple_bb (def_stmt) == gimple_bb (stmt)
5814 || dominated_by_p (CDI_DOMINATORS,
5815 gimple_bb (def_stmt),
5816 gimple_bb (stmt)))
5817 return NULL_TREE;
5818 temp = and_var_with_comparison (type, arg, invert, code2,
5819 op2a, op2b);
5820 if (!temp)
5821 return NULL_TREE;
5822 else if (!result)
5823 result = temp;
5824 else if (!same_bool_result_p (result, temp))
5825 return NULL_TREE;
5826 }
5827 else
5828 return NULL_TREE;
5829 }
5830 return result;
5831 }
5832
5833 default:
5834 break;
5835 }
5836 }
5837 return NULL_TREE;
5838 }
5839
5840 /* Helper function for maybe_fold_and_comparisons and
5841 maybe_fold_or_comparisons: try to simplify the AND/OR (given by CODE)
5842 of the two comparisons (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B) via
5843 match.pd. Return NULL_TREE if we can't simplify this to a single
5844 expression. To keep the cost of building SSA names / gimple stmts
5845 low, we allocate them on the stack. This makes the code a bit ugly. */
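/* Conceptually we build, without touching the GC heap:
     lhs1 = op1a code1 op1b;
     lhs2 = op2a code2 op2b;
     res  = lhs1 CODE lhs2;    (CODE is BIT_AND_EXPR or BIT_IOR_EXPR)
   and ask the match.pd machinery to resimplify RES while following the
   SSA edges to the two on-stack defining statements.  */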
5846
5847 static tree
5848 maybe_fold_comparisons_from_match_pd (tree type, enum tree_code code,
5849 enum tree_code code1,
5850 tree op1a, tree op1b,
5851 enum tree_code code2, tree op2a,
5852 tree op2b)
5853 {
5854 /* Allocate gimple stmt1 on the stack. */
5855 gassign *stmt1
5856 = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
5857 gimple_init (stmt1, GIMPLE_ASSIGN, 3);
5858 gimple_assign_set_rhs_code (stmt1, code1);
5859 gimple_assign_set_rhs1 (stmt1, op1a);
5860 gimple_assign_set_rhs2 (stmt1, op1b);
5861
5862 /* Allocate gimple stmt2 on the stack. */
5863 gassign *stmt2
5864 = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
5865 gimple_init (stmt2, GIMPLE_ASSIGN, 3);
5866 gimple_assign_set_rhs_code (stmt2, code2);
5867 gimple_assign_set_rhs1 (stmt2, op2a);
5868 gimple_assign_set_rhs2 (stmt2, op2b);
5869
5870 /* Allocate the SSA name LHS1 on the stack. */
5871 tree lhs1 = (tree)XALLOCA (tree_ssa_name);
5872 memset (lhs1, 0, sizeof (tree_ssa_name));
5873 TREE_SET_CODE (lhs1, SSA_NAME);
5874 TREE_TYPE (lhs1) = type;
5875 init_ssa_name_imm_use (lhs1);
5876
5877 /* Allocate the SSA name LHS2 on the stack. */
5878 tree lhs2 = (tree)XALLOCA (tree_ssa_name);
5879 memset (lhs2, 0, sizeof (tree_ssa_name));
5880 TREE_SET_CODE (lhs2, SSA_NAME);
5881 TREE_TYPE (lhs2) = type;
5882 init_ssa_name_imm_use (lhs2);
5883
5884 gimple_assign_set_lhs (stmt1, lhs1);
5885 gimple_assign_set_lhs (stmt2, lhs2);
5886
5887 gimple_match_op op (gimple_match_cond::UNCOND, code,
5888 type, gimple_assign_lhs (stmt1),
5889 gimple_assign_lhs (stmt2));
5890 if (op.resimplify (NULL, follow_all_ssa_edges))
5891 {
5892 if (gimple_simplified_result_is_gimple_val (&op))
5893 {
5894 tree res = op.ops[0];
5895 if (res == lhs1)
5896 return build2 (code1, type, op1a, op1b);
5897 else if (res == lhs2)
5898 return build2 (code2, type, op2a, op2b);
5899 else
5900 return res;
5901 }
5902 }
5903
5904 return NULL_TREE;
5905 }
5906
5907 /* Try to simplify the AND of two comparisons, specified by
5908 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5909 If this can be simplified to a single expression (without
5910 introducing more SSA variables to hold intermediate values),
5911 return the resulting tree. Otherwise return NULL_TREE.
5912 If the result expression is non-null, it has boolean type. */
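/* For example (illustrative), (x < 3) AND (x < 5) folds to x < 3,
   while (x < 3) AND (x > 5) folds to boolean_false_node.  */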
5913
5914 tree
5915 maybe_fold_and_comparisons (tree type,
5916 enum tree_code code1, tree op1a, tree op1b,
5917 enum tree_code code2, tree op2a, tree op2b)
5918 {
5919 if (tree t = and_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b))
5920 return t;
5921
5922 if (tree t = and_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b))
5923 return t;
5924
5925 if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_AND_EXPR, code1,
5926 op1a, op1b, code2, op2a,
5927 op2b))
5928 return t;
5929
5930 return NULL_TREE;
5931 }
5932
5933 /* Helper function for or_comparisons_1: try to simplify the OR of the
5934 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5935 If INVERT is true, invert the value of VAR before doing the OR.
5936 Return NULL_TREE if we can't simplify this to a single expression. */
5937
5938 static tree
5939 or_var_with_comparison (tree type, tree var, bool invert,
5940 enum tree_code code2, tree op2a, tree op2b)
5941 {
5942 tree t;
5943 gimple *stmt = SSA_NAME_DEF_STMT (var);
5944
5945 /* We can only deal with variables whose definitions are assignments. */
5946 if (!is_gimple_assign (stmt))
5947 return NULL_TREE;
5948
5949 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5950 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
5951 Then we only have to consider the simpler non-inverted cases. */
5952 if (invert)
5953 t = and_var_with_comparison_1 (type, stmt,
5954 invert_tree_comparison (code2, false),
5955 op2a, op2b);
5956 else
5957 t = or_var_with_comparison_1 (type, stmt, code2, op2a, op2b);
5958 return canonicalize_bool (t, invert);
5959 }
5960
5961 /* Try to simplify the OR of the ssa variable defined by the assignment
5962 STMT with the comparison specified by (OP2A CODE2 OP2B).
5963 Return NULL_TREE if we can't simplify this to a single expression. */
5964
5965 static tree
5966 or_var_with_comparison_1 (tree type, gimple *stmt,
5967 enum tree_code code2, tree op2a, tree op2b)
5968 {
5969 tree var = gimple_assign_lhs (stmt);
5970 tree true_test_var = NULL_TREE;
5971 tree false_test_var = NULL_TREE;
5972 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5973
5974 /* Check for identities like (var OR (var == 0)) => true and (var OR (var != 0)) => var. */
5975 if (TREE_CODE (op2a) == SSA_NAME
5976 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5977 {
5978 if ((code2 == NE_EXPR && integer_zerop (op2b))
5979 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5980 {
5981 true_test_var = op2a;
5982 if (var == true_test_var)
5983 return var;
5984 }
5985 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5986 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5987 {
5988 false_test_var = op2a;
5989 if (var == false_test_var)
5990 return boolean_true_node;
5991 }
5992 }
5993
5994 /* If the definition is a comparison, recurse on it. */
5995 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5996 {
5997 tree t = or_comparisons_1 (type, innercode,
5998 gimple_assign_rhs1 (stmt),
5999 gimple_assign_rhs2 (stmt),
6000 code2,
6001 op2a,
6002 op2b);
6003 if (t)
6004 return t;
6005 }
6006
6007 /* If the definition is an AND or OR expression, we may be able to
6008 simplify by reassociating. */
6009 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
6010 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
6011 {
6012 tree inner1 = gimple_assign_rhs1 (stmt);
6013 tree inner2 = gimple_assign_rhs2 (stmt);
6014 gimple *s;
6015 tree t;
6016 tree partial = NULL_TREE;
6017 bool is_or = (innercode == BIT_IOR_EXPR);
6018
6019 /* Check for boolean identities that don't require recursive examination
6020 of inner1/inner2:
6021 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
6022 inner1 OR (inner1 AND inner2) => inner1
6023 !inner1 OR (inner1 OR inner2) => true
6024 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
6025 */
6026 if (inner1 == true_test_var)
6027 return (is_or ? var : inner1);
6028 else if (inner2 == true_test_var)
6029 return (is_or ? var : inner2);
6030 else if (inner1 == false_test_var)
6031 return (is_or
6032 ? boolean_true_node
6033 : or_var_with_comparison (type, inner2, false, code2, op2a,
6034 op2b));
6035 else if (inner2 == false_test_var)
6036 return (is_or
6037 ? boolean_true_node
6038 : or_var_with_comparison (type, inner1, false, code2, op2a,
6039 op2b));
6040
6041 /* Next, redistribute/reassociate the OR across the inner tests.
6042 Compute the first partial result, (inner1 OR (op2a code op2b)) */
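     /* For example (illustrative), if inner1 is defined as x_1 <= 5 and the
        second comparison is x_1 > 5, the partial result is true, and for
        the OR case below the whole expression folds to boolean_true_node.  */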
6043 if (TREE_CODE (inner1) == SSA_NAME
6044 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
6045 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
6046 && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
6047 gimple_assign_rhs1 (s),
6048 gimple_assign_rhs2 (s),
6049 code2, op2a, op2b)))
6050 {
6051 /* Handle the OR case, where we are reassociating:
6052 (inner1 OR inner2) OR (op2a code2 op2b)
6053 => (t OR inner2)
6054 If the partial result t is a constant, we win. Otherwise
6055 continue on to try reassociating with the other inner test. */
6056 if (is_or)
6057 {
6058 if (integer_onep (t))
6059 return boolean_true_node;
6060 else if (integer_zerop (t))
6061 return inner2;
6062 }
6063
6064 /* Handle the AND case, where we are redistributing:
6065 (inner1 AND inner2) OR (op2a code2 op2b)
6066 => (t AND (inner2 OR (op2a code op2b))) */
6067 else if (integer_zerop (t))
6068 return boolean_false_node;
6069
6070 /* Save partial result for later. */
6071 partial = t;
6072 }
6073
6074 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
6075 if (TREE_CODE (inner2) == SSA_NAME
6076 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
6077 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
6078 && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
6079 gimple_assign_rhs1 (s),
6080 gimple_assign_rhs2 (s),
6081 code2, op2a, op2b)))
6082 {
6083 /* Handle the OR case, where we are reassociating:
6084 (inner1 OR inner2) OR (op2a code2 op2b)
6085 => (inner1 OR t)
6086 => (t OR partial) */
6087 if (is_or)
6088 {
6089 if (integer_zerop (t))
6090 return inner1;
6091 else if (integer_onep (t))
6092 return boolean_true_node;
6093 /* If both are the same, we can apply the identity
6094 (x OR x) == x. */
6095 else if (partial && same_bool_result_p (t, partial))
6096 return t;
6097 }
6098
6099 /* Handle the AND case, where we are redistributing:
6100 (inner1 AND inner2) OR (op2a code2 op2b)
6101 => (t AND (inner1 OR (op2a code2 op2b)))
6102 => (t AND partial) */
6103 else
6104 {
6105 if (integer_zerop (t))
6106 return boolean_false_node;
6107 else if (partial)
6108 {
6109 /* We already got a simplification for the other
6110 operand to the redistributed AND expression. The
6111 interesting case is when at least one is true.
6112 Or, if both are the same, we can apply the identity
6113 (x AND x) == x. */
6114 if (integer_onep (partial))
6115 return t;
6116 else if (integer_onep (t))
6117 return partial;
6118 else if (same_bool_result_p (t, partial))
6119 return t;
6120 }
6121 }
6122 }
6123 }
6124 return NULL_TREE;
6125 }
6126
6127 /* Try to simplify the OR of two comparisons defined by
6128 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
6129 If this can be done without constructing an intermediate value,
6130 return the resulting tree; otherwise NULL_TREE is returned.
6131 This function is deliberately asymmetric as it recurses on SSA_DEFs
6132 in the first comparison but not the second. */
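/* Some folds this implements (illustrative):
     (x != 5) OR (x < 7)  => true      (5 satisfies the second test)
     (x < 5)  OR (x > 5)  => x != 5    (punctured range)
     (x <= 5) OR (x >= 3) => true      (ranges cover everything)
     (x < 3)  OR (x < 5)  => x < 5     (less restrictive bound)  */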
6133
6134 static tree
6135 or_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
6136 enum tree_code code2, tree op2a, tree op2b)
6137 {
6138 tree truth_type = truth_type_for (TREE_TYPE (op1a));
6139
6140 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
6141 if (operand_equal_p (op1a, op2a, 0)
6142 && operand_equal_p (op1b, op2b, 0))
6143 {
6144 /* Result will be either NULL_TREE, or a combined comparison. */
6145 tree t = combine_comparisons (UNKNOWN_LOCATION,
6146 TRUTH_ORIF_EXPR, code1, code2,
6147 truth_type, op1a, op1b);
6148 if (t)
6149 return t;
6150 }
6151
6152 /* Likewise the swapped case of the above. */
6153 if (operand_equal_p (op1a, op2b, 0)
6154 && operand_equal_p (op1b, op2a, 0))
6155 {
6156 /* Result will be either NULL_TREE, or a combined comparison. */
6157 tree t = combine_comparisons (UNKNOWN_LOCATION,
6158 TRUTH_ORIF_EXPR, code1,
6159 swap_tree_comparison (code2),
6160 truth_type, op1a, op1b);
6161 if (t)
6162 return t;
6163 }
6164
6165 /* If both comparisons are of the same value against constants, we might
6166 be able to merge them. */
6167 if (operand_equal_p (op1a, op2a, 0)
6168 && TREE_CODE (op1b) == INTEGER_CST
6169 && TREE_CODE (op2b) == INTEGER_CST)
6170 {
6171 int cmp = tree_int_cst_compare (op1b, op2b);
6172
6173 /* If we have (op1a != op1b), we should either be able to
6174 return that or TRUE, depending on whether the constant op1b
6175 also satisfies the other comparison against op2b. */
6176 if (code1 == NE_EXPR)
6177 {
6178 bool done = true;
6179 bool val;
6180 switch (code2)
6181 {
6182 case EQ_EXPR: val = (cmp == 0); break;
6183 case NE_EXPR: val = (cmp != 0); break;
6184 case LT_EXPR: val = (cmp < 0); break;
6185 case GT_EXPR: val = (cmp > 0); break;
6186 case LE_EXPR: val = (cmp <= 0); break;
6187 case GE_EXPR: val = (cmp >= 0); break;
6188 default: done = false;
6189 }
6190 if (done)
6191 {
6192 if (val)
6193 return boolean_true_node;
6194 else
6195 return fold_build2 (code1, boolean_type_node, op1a, op1b);
6196 }
6197 }
6198 /* Likewise if the second comparison is a != comparison. */
6199 else if (code2 == NE_EXPR)
6200 {
6201 bool done = true;
6202 bool val;
6203 switch (code1)
6204 {
6205 case EQ_EXPR: val = (cmp == 0); break;
6206 case NE_EXPR: val = (cmp != 0); break;
6207 case LT_EXPR: val = (cmp > 0); break;
6208 case GT_EXPR: val = (cmp < 0); break;
6209 case LE_EXPR: val = (cmp >= 0); break;
6210 case GE_EXPR: val = (cmp <= 0); break;
6211 default: done = false;
6212 }
6213 if (done)
6214 {
6215 if (val)
6216 return boolean_true_node;
6217 else
6218 return fold_build2 (code2, boolean_type_node, op2a, op2b);
6219 }
6220 }
6221
6222 /* See if an equality test is redundant with the other comparison. */
6223 else if (code1 == EQ_EXPR)
6224 {
6225 bool val;
6226 switch (code2)
6227 {
6228 case EQ_EXPR: val = (cmp == 0); break;
6229 case NE_EXPR: val = (cmp != 0); break;
6230 case LT_EXPR: val = (cmp < 0); break;
6231 case GT_EXPR: val = (cmp > 0); break;
6232 case LE_EXPR: val = (cmp <= 0); break;
6233 case GE_EXPR: val = (cmp >= 0); break;
6234 default:
6235 val = false;
6236 }
6237 if (val)
6238 return fold_build2 (code2, boolean_type_node, op2a, op2b);
6239 }
6240 else if (code2 == EQ_EXPR)
6241 {
6242 bool val;
6243 switch (code1)
6244 {
6245 case EQ_EXPR: val = (cmp == 0); break;
6246 case NE_EXPR: val = (cmp != 0); break;
6247 case LT_EXPR: val = (cmp > 0); break;
6248 case GT_EXPR: val = (cmp < 0); break;
6249 case LE_EXPR: val = (cmp >= 0); break;
6250 case GE_EXPR: val = (cmp <= 0); break;
6251 default:
6252 val = false;
6253 }
6254 if (val)
6255 return fold_build2 (code1, boolean_type_node, op1a, op1b);
6256 }
6257
6258 /* Choose the less restrictive of two < or <= comparisons. */
6259 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
6260 && (code2 == LT_EXPR || code2 == LE_EXPR))
6261 {
6262 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
6263 return fold_build2 (code2, boolean_type_node, op2a, op2b);
6264 else
6265 return fold_build2 (code1, boolean_type_node, op1a, op1b);
6266 }
6267
6268 /* Likewise choose the less restrictive of two > or >= comparisons. */
6269 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
6270 && (code2 == GT_EXPR || code2 == GE_EXPR))
6271 {
6272 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
6273 return fold_build2 (code2, boolean_type_node, op2a, op2b);
6274 else
6275 return fold_build2 (code1, boolean_type_node, op1a, op1b);
6276 }
6277
6278 /* Check for singleton ranges. */
6279 else if (cmp == 0
6280 && ((code1 == LT_EXPR && code2 == GT_EXPR)
6281 || (code1 == GT_EXPR && code2 == LT_EXPR)))
6282 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
6283
6284 /* Check for less/greater pairs that don't restrict the range at all. */
6285 else if (cmp >= 0
6286 && (code1 == LT_EXPR || code1 == LE_EXPR)
6287 && (code2 == GT_EXPR || code2 == GE_EXPR))
6288 return boolean_true_node;
6289 else if (cmp <= 0
6290 && (code1 == GT_EXPR || code1 == GE_EXPR)
6291 && (code2 == LT_EXPR || code2 == LE_EXPR))
6292 return boolean_true_node;
6293 }
6294
6295 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
6296 NAME's definition is a truth value. See if there are any simplifications
6297 that can be done against the NAME's definition. */
6298 if (TREE_CODE (op1a) == SSA_NAME
6299 && (code1 == NE_EXPR || code1 == EQ_EXPR)
6300 && (integer_zerop (op1b) || integer_onep (op1b)))
6301 {
6302 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
6303 || (code1 == NE_EXPR && integer_onep (op1b)));
6304 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
6305 switch (gimple_code (stmt))
6306 {
6307 case GIMPLE_ASSIGN:
6308 /* Try to simplify by copy-propagating the definition. */
6309 return or_var_with_comparison (type, op1a, invert, code2, op2a,
6310 op2b);
6311
6312 case GIMPLE_PHI:
6313 /* If every argument to the PHI produces the same result when
6314 ORed with the second comparison, we win.
6315 Do not do this unless the type is bool since we need a bool
6316 result here anyway. */
6317 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
6318 {
6319 tree result = NULL_TREE;
6320 unsigned i;
6321 for (i = 0; i < gimple_phi_num_args (stmt); i++)
6322 {
6323 tree arg = gimple_phi_arg_def (stmt, i);
6324
6325 /* If this PHI has itself as an argument, ignore it.
6326 If all the other args produce the same result,
6327 we're still OK. */
6328 if (arg == gimple_phi_result (stmt))
6329 continue;
6330 else if (TREE_CODE (arg) == INTEGER_CST)
6331 {
6332 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
6333 {
6334 if (!result)
6335 result = boolean_true_node;
6336 else if (!integer_onep (result))
6337 return NULL_TREE;
6338 }
6339 else if (!result)
6340 result = fold_build2 (code2, boolean_type_node,
6341 op2a, op2b);
6342 else if (!same_bool_comparison_p (result,
6343 code2, op2a, op2b))
6344 return NULL_TREE;
6345 }
6346 else if (TREE_CODE (arg) == SSA_NAME
6347 && !SSA_NAME_IS_DEFAULT_DEF (arg))
6348 {
6349 tree temp;
6350 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6351 /* In simple cases we can look through PHI nodes,
6352 but we have to be careful with loops.
6353 See PR49073. */
6354 if (! dom_info_available_p (CDI_DOMINATORS)
6355 || gimple_bb (def_stmt) == gimple_bb (stmt)
6356 || dominated_by_p (CDI_DOMINATORS,
6357 gimple_bb (def_stmt),
6358 gimple_bb (stmt)))
6359 return NULL_TREE;
6360 temp = or_var_with_comparison (type, arg, invert, code2,
6361 op2a, op2b);
6362 if (!temp)
6363 return NULL_TREE;
6364 else if (!result)
6365 result = temp;
6366 else if (!same_bool_result_p (result, temp))
6367 return NULL_TREE;
6368 }
6369 else
6370 return NULL_TREE;
6371 }
6372 return result;
6373 }
6374
6375 default:
6376 break;
6377 }
6378 }
6379 return NULL_TREE;
6380 }
6381
6382 /* Try to simplify the OR of two comparisons, specified by
6383 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
6384 If this can be simplified to a single expression (without
6385 introducing more SSA variables to hold intermediate values),
6386 return the resulting tree. Otherwise return NULL_TREE.
6387 If the result expression is non-null, it has boolean type. */
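/* For example (illustrative), (x > 5) OR (x == 5) folds to x >= 5
   via combine_comparisons in or_comparisons_1.  */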
6388
6389 tree
6390 maybe_fold_or_comparisons (tree type,
6391 enum tree_code code1, tree op1a, tree op1b,
6392 enum tree_code code2, tree op2a, tree op2b)
6393 {
6394 if (tree t = or_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b))
6395 return t;
6396
6397 if (tree t = or_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b))
6398 return t;
6399
6400 if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_IOR_EXPR, code1,
6401 op1a, op1b, code2, op2a,
6402 op2b))
6403 return t;
6404
6405 return NULL_TREE;
6406 }
6407
6408 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6409
6410 Either NULL_TREE, a simplified but non-constant value, or a
6411 constant is returned.
6412
6413 ??? This should go into a gimple-fold-inline.h file to be eventually
6414 privatized with the single valueize function used in the various TUs
6415 to avoid the indirect function call overhead. */
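/* For instance (an assumed, illustrative example), given the statement
   _2 = _1 + 7 and a VALUEIZE callback mapping _1 to 3, this returns
   the constant 10.  */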
6416
6417 tree
6418 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
6419 tree (*gvalueize) (tree))
6420 {
6421 gimple_match_op res_op;
6422 /* ??? The SSA propagators do not correctly deal with following SSA use-def
6423 edges if there are intermediate VARYING defs. For this reason
6424 do not follow SSA edges here even though SCCVN can technically
6425 just deal fine with that. */
6426 if (gimple_simplify (stmt, &res_op, NULL, gvalueize, valueize))
6427 {
6428 tree res = NULL_TREE;
6429 if (gimple_simplified_result_is_gimple_val (&res_op))
6430 res = res_op.ops[0];
6431 else if (mprts_hook)
6432 res = mprts_hook (&res_op);
6433 if (res)
6434 {
6435 if (dump_file && dump_flags & TDF_DETAILS)
6436 {
6437 fprintf (dump_file, "Match-and-simplified ");
6438 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
6439 fprintf (dump_file, " to ");
6440 print_generic_expr (dump_file, res);
6441 fprintf (dump_file, "\n");
6442 }
6443 return res;
6444 }
6445 }
6446
6447 location_t loc = gimple_location (stmt);
6448 switch (gimple_code (stmt))
6449 {
6450 case GIMPLE_ASSIGN:
6451 {
6452 enum tree_code subcode = gimple_assign_rhs_code (stmt);
6453
6454 switch (get_gimple_rhs_class (subcode))
6455 {
6456 case GIMPLE_SINGLE_RHS:
6457 {
6458 tree rhs = gimple_assign_rhs1 (stmt);
6459 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
6460
6461 if (TREE_CODE (rhs) == SSA_NAME)
6462 {
6463 /* If the RHS is an SSA_NAME, return its known constant value,
6464 if any. */
6465 return (*valueize) (rhs);
6466 }
6467 /* Handle propagating invariant addresses into address
6468 operations. */
6469 else if (TREE_CODE (rhs) == ADDR_EXPR
6470 && !is_gimple_min_invariant (rhs))
6471 {
6472 poly_int64 offset = 0;
6473 tree base;
6474 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
6475 &offset,
6476 valueize);
6477 if (base
6478 && (CONSTANT_CLASS_P (base)
6479 || decl_address_invariant_p (base)))
6480 return build_invariant_address (TREE_TYPE (rhs),
6481 base, offset);
6482 }
6483 else if (TREE_CODE (rhs) == CONSTRUCTOR
6484 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
6485 && known_eq (CONSTRUCTOR_NELTS (rhs),
6486 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
6487 {
6488 unsigned i, nelts;
6489 tree val;
6490
6491 nelts = CONSTRUCTOR_NELTS (rhs);
6492 tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
6493 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
6494 {
6495 val = (*valueize) (val);
6496 if (TREE_CODE (val) == INTEGER_CST
6497 || TREE_CODE (val) == REAL_CST
6498 || TREE_CODE (val) == FIXED_CST)
6499 vec.quick_push (val);
6500 else
6501 return NULL_TREE;
6502 }
6503
6504 return vec.build ();
6505 }
6506 if (subcode == OBJ_TYPE_REF)
6507 {
6508 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
6509 /* If callee is constant, we can fold away the wrapper. */
6510 if (is_gimple_min_invariant (val))
6511 return val;
6512 }
6513
6514 if (kind == tcc_reference)
6515 {
6516 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
6517 || TREE_CODE (rhs) == REALPART_EXPR
6518 || TREE_CODE (rhs) == IMAGPART_EXPR)
6519 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6520 {
6521 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6522 return fold_unary_loc (EXPR_LOCATION (rhs),
6523 TREE_CODE (rhs),
6524 TREE_TYPE (rhs), val);
6525 }
6526 else if (TREE_CODE (rhs) == BIT_FIELD_REF
6527 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6528 {
6529 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6530 return fold_ternary_loc (EXPR_LOCATION (rhs),
6531 TREE_CODE (rhs),
6532 TREE_TYPE (rhs), val,
6533 TREE_OPERAND (rhs, 1),
6534 TREE_OPERAND (rhs, 2));
6535 }
6536 else if (TREE_CODE (rhs) == MEM_REF
6537 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6538 {
6539 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6540 if (TREE_CODE (val) == ADDR_EXPR
6541 && is_gimple_min_invariant (val))
6542 {
6543 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
6544 unshare_expr (val),
6545 TREE_OPERAND (rhs, 1));
6546 if (tem)
6547 rhs = tem;
6548 }
6549 }
6550 return fold_const_aggregate_ref_1 (rhs, valueize);
6551 }
6552 else if (kind == tcc_declaration)
6553 return get_symbol_constant_value (rhs);
6554 return rhs;
6555 }
6556
6557 case GIMPLE_UNARY_RHS:
6558 return NULL_TREE;
6559
6560 case GIMPLE_BINARY_RHS:
6561 /* Translate &x + CST into an invariant form suitable for
6562 further propagation. */
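        /* E.g. (illustrative) if the first operand valueizes to &a and
           the second to 4, this builds &MEM_REF (&a, 4), printed roughly
           as &MEM[(void *)&a + 4B], which is a gimple min-invariant.  */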
6563 if (subcode == POINTER_PLUS_EXPR)
6564 {
6565 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6566 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6567 if (TREE_CODE (op0) == ADDR_EXPR
6568 && TREE_CODE (op1) == INTEGER_CST)
6569 {
6570 tree off = fold_convert (ptr_type_node, op1);
6571 return build_fold_addr_expr_loc
6572 (loc,
6573 fold_build2 (MEM_REF,
6574 TREE_TYPE (TREE_TYPE (op0)),
6575 unshare_expr (op0), off));
6576 }
6577 }
6578 /* Canonicalize bool != 0 and bool == 0 appearing after
6579 valueization. While gimple_simplify handles this
6580 it can get confused by the ~X == 1 -> X == 0 transform
6581 which we can't reduce to an SSA name or a constant
6582 (and we have no way to tell gimple_simplify to not
6583 consider those transforms in the first place). */
6584 else if (subcode == EQ_EXPR
6585 || subcode == NE_EXPR)
6586 {
6587 tree lhs = gimple_assign_lhs (stmt);
6588 tree op0 = gimple_assign_rhs1 (stmt);
6589 if (useless_type_conversion_p (TREE_TYPE (lhs),
6590 TREE_TYPE (op0)))
6591 {
6592 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6593 op0 = (*valueize) (op0);
6594 if (TREE_CODE (op0) == INTEGER_CST)
6595 std::swap (op0, op1);
6596 if (TREE_CODE (op1) == INTEGER_CST
6597 && ((subcode == NE_EXPR && integer_zerop (op1))
6598 || (subcode == EQ_EXPR && integer_onep (op1))))
6599 return op0;
6600 }
6601 }
6602 return NULL_TREE;
6603
6604 case GIMPLE_TERNARY_RHS:
6605 {
6606 /* Handle ternary operators that can appear in GIMPLE form. */
6607 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6608 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6609 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
6610 return fold_ternary_loc (loc, subcode,
6611 gimple_expr_type (stmt), op0, op1, op2);
6612 }
6613
6614 default:
6615 gcc_unreachable ();
6616 }
6617 }
6618
6619 case GIMPLE_CALL:
6620 {
6621 tree fn;
6622 gcall *call_stmt = as_a <gcall *> (stmt);
6623
6624 if (gimple_call_internal_p (stmt))
6625 {
6626 enum tree_code subcode = ERROR_MARK;
6627 switch (gimple_call_internal_fn (stmt))
6628 {
6629 case IFN_UBSAN_CHECK_ADD:
6630 subcode = PLUS_EXPR;
6631 break;
6632 case IFN_UBSAN_CHECK_SUB:
6633 subcode = MINUS_EXPR;
6634 break;
6635 case IFN_UBSAN_CHECK_MUL:
6636 subcode = MULT_EXPR;
6637 break;
6638 case IFN_BUILTIN_EXPECT:
6639 {
6640 tree arg0 = gimple_call_arg (stmt, 0);
6641 tree op0 = (*valueize) (arg0);
6642 if (TREE_CODE (op0) == INTEGER_CST)
6643 return op0;
6644 return NULL_TREE;
6645 }
6646 default:
6647 return NULL_TREE;
6648 }
6649 tree arg0 = gimple_call_arg (stmt, 0);
6650 tree arg1 = gimple_call_arg (stmt, 1);
6651 tree op0 = (*valueize) (arg0);
6652 tree op1 = (*valueize) (arg1);
6653
6654 if (TREE_CODE (op0) != INTEGER_CST
6655 || TREE_CODE (op1) != INTEGER_CST)
6656 {
6657 switch (subcode)
6658 {
6659 case MULT_EXPR:
6660 /* x * 0 = 0 * x = 0 without overflow. */
6661 if (integer_zerop (op0) || integer_zerop (op1))
6662 return build_zero_cst (TREE_TYPE (arg0));
6663 break;
6664 case MINUS_EXPR:
6665 /* y - y = 0 without overflow. */
6666 if (operand_equal_p (op0, op1, 0))
6667 return build_zero_cst (TREE_TYPE (arg0));
6668 break;
6669 default:
6670 break;
6671 }
6672 }
6673 tree res
6674 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
6675 if (res
6676 && TREE_CODE (res) == INTEGER_CST
6677 && !TREE_OVERFLOW (res))
6678 return res;
6679 return NULL_TREE;
6680 }
6681
6682 fn = (*valueize) (gimple_call_fn (stmt));
6683 if (TREE_CODE (fn) == ADDR_EXPR
6684 && fndecl_built_in_p (TREE_OPERAND (fn, 0))
6685 && gimple_builtin_call_types_compatible_p (stmt,
6686 TREE_OPERAND (fn, 0)))
6687 {
6688 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
6689 tree retval;
6690 unsigned i;
6691 for (i = 0; i < gimple_call_num_args (stmt); ++i)
6692 args[i] = (*valueize) (gimple_call_arg (stmt, i));
6693 retval = fold_builtin_call_array (loc,
6694 gimple_call_return_type (call_stmt),
6695 fn, gimple_call_num_args (stmt), args);
6696 if (retval)
6697 {
6698 /* fold_call_expr wraps the result inside a NOP_EXPR. */
6699 STRIP_NOPS (retval);
6700 retval = fold_convert (gimple_call_return_type (call_stmt),
6701 retval);
6702 }
6703 return retval;
6704 }
6705 return NULL_TREE;
6706 }
6707
6708 default:
6709 return NULL_TREE;
6710 }
6711 }
6712
6713 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6714 Returns NULL_TREE if folding to a constant is not possible, otherwise
6715 returns a constant according to is_gimple_min_invariant. */
6716
6717 tree
6718 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
6719 {
6720 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
6721 if (res && is_gimple_min_invariant (res))
6722 return res;
6723 return NULL_TREE;
6724 }
6725
6726
6727 /* The following set of functions are supposed to fold references using
6728 their constant initializers. */
6729
6730 /* See if we can find a constructor defining the value of BASE.
6731 When we know the constructor at a constant offset (such as when
6732 BASE is array[40] and we know the constructor of array), then
6733 BIT_OFFSET is adjusted accordingly.
6734
6735 As a special case, return error_mark_node when constructor
6736 is not explicitly available, but it is known to be zero
6737 such as 'static const int a;'. */
6738 static tree
6739 get_base_constructor (tree base, poly_int64_pod *bit_offset,
6740 tree (*valueize)(tree))
6741 {
6742 poly_int64 bit_offset2, size, max_size;
6743 bool reverse;
6744
6745 if (TREE_CODE (base) == MEM_REF)
6746 {
6747 poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
6748 if (!boff.to_shwi (bit_offset))
6749 return NULL_TREE;
6750
6751 if (valueize
6752 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
6753 base = valueize (TREE_OPERAND (base, 0));
6754 if (!base || TREE_CODE (base) != ADDR_EXPR)
6755 return NULL_TREE;
6756 base = TREE_OPERAND (base, 0);
6757 }
6758 else if (valueize
6759 && TREE_CODE (base) == SSA_NAME)
6760 base = valueize (base);
6761
6762 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
6763 DECL_INITIAL. If BASE is a nested reference into another
6764 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
6765 the inner reference. */
6766 switch (TREE_CODE (base))
6767 {
6768 case VAR_DECL:
6769 case CONST_DECL:
6770 {
6771 tree init = ctor_for_folding (base);
6772
6773 /* Our semantics are the exact opposite of ctor_for_folding's;
6774 NULL means unknown, while error_mark_node is 0. */
6775 if (init == error_mark_node)
6776 return NULL_TREE;
6777 if (!init)
6778 return error_mark_node;
6779 return init;
6780 }
6781
6782 case VIEW_CONVERT_EXPR:
6783 return get_base_constructor (TREE_OPERAND (base, 0),
6784 bit_offset, valueize);
6785
6786 case ARRAY_REF:
6787 case COMPONENT_REF:
6788 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
6789 &reverse);
6790 if (!known_size_p (max_size) || maybe_ne (size, max_size))
6791 return NULL_TREE;
6792 *bit_offset += bit_offset2;
6793 return get_base_constructor (base, bit_offset, valueize);
6794
6795 case CONSTRUCTOR:
6796 return base;
6797
6798 default:
6799 if (CONSTANT_CLASS_P (base))
6800 return base;
6801
6802 return NULL_TREE;
6803 }
6804 }
6805
6806 /* CTOR is CONSTRUCTOR of an array type. Fold a reference of SIZE bits
6807 to the memory at bit OFFSET. When non-null, TYPE is the expected
6808 type of the reference; otherwise the type of the referenced element
6809 is used instead. When SIZE is zero, attempt to fold a reference to
6810 the entire element which OFFSET refers to. Increment *SUBOFF by
6811 the bit offset of the accessed element. */
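/* E.g. (illustrative) for static const int a[4] = { 1, 2, 3, 4 } with
   32-bit int, OFFSET 64 and SIZE 32 give ACCESS_INDEX 2, and the
   reference folds to the constant 3.  */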
6812
6813 static tree
6814 fold_array_ctor_reference (tree type, tree ctor,
6815 unsigned HOST_WIDE_INT offset,
6816 unsigned HOST_WIDE_INT size,
6817 tree from_decl,
6818 unsigned HOST_WIDE_INT *suboff)
6819 {
6820 offset_int low_bound;
6821 offset_int elt_size;
6822 offset_int access_index;
6823 tree domain_type = NULL_TREE;
6824 HOST_WIDE_INT inner_offset;
6825
6826 /* Compute low bound and elt size. */
6827 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
6828 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
6829 if (domain_type && TYPE_MIN_VALUE (domain_type))
6830 {
6831 /* Static constructors for variably sized objects make no sense. */
6832 if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
6833 return NULL_TREE;
6834 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
6835 }
6836 else
6837 low_bound = 0;
6838 /* Static constructors for variably sized objects make no sense. */
6839 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
6840 return NULL_TREE;
6841 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
6842
6843 /* When TYPE is non-null, verify that it specifies a constant-sized
6844 access of a multiple of the array element size. Avoid division
6845 by zero below when ELT_SIZE is zero, such as with the result of
6846 an initializer for a zero-length array or an empty struct. */
6847 if (elt_size == 0
6848 || (type
6849 && (!TYPE_SIZE_UNIT (type)
6850 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST)))
6851 return NULL_TREE;
6852
6853 /* Compute the array index we look for. */
6854 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
6855 elt_size);
6856 access_index += low_bound;
6857
6858 /* And offset within the access. */
6859 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
6860
6861 if (size > elt_size.to_uhwi () * BITS_PER_UNIT)
6862 {
6863 /* native_encode_expr constraints. */
6864 if (size > MAX_BITSIZE_MODE_ANY_MODE
6865 || size % BITS_PER_UNIT != 0
6866 || inner_offset % BITS_PER_UNIT != 0)
6867 return NULL_TREE;
6868
6869 unsigned ctor_idx;
6870 tree val = get_array_ctor_element_at_index (ctor, access_index,
6871 &ctor_idx);
6872 if (!val && ctor_idx >= CONSTRUCTOR_NELTS (ctor))
6873 return build_zero_cst (type);
6874
6875 /* native-encode adjacent ctor elements. */
6876 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
6877 unsigned bufoff = 0;
6878 offset_int index = 0;
6879 offset_int max_index = access_index;
6880 constructor_elt *elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
6881 if (!val)
6882 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
6883 else if (!CONSTANT_CLASS_P (val))
6884 return NULL_TREE;
6885 if (!elt->index)
6886 ;
6887 else if (TREE_CODE (elt->index) == RANGE_EXPR)
6888 {
6889 index = wi::to_offset (TREE_OPERAND (elt->index, 0));
6890 max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
6891 }
6892 else
6893 index = max_index = wi::to_offset (elt->index);
6894 index = wi::umax (index, access_index);
6895 do
6896 {
6897 int len = native_encode_expr (val, buf + bufoff,
6898 elt_size.to_uhwi (),
6899 inner_offset / BITS_PER_UNIT);
6900 if (len != elt_size - inner_offset / BITS_PER_UNIT)
6901 return NULL_TREE;
6902 inner_offset = 0;
6903 bufoff += len;
6904
6905 access_index += 1;
6906 if (wi::cmpu (access_index, index) == 0)
6907 val = elt->value;
6908 else if (wi::cmpu (access_index, max_index) > 0)
6909 {
6910 ctor_idx++;
6911 if (ctor_idx >= CONSTRUCTOR_NELTS (ctor))
6912 {
6913 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
6914 ++max_index;
6915 }
6916 else
6917 {
6918 elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
6919 index = 0;
6920 max_index = access_index;
6921 if (!elt->index)
6922 ;
6923 else if (TREE_CODE (elt->index) == RANGE_EXPR)
6924 {
6925 index = wi::to_offset (TREE_OPERAND (elt->index, 0));
6926 max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
6927 }
6928 else
6929 index = max_index = wi::to_offset (elt->index);
6930 index = wi::umax (index, access_index);
6931 if (wi::cmpu (access_index, index) == 0)
6932 val = elt->value;
6933 else
6934 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
6935 }
6936 }
6937 }
6938 while (bufoff < size / BITS_PER_UNIT);
6939 *suboff += size;
6940 return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
6941 }
6942
6943 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
6944 {
6945 if (!size && TREE_CODE (val) != CONSTRUCTOR)
6946 {
6947 /* For the final reference to the entire accessed element
6948 (SIZE is zero), reset INNER_OFFSET, disregard TYPE (which
6949 may be null) in favor of the type of the element, and set
6950 SIZE to the size of the accessed element. */
6951 inner_offset = 0;
6952 type = TREE_TYPE (val);
6953 size = elt_size.to_uhwi () * BITS_PER_UNIT;
6954 }
6955
6956 *suboff += (access_index * elt_size * BITS_PER_UNIT).to_uhwi ();
6957 return fold_ctor_reference (type, val, inner_offset, size, from_decl,
6958 suboff);
6959 }
6960
6961 /* Memory not explicitly mentioned in constructor is 0 (or
6962 the reference is out of range). */
6963 return type ? build_zero_cst (type) : NULL_TREE;
6964 }
6965
6966 /* CTOR is CONSTRUCTOR of an aggregate or vector. Fold a reference
6967 of SIZE bits to the memory at bit OFFSET. When non-null, TYPE
6968 is the expected type of the reference; otherwise the type of
6969 the referenced member is used instead. When SIZE is zero,
6970 attempt to fold a reference to the entire member which OFFSET
6971 refers to. Increment *SUBOFF by the bit offset
6972 of the accessed member. */
6973
6974 static tree
6975 fold_nonarray_ctor_reference (tree type, tree ctor,
6976 unsigned HOST_WIDE_INT offset,
6977 unsigned HOST_WIDE_INT size,
6978 tree from_decl,
6979 unsigned HOST_WIDE_INT *suboff)
6980 {
6981 unsigned HOST_WIDE_INT cnt;
6982 tree cfield, cval;
6983
6984 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
6985 cval)
6986 {
6987 tree byte_offset = DECL_FIELD_OFFSET (cfield);
6988 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
6989 tree field_size = DECL_SIZE (cfield);
6990
6991 if (!field_size)
6992 {
6993 /* Determine the size of the flexible array member from
6994 the size of the initializer provided for it. */
6995 field_size = TYPE_SIZE (TREE_TYPE (cval));
6996 }
6997
6998 /* Variable-sized objects in static constructors make no sense,
6999 but field_size can be NULL for flexible array members. */
7000 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
7001 && TREE_CODE (byte_offset) == INTEGER_CST
7002 && (field_size != NULL_TREE
7003 ? TREE_CODE (field_size) == INTEGER_CST
7004 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
7005
7006 /* Compute bit offset of the field. */
7007 offset_int bitoffset
7008 = (wi::to_offset (field_offset)
7009 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
7010 /* Compute bit offset where the field ends. */
7011 offset_int bitoffset_end;
7012 if (field_size != NULL_TREE)
7013 bitoffset_end = bitoffset + wi::to_offset (field_size);
7014 else
7015 bitoffset_end = 0;
7016
7017 /* Compute the bit offset of the end of the desired access.
7018 As a special case, if the size of the desired access is
7019 zero, assume the access is to the entire field (and let
7020 the caller make any necessary adjustments based on the bit
7021 offset accumulated in *SUBOFF). */
7022 offset_int access_end = offset_int (offset);
7023 if (size)
7024 access_end += size;
7025 else
7026 access_end = bitoffset_end;
7027
7028 /* Is there any overlap between the desired access at
7029 [OFFSET, OFFSET+SIZE) and the offset of the field within
7030 the object at [BITOFFSET, BITOFFSET_END)? */
7031 if (wi::cmps (access_end, bitoffset) > 0
7032 && (field_size == NULL_TREE
7033 || wi::lts_p (offset, bitoffset_end)))
7034 {
7035 *suboff += bitoffset.to_uhwi ();
7036
7037 if (!size && TREE_CODE (cval) != CONSTRUCTOR)
7038 {
7039 /* For the final reference to the entire accessed member
7040 (SIZE is zero), reset OFFSET, disregard TYPE (which may
7041 be null) in favor of the type of the member, and set
7042 SIZE to the size of the accessed member. */
7043 offset = bitoffset.to_uhwi ();
7044 type = TREE_TYPE (cval);
7045 size = (bitoffset_end - bitoffset).to_uhwi ();
7046 }
7047
7048 /* We do have overlap. Now see if the field is large enough
7049 to cover the access. Give up for accesses that extend
7050 beyond the end of the object or that span multiple fields. */
7051 if (wi::cmps (access_end, bitoffset_end) > 0)
7052 return NULL_TREE;
7053 if (offset < bitoffset)
7054 return NULL_TREE;
7055
7056 offset_int inner_offset = offset_int (offset) - bitoffset;
7057 return fold_ctor_reference (type, cval,
7058 inner_offset.to_uhwi (), size,
7059 from_decl, suboff);
7060 }
7061 }
7062
7063 if (!type)
7064 return NULL_TREE;
7065
7066 return build_zero_cst (type);
7067 }
7068
7069 /* CTOR is a value initializing memory. Fold a reference of TYPE and
7070 bit size POLY_SIZE to the memory at bit POLY_OFFSET. When POLY_SIZE
7071 is zero, attempt to fold a reference to the entire subobject
7072 which OFFSET refers to. This is used when folding accesses to
7073 string members of aggregates. When non-null, set *SUBOFF to
7074 the bit offset of the accessed subobject. */
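/* For CONSTANT_CLASS_P ctors, byte-aligned and byte-sized reads go
   through native_encode_expr, which produces the constant's target
   byte representation, and native_interpret_expr, which reads the
   selected bytes back in TYPE.  */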
7075
7076 tree
7077 fold_ctor_reference (tree type, tree ctor, const poly_uint64 &poly_offset,
7078 const poly_uint64 &poly_size, tree from_decl,
7079 unsigned HOST_WIDE_INT *suboff /* = NULL */)
7080 {
7081 tree ret;
7082
7083 /* We found the field with exact match. */
7084 if (type
7085 && useless_type_conversion_p (type, TREE_TYPE (ctor))
7086 && known_eq (poly_offset, 0U))
7087 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
7088
7089 /* The remaining optimizations need a constant size and offset. */
7090 unsigned HOST_WIDE_INT size, offset;
7091 if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
7092 return NULL_TREE;
7093
7094 /* We are at the end of the walk; see if we can view-convert the
7095 result. */
7096 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
7097 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
7098 && !compare_tree_int (TYPE_SIZE (type), size)
7099 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
7100 {
7101 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
7102 if (ret)
7103 {
7104 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
7105 if (ret)
7106 STRIP_USELESS_TYPE_CONVERSION (ret);
7107 }
7108 return ret;
7109 }
7110 /* For constants and byte-aligned/sized reads, try to go through
7111 native_encode/interpret. */
7112 if (CONSTANT_CLASS_P (ctor)
7113 && BITS_PER_UNIT == 8
7114 && offset % BITS_PER_UNIT == 0
7115 && size % BITS_PER_UNIT == 0
7116 && size <= MAX_BITSIZE_MODE_ANY_MODE)
7117 {
7118 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
7119 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
7120 offset / BITS_PER_UNIT);
7121 if (len > 0)
7122 return native_interpret_expr (type, buf, len);
7123 }
7124 if (TREE_CODE (ctor) == CONSTRUCTOR)
7125 {
7126 unsigned HOST_WIDE_INT dummy = 0;
7127 if (!suboff)
7128 suboff = &dummy;
7129
7130 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
7131 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
7132 return fold_array_ctor_reference (type, ctor, offset, size,
7133 from_decl, suboff);
7134
7135 return fold_nonarray_ctor_reference (type, ctor, offset, size,
7136 from_decl, suboff);
7137 }
7138
7139 return NULL_TREE;
7140 }
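/* Usage sketch (hedged; A_DECL is a hypothetical VAR_DECL): for

     static const int a[2] = { 10, 20 };

   a caller holding the initializer CONSTRUCTOR can fold a load of
   a[1] on a target with 32-bit int via

     tree val = fold_ctor_reference (integer_type_node,
                                     DECL_INITIAL (a_decl),
                                     32, 32, a_decl);

   which dispatches to fold_array_ctor_reference and returns the
   INTEGER_CST 20, or NULL_TREE when the reference cannot be
   folded.  */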
7141
7142 /* Return the tree representing the element referenced by T if T is an
7143 ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
7144 names using VALUEIZE. Return NULL_TREE otherwise. */
7145
7146 tree
7147 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
7148 {
7149 tree ctor, idx, base;
7150 poly_int64 offset, size, max_size;
7151 tree tem;
7152 bool reverse;
7153
7154 if (TREE_THIS_VOLATILE (t))
7155 return NULL_TREE;
7156
7157 if (DECL_P (t))
7158 return get_symbol_constant_value (t);
7159
7160 tem = fold_read_from_constant_string (t);
7161 if (tem)
7162 return tem;
7163
7164 switch (TREE_CODE (t))
7165 {
7166 case ARRAY_REF:
7167 case ARRAY_RANGE_REF:
7168 /* Constant indexes are handled well by get_base_constructor.
7169 We only need to special-case variable offsets.
7170 FIXME: This code can't handle nested references with variable indexes
7171 (they will be handled only by iteration of ccp). Perhaps we can bring
7172 get_ref_base_and_extent here and make it use a valueize callback. */
7173 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
7174 && valueize
7175 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
7176 && poly_int_tree_p (idx))
7177 {
7178 tree low_bound, unit_size;
7179
7180 /* If the resulting bit-offset is constant, track it. */
7181 if ((low_bound = array_ref_low_bound (t),
7182 poly_int_tree_p (low_bound))
7183 && (unit_size = array_ref_element_size (t),
7184 tree_fits_uhwi_p (unit_size)))
7185 {
7186 poly_offset_int woffset
7187 = wi::sext (wi::to_poly_offset (idx)
7188 - wi::to_poly_offset (low_bound),
7189 TYPE_PRECISION (TREE_TYPE (idx)));
7190 woffset *= tree_to_uhwi (unit_size);
7191 woffset *= BITS_PER_UNIT;
7192 if (woffset.to_shwi (&offset))
7193 {
7194 base = TREE_OPERAND (t, 0);
7195 ctor = get_base_constructor (base, &offset, valueize);
7196 /* Empty constructor. Always fold to 0. */
7197 if (ctor == error_mark_node)
7198 return build_zero_cst (TREE_TYPE (t));
7199 /* Out-of-bounds array access. Value is undefined,
7200 but don't fold. */
7201 if (maybe_lt (offset, 0))
7202 return NULL_TREE;
7203 /* We cannot determine ctor. */
7204 if (!ctor)
7205 return NULL_TREE;
7206 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
7207 tree_to_uhwi (unit_size)
7208 * BITS_PER_UNIT,
7209 base);
7210 }
7211 }
7212 }
7213 /* Fallthru. */
7214
7215 case COMPONENT_REF:
7216 case BIT_FIELD_REF:
7217 case TARGET_MEM_REF:
7218 case MEM_REF:
7219 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
7220 ctor = get_base_constructor (base, &offset, valueize);
7221
7222 /* Empty constructor. Always fold to 0. */
7223 if (ctor == error_mark_node)
7224 return build_zero_cst (TREE_TYPE (t));
7225 /* We do not know the precise address. */
7226 if (!known_size_p (max_size) || maybe_ne (max_size, size))
7227 return NULL_TREE;
7228 /* We cannot determine ctor. */
7229 if (!ctor)
7230 return NULL_TREE;
7231
7232 /* Out-of-bounds array access. Value is undefined, but don't fold. */
7233 if (maybe_lt (offset, 0))
7234 return NULL_TREE;
7235
7236 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
7237 base);
7238
7239 case REALPART_EXPR:
7240 case IMAGPART_EXPR:
7241 {
7242 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
7243 if (c && TREE_CODE (c) == COMPLEX_CST)
7244 return fold_build1_loc (EXPR_LOCATION (t),
7245 TREE_CODE (t), TREE_TYPE (t), c);
7246 break;
7247 }
7248
7249 default:
7250 break;
7251 }
7252
7253 return NULL_TREE;
7254 }
7255
7256 tree
7257 fold_const_aggregate_ref (tree t)
7258 {
7259 return fold_const_aggregate_ref_1 (t, NULL);
7260 }
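/* Usage sketch (hedged; the valueize callback stands in for whatever
   lattice lookup the caller implements, as CCP does): given

     static const int c[2] = { 10, 20 };
     _3 = c[i_2];

   and a callback that valueizes i_2 to the constant 1, calling
   fold_const_aggregate_ref_1 on the ARRAY_REF takes the
   variable-index path above and returns the INTEGER_CST 20.  */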
7261
7262 /* Look up the virtual method with index TOKEN in virtual table V
7263 at OFFSET.
7264 If CAN_REFER is non-NULL, set it to false when the method
7265 is not referable or the virtual table is ill-formed (such as
7266 rewritten by a non-C++ symbol); in that case return NULL. */
7267
7268 tree
7269 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
7270 tree v,
7271 unsigned HOST_WIDE_INT offset,
7272 bool *can_refer)
7273 {
7274 tree vtable = v, init, fn;
7275 unsigned HOST_WIDE_INT size;
7276 unsigned HOST_WIDE_INT elt_size, access_index;
7277 tree domain_type;
7278
7279 if (can_refer)
7280 *can_refer = true;
7281
7282 /* First of all, double-check we have a virtual table. */
7283 if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
7284 {
7285 /* Pass down that we lost track of the target. */
7286 if (can_refer)
7287 *can_refer = false;
7288 return NULL_TREE;
7289 }
7290
7291 init = ctor_for_folding (v);
7292
7293 /* The virtual tables should always be born with constructors
7294 and we should always assume that they are available for
7295 folding. At the moment we do not stream them in all cases,
7296 but it should never happen that the ctor seems unreachable. */
7297 gcc_assert (init);
7298 if (init == error_mark_node)
7299 {
7300 /* Pass down that we lost track of the target. */
7301 if (can_refer)
7302 *can_refer = false;
7303 return NULL_TREE;
7304 }
7305 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
7306 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
7307 offset *= BITS_PER_UNIT;
7308 offset += token * size;
7309
7310 /* Look up the value in the constructor, which is assumed to be an
7311 array. This is equivalent to
7312 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
7313 offset, size, NULL);
7314 but in constant time. We expect that the frontend produced a simple
7315 array without indexed initializers. */
7316
7317 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
7318 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
7319 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
7320 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
7321
7322 access_index = offset / BITS_PER_UNIT / elt_size;
7323 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
7324
7325 /* The C++ FE can now produce indexed fields, and we check if the indexes
7326 match. */
7327 if (access_index < CONSTRUCTOR_NELTS (init))
7328 {
7329 fn = CONSTRUCTOR_ELT (init, access_index)->value;
7330 tree idx = CONSTRUCTOR_ELT (init, access_index)->index;
7331 gcc_checking_assert (!idx || tree_to_uhwi (idx) == access_index);
7332 STRIP_NOPS (fn);
7333 }
7334 else
7335 fn = NULL;
7336
7337 /* For a type-inconsistent program we may end up looking up a virtual
7338 method in a virtual table that does not contain TOKEN entries. We may
7339 overrun the virtual table and pick up a constant or RTTI info pointer.
7340 In any case the call is undefined. */
7341 if (!fn
7342 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
7343 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
7344 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
7345 else
7346 {
7347 fn = TREE_OPERAND (fn, 0);
7348
7349 /* When the cgraph node is missing and the function is not public, we
7350 cannot devirtualize. This can happen in WHOPR when the actual method
7351 ends up in another partition, because we found the devirtualization
7352 possibility too late. */
7353 if (!can_refer_decl_in_current_unit_p (fn, vtable))
7354 {
7355 if (can_refer)
7356 {
7357 *can_refer = false;
7358 return fn;
7359 }
7360 return NULL_TREE;
7361 }
7362 }
7363
7364 /* Make sure we create a cgraph node for functions we'll reference.
7365 They can be non-existent if the reference comes from an entry
7366 of an external vtable for example. */
7367 cgraph_node::get_create (fn);
7368
7369 return fn;
7370 }
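/* A worked example of the index computation above (a sketch): on an
   LP64 target SIZE is 64 bits and ELT_SIZE is 8 bytes, so TOKEN 3
   with a byte OFFSET of 0 gives

     offset       = 0 * BITS_PER_UNIT + 3 * 64 = 192
     access_index = 192 / 8 / 8 = 3

   i.e. the fourth constructor element of the vtable array.  */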
7371
7372 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
7373 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
7374 KNOWN_BINFO carries the binfo describing the true type of
7375 OBJ_TYPE_REF_OBJECT(REF).
7376 If CAN_REFER is non-NULL, set it to false when the method
7377 is not referable or the virtual table is ill-formed (such as
7378 rewritten by a non-C++ symbol); in that case return NULL. */
7379
7380 tree
7381 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
7382 bool *can_refer)
7383 {
7384 unsigned HOST_WIDE_INT offset;
7385 tree v;
7386
7387 v = BINFO_VTABLE (known_binfo);
7388 /* If there is no virtual method table, leave the OBJ_TYPE_REF alone. */
7389 if (!v)
7390 return NULL_TREE;
7391
7392 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
7393 {
7394 if (can_refer)
7395 *can_refer = false;
7396 return NULL_TREE;
7397 }
7398 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
7399 }
7400
7401 /* Given a pointer value T, return a simplified version of an
7402 indirection through T, or NULL_TREE if no simplification is
7403 possible. Note that the resulting type may differ from the
7404 pointed-to type, as long as it remains compatible from the
7405 langhooks point of view. */
7406
7407 tree
7408 gimple_fold_indirect_ref (tree t)
7409 {
7410 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
7411 tree sub = t;
7412 tree subtype;
7413
7414 STRIP_NOPS (sub);
7415 subtype = TREE_TYPE (sub);
7416 if (!POINTER_TYPE_P (subtype)
7417 || TYPE_REF_CAN_ALIAS_ALL (ptype))
7418 return NULL_TREE;
7419
7420 if (TREE_CODE (sub) == ADDR_EXPR)
7421 {
7422 tree op = TREE_OPERAND (sub, 0);
7423 tree optype = TREE_TYPE (op);
7424 /* *&p => p */
7425 if (useless_type_conversion_p (type, optype))
7426 return op;
7427
7428 /* *(foo *)&fooarray => fooarray[0] */
7429 if (TREE_CODE (optype) == ARRAY_TYPE
7430 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
7431 && useless_type_conversion_p (type, TREE_TYPE (optype)))
7432 {
7433 tree type_domain = TYPE_DOMAIN (optype);
7434 tree min_val = size_zero_node;
7435 if (type_domain && TYPE_MIN_VALUE (type_domain))
7436 min_val = TYPE_MIN_VALUE (type_domain);
7437 if (TREE_CODE (min_val) == INTEGER_CST)
7438 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
7439 }
7440 /* *(foo *)&complexfoo => __real__ complexfoo */
7441 else if (TREE_CODE (optype) == COMPLEX_TYPE
7442 && useless_type_conversion_p (type, TREE_TYPE (optype)))
7443 return fold_build1 (REALPART_EXPR, type, op);
7444 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
7445 else if (TREE_CODE (optype) == VECTOR_TYPE
7446 && useless_type_conversion_p (type, TREE_TYPE (optype)))
7447 {
7448 tree part_width = TYPE_SIZE (type);
7449 tree index = bitsize_int (0);
7450 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
7451 }
7452 }
7453
7454 /* *(p + CST) -> ... */
7455 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
7456 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
7457 {
7458 tree addr = TREE_OPERAND (sub, 0);
7459 tree off = TREE_OPERAND (sub, 1);
7460 tree addrtype;
7461
7462 STRIP_NOPS (addr);
7463 addrtype = TREE_TYPE (addr);
7464
7465 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
7466 if (TREE_CODE (addr) == ADDR_EXPR
7467 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
7468 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
7469 && tree_fits_uhwi_p (off))
7470 {
7471 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
7472 tree part_width = TYPE_SIZE (type);
7473 unsigned HOST_WIDE_INT part_widthi
7474 = tree_to_shwi (part_width) / BITS_PER_UNIT;
7475 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
7476 tree index = bitsize_int (indexi);
7477 if (known_lt (offset / part_widthi,
7478 TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
7479 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
7480 part_width, index);
7481 }
7482
7483 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
7484 if (TREE_CODE (addr) == ADDR_EXPR
7485 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
7486 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
7487 {
7488 tree size = TYPE_SIZE_UNIT (type);
7489 if (tree_int_cst_equal (size, off))
7490 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
7491 }
7492
7493 /* *(p + CST) -> MEM_REF <p, CST>. */
7494 if (TREE_CODE (addr) != ADDR_EXPR
7495 || DECL_P (TREE_OPERAND (addr, 0)))
7496 return fold_build2 (MEM_REF, type,
7497 addr,
7498 wide_int_to_tree (ptype, wi::to_wide (off)));
7499 }
7500
7501 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
7502 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
7503 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
7504 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
7505 {
7506 tree type_domain;
7507 tree min_val = size_zero_node;
7508 tree osub = sub;
7509 sub = gimple_fold_indirect_ref (sub);
7510 if (! sub)
7511 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
7512 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
7513 if (type_domain && TYPE_MIN_VALUE (type_domain))
7514 min_val = TYPE_MIN_VALUE (type_domain);
7515 if (TREE_CODE (min_val) == INTEGER_CST)
7516 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
7517 }
7518
7519 return NULL_TREE;
7520 }
7521
7522 /* Return true if CODE is an operation that, when operating on signed
7523 integer types, involves undefined behavior on overflow and that
7524 can be expressed with unsigned arithmetic. */
7525
7526 bool
7527 arith_code_with_undefined_signed_overflow (tree_code code)
7528 {
7529 switch (code)
7530 {
7531 case ABS_EXPR:
7532 case PLUS_EXPR:
7533 case MINUS_EXPR:
7534 case MULT_EXPR:
7535 case NEGATE_EXPR:
7536 case POINTER_PLUS_EXPR:
7537 return true;
7538 default:
7539 return false;
7540 }
7541 }
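/* For example, arith_code_with_undefined_signed_overflow (PLUS_EXPR)
   is true: signed addition overflows undefinedly but has a
   well-defined unsigned counterpart.  BIT_IOR_EXPR cannot overflow
   at all, and TRUNC_DIV_EXPR can overflow (INT_MIN / -1) but has no
   equivalent unsigned form, so both yield false.  */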
7542
7543 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
7544 operation that can be transformed to unsigned arithmetic by converting
7545 its operands, carrying out the operation in the corresponding unsigned
7546 type and converting the result back to the original type.
7547
7548 Returns a sequence of statements that replace STMT and also contain
7549 a modified form of STMT itself. */
7550
7551 gimple_seq
7552 rewrite_to_defined_overflow (gimple *stmt)
7553 {
7554 if (dump_file && (dump_flags & TDF_DETAILS))
7555 {
7556 fprintf (dump_file, "rewriting stmt with undefined signed "
7557 "overflow ");
7558 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
7559 }
7560
7561 tree lhs = gimple_assign_lhs (stmt);
7562 tree type = unsigned_type_for (TREE_TYPE (lhs));
7563 gimple_seq stmts = NULL;
7564 if (gimple_assign_rhs_code (stmt) == ABS_EXPR)
7565 gimple_assign_set_rhs_code (stmt, ABSU_EXPR);
7566 else
7567 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
7568 {
7569 tree op = gimple_op (stmt, i);
7570 op = gimple_convert (&stmts, type, op);
7571 gimple_set_op (stmt, i, op);
7572 }
7573 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
7574 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
7575 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
7576 gimple_seq_add_stmt (&stmts, stmt);
7577 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
7578 gimple_seq_add_stmt (&stmts, cvt);
7579
7580 return stmts;
7581 }
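/* An illustration of the transform (a sketch with made-up SSA
   names): for 32-bit int the statement

     _1 = a_2 + b_3;

   is rewritten into the sequence

     _4 = (unsigned int) a_2;
     _5 = (unsigned int) b_3;
     _6 = _4 + _5;
     _1 = (int) _6;

   where the third statement is STMT itself, retargeted to the fresh
   unsigned LHS _6.  */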
7582
7583
7584 /* The valueization hook we use for the gimple_build API simplification.
7585 This makes us match fold_buildN behavior by only combining with
7586 statements in the sequence(s) we are currently building. */
7587
7588 static tree
7589 gimple_build_valueize (tree op)
7590 {
7591 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
7592 return op;
7593 return NULL_TREE;
7594 }
7595
7596 /* Build the expression CODE OP0 of type TYPE with location LOC,
7597 simplifying it first if possible. Returns the built
7598 expression value and appends statements possibly defining it
7599 to SEQ. */
7600
7601 tree
7602 gimple_build (gimple_seq *seq, location_t loc,
7603 enum tree_code code, tree type, tree op0)
7604 {
7605 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
7606 if (!res)
7607 {
7608 res = create_tmp_reg_or_ssa_name (type);
7609 gimple *stmt;
7610 if (code == REALPART_EXPR
7611 || code == IMAGPART_EXPR
7612 || code == VIEW_CONVERT_EXPR)
7613 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
7614 else
7615 stmt = gimple_build_assign (res, code, op0);
7616 gimple_set_location (stmt, loc);
7617 gimple_seq_add_stmt_without_update (seq, stmt);
7618 }
7619 return res;
7620 }
7621
7622 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
7623 simplifying it first if possible. Returns the built
7624 expression value and appends statements possibly defining it
7625 to SEQ. */
7626
7627 tree
7628 gimple_build (gimple_seq *seq, location_t loc,
7629 enum tree_code code, tree type, tree op0, tree op1)
7630 {
7631 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
7632 if (!res)
7633 {
7634 res = create_tmp_reg_or_ssa_name (type);
7635 gimple *stmt = gimple_build_assign (res, code, op0, op1);
7636 gimple_set_location (stmt, loc);
7637 gimple_seq_add_stmt_without_update (seq, stmt);
7638 }
7639 return res;
7640 }
7641
7642 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
7643 simplifying it first if possible. Returns the built
7644 expression value and appends statements possibly defining it
7645 to SEQ. */
7646
7647 tree
7648 gimple_build (gimple_seq *seq, location_t loc,
7649 enum tree_code code, tree type, tree op0, tree op1, tree op2)
7650 {
7651 tree res = gimple_simplify (code, type, op0, op1, op2,
7652 seq, gimple_build_valueize);
7653 if (!res)
7654 {
7655 res = create_tmp_reg_or_ssa_name (type);
7656 gimple *stmt;
7657 if (code == BIT_FIELD_REF)
7658 stmt = gimple_build_assign (res, code,
7659 build3 (code, type, op0, op1, op2));
7660 else
7661 stmt = gimple_build_assign (res, code, op0, op1, op2);
7662 gimple_set_location (stmt, loc);
7663 gimple_seq_add_stmt_without_update (seq, stmt);
7664 }
7665 return res;
7666 }
7667
7668 /* Build the call FN (ARG0) with a result of type TYPE
7669 (or no result if TYPE is void) with location LOC,
7670 simplifying it first if possible. Returns the built
7671 expression value (or NULL_TREE if TYPE is void) and appends
7672 statements possibly defining it to SEQ. */
7673
7674 tree
7675 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7676 tree type, tree arg0)
7677 {
7678 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
7679 if (!res)
7680 {
7681 gcall *stmt;
7682 if (internal_fn_p (fn))
7683 stmt = gimple_build_call_internal (as_internal_fn (fn), 1, arg0);
7684 else
7685 {
7686 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7687 stmt = gimple_build_call (decl, 1, arg0);
7688 }
7689 if (!VOID_TYPE_P (type))
7690 {
7691 res = create_tmp_reg_or_ssa_name (type);
7692 gimple_call_set_lhs (stmt, res);
7693 }
7694 gimple_set_location (stmt, loc);
7695 gimple_seq_add_stmt_without_update (seq, stmt);
7696 }
7697 return res;
7698 }
7699
7700 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
7701 (or no result if TYPE is void) with location LOC,
7702 simplifying it first if possible. Returns the built
7703 expression value (or NULL_TREE if TYPE is void) and appends
7704 statements possibly defining it to SEQ. */
7705
7706 tree
7707 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7708 tree type, tree arg0, tree arg1)
7709 {
7710 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
7711 if (!res)
7712 {
7713 gcall *stmt;
7714 if (internal_fn_p (fn))
7715 stmt = gimple_build_call_internal (as_internal_fn (fn), 2, arg0, arg1);
7716 else
7717 {
7718 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7719 stmt = gimple_build_call (decl, 2, arg0, arg1);
7720 }
7721 if (!VOID_TYPE_P (type))
7722 {
7723 res = create_tmp_reg_or_ssa_name (type);
7724 gimple_call_set_lhs (stmt, res);
7725 }
7726 gimple_set_location (stmt, loc);
7727 gimple_seq_add_stmt_without_update (seq, stmt);
7728 }
7729 return res;
7730 }
7731
7732 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
7733 (or no result if TYPE is void) with location LOC,
7734 simplifying it first if possible. Returns the built
7735 expression value (or NULL_TREE if TYPE is void) and appends
7736 statements possibly defining it to SEQ. */
7737
7738 tree
7739 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7740 tree type, tree arg0, tree arg1, tree arg2)
7741 {
7742 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
7743 seq, gimple_build_valueize);
7744 if (!res)
7745 {
7746 gcall *stmt;
7747 if (internal_fn_p (fn))
7748 stmt = gimple_build_call_internal (as_internal_fn (fn),
7749 3, arg0, arg1, arg2);
7750 else
7751 {
7752 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7753 stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
7754 }
7755 if (!VOID_TYPE_P (type))
7756 {
7757 res = create_tmp_reg_or_ssa_name (type);
7758 gimple_call_set_lhs (stmt, res);
7759 }
7760 gimple_set_location (stmt, loc);
7761 gimple_seq_add_stmt_without_update (seq, stmt);
7762 }
7763 return res;
7764 }
7765
7766 /* Build the conversion (TYPE) OP with a result of type TYPE
7767 with location LOC if such a conversion is necessary in GIMPLE,
7768 simplifying it first.
7769 Returns the built expression value and appends
7770 statements possibly defining it to SEQ. */
7771
7772 tree
7773 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
7774 {
7775 if (useless_type_conversion_p (type, TREE_TYPE (op)))
7776 return op;
7777 return gimple_build (seq, loc, NOP_EXPR, type, op);
7778 }
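/* Usage sketch for the gimple_build/gimple_convert API (hedged; A, B,
   LOC and the iterator GSI are hypothetical):

     gimple_seq seq = NULL;
     tree sum = gimple_build (&seq, loc, PLUS_EXPR, TREE_TYPE (a), a, b);
     tree res = gimple_convert (&seq, loc, long_integer_type_node, sum);
     gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);

   When gimple_simplify manages to fold the expression (say A and B
   are INTEGER_CSTs) SEQ stays empty and RES is the folded tree;
   otherwise SEQ receives the defining statements.  */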
7779
7780 /* Build the conversion (ptrofftype) OP with a result of a type
7781 compatible with ptrofftype with location LOC if such conversion
7782 is necessary in GIMPLE, simplifying it first.
7783 Returns the built expression value and appends
7784 statements possibly defining it to SEQ. */
7785
7786 tree
7787 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
7788 {
7789 if (ptrofftype_p (TREE_TYPE (op)))
7790 return op;
7791 return gimple_convert (seq, loc, sizetype, op);
7792 }
7793
7794 /* Build a vector of type TYPE in which each element has the value OP.
7795 Return a gimple value for the result, appending any new statements
7796 to SEQ. */
7797
7798 tree
7799 gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
7800 tree op)
7801 {
7802 if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
7803 && !CONSTANT_CLASS_P (op))
7804 return gimple_build (seq, loc, VEC_DUPLICATE_EXPR, type, op);
7805
7806 tree res, vec = build_vector_from_val (type, op);
7807 if (is_gimple_val (vec))
7808 return vec;
7809 if (gimple_in_ssa_p (cfun))
7810 res = make_ssa_name (type);
7811 else
7812 res = create_tmp_reg (type);
7813 gimple *stmt = gimple_build_assign (res, vec);
7814 gimple_set_location (stmt, loc);
7815 gimple_seq_add_stmt_without_update (seq, stmt);
7816 return res;
7817 }
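/* Example (sketch): for a fixed-length V4SI TYPE and a non-constant
   SSA name OP, build_vector_from_val produces the CONSTRUCTOR
   { OP, OP, OP, OP }, which is not a gimple value, so an assignment
   of it to a fresh register is emitted and that register returned.
   Only variable-length vectors with a non-constant OP take the
   VEC_DUPLICATE_EXPR path.  */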
7818
7819 /* Build a vector from BUILDER, handling the case in which some elements
7820 are non-constant. Return a gimple value for the result, appending any
7821 new instructions to SEQ.
7822
7823 BUILDER must not have a stepped encoding on entry. This is because
7824 the function is not geared up to handle the arithmetic that would
7825 be needed in the variable case, and any code building a vector that
7826 is known to be constant should use BUILDER->build () directly. */
7827
7828 tree
7829 gimple_build_vector (gimple_seq *seq, location_t loc,
7830 tree_vector_builder *builder)
7831 {
7832 gcc_assert (builder->nelts_per_pattern () <= 2);
7833 unsigned int encoded_nelts = builder->encoded_nelts ();
7834 for (unsigned int i = 0; i < encoded_nelts; ++i)
7835 if (!TREE_CONSTANT ((*builder)[i]))
7836 {
7837 tree type = builder->type ();
7838 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
7839 vec<constructor_elt, va_gc> *v;
7840 vec_alloc (v, nelts);
7841 for (i = 0; i < nelts; ++i)
7842 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));
7843
7844 tree res;
7845 if (gimple_in_ssa_p (cfun))
7846 res = make_ssa_name (type);
7847 else
7848 res = create_tmp_reg (type);
7849 gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
7850 gimple_set_location (stmt, loc);
7851 gimple_seq_add_stmt_without_update (seq, stmt);
7852 return res;
7853 }
7854 return builder->build ();
7855 }
7856
7857 /* Return true if the result of assignment STMT is known to be non-negative.
7858 If the return value is based on the assumption that signed overflow is
7859 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7860 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7861
7862 static bool
7863 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7864 int depth)
7865 {
7866 enum tree_code code = gimple_assign_rhs_code (stmt);
7867 switch (get_gimple_rhs_class (code))
7868 {
7869 case GIMPLE_UNARY_RHS:
7870 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7871 gimple_expr_type (stmt),
7872 gimple_assign_rhs1 (stmt),
7873 strict_overflow_p, depth);
7874 case GIMPLE_BINARY_RHS:
7875 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7876 gimple_expr_type (stmt),
7877 gimple_assign_rhs1 (stmt),
7878 gimple_assign_rhs2 (stmt),
7879 strict_overflow_p, depth);
7880 case GIMPLE_TERNARY_RHS:
7881 return false;
7882 case GIMPLE_SINGLE_RHS:
7883 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
7884 strict_overflow_p, depth);
7885 case GIMPLE_INVALID_RHS:
7886 break;
7887 }
7888 gcc_unreachable ();
7889 }
7890
7891 /* Return true if the return value of call STMT is known to be non-negative.
7892 If the return value is based on the assumption that signed overflow is
7893 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7894 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7895
7896 static bool
7897 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7898 int depth)
7899 {
7900 tree arg0 = gimple_call_num_args (stmt) > 0 ?
7901 gimple_call_arg (stmt, 0) : NULL_TREE;
7902 tree arg1 = gimple_call_num_args (stmt) > 1 ?
7903 gimple_call_arg (stmt, 1) : NULL_TREE;
7904
7905 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
7906 gimple_call_combined_fn (stmt),
7907 arg0,
7908 arg1,
7909 strict_overflow_p, depth);
7910 }
7911
7912 /* Return true if the result of PHI STMT is known to be non-negative.
7913 If the return value is based on the assumption that signed overflow is
7914 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7915 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7916
7917 static bool
7918 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7919 int depth)
7920 {
7921 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7922 {
7923 tree arg = gimple_phi_arg_def (stmt, i);
7924 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
7925 return false;
7926 }
7927 return true;
7928 }
7929
7930 /* Return true if STMT is known to compute a non-negative value.
7931 If the return value is based on the assumption that signed overflow is
7932 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7933 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7934
7935 bool
7936 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7937 int depth)
7938 {
7939 switch (gimple_code (stmt))
7940 {
7941 case GIMPLE_ASSIGN:
7942 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
7943 depth);
7944 case GIMPLE_CALL:
7945 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
7946 depth);
7947 case GIMPLE_PHI:
7948 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
7949 depth);
7950 default:
7951 return false;
7952 }
7953 }
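/* Example (sketch): for the assignment x_1 = y_2 * y_2 with signed
   operands, the GIMPLE_BINARY_RHS case defers to
   tree_binary_nonnegative_warnv_p, which can only answer true by
   assuming the signed multiplication does not overflow, and
   therefore sets *STRICT_OVERFLOW_P.  */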
7954
7955 /* Return true if the floating-point value computed by assignment STMT
7956 is known to have an integer value. We also allow +Inf, -Inf and NaN
7957 to be considered integer values. Return false for signaling NaN.
7958
7959 DEPTH is the current nesting depth of the query. */
7960
7961 static bool
7962 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
7963 {
7964 enum tree_code code = gimple_assign_rhs_code (stmt);
7965 switch (get_gimple_rhs_class (code))
7966 {
7967 case GIMPLE_UNARY_RHS:
7968 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
7969 gimple_assign_rhs1 (stmt), depth);
7970 case GIMPLE_BINARY_RHS:
7971 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
7972 gimple_assign_rhs1 (stmt),
7973 gimple_assign_rhs2 (stmt), depth);
7974 case GIMPLE_TERNARY_RHS:
7975 return false;
7976 case GIMPLE_SINGLE_RHS:
7977 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
7978 case GIMPLE_INVALID_RHS:
7979 break;
7980 }
7981 gcc_unreachable ();
7982 }
7983
7984 /* Return true if the floating-point value computed by call STMT is known
7985 to have an integer value. We also allow +Inf, -Inf and NaN to be
7986 considered integer values. Return false for signaling NaN.
7987
7988 DEPTH is the current nesting depth of the query. */
7989
7990 static bool
7991 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
7992 {
7993 tree arg0 = (gimple_call_num_args (stmt) > 0
7994 ? gimple_call_arg (stmt, 0)
7995 : NULL_TREE);
7996 tree arg1 = (gimple_call_num_args (stmt) > 1
7997 ? gimple_call_arg (stmt, 1)
7998 : NULL_TREE);
7999 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
8000 arg0, arg1, depth);
8001 }
8002
8003 /* Return true if the floating-point result of phi STMT is known to have
8004 an integer value. We also allow +Inf, -Inf and NaN to be considered
8005 integer values. Return false for signaling NaN.
8006
8007 DEPTH is the current nesting depth of the query. */
8008
8009 static bool
8010 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
8011 {
8012 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
8013 {
8014 tree arg = gimple_phi_arg_def (stmt, i);
8015 if (!integer_valued_real_single_p (arg, depth + 1))
8016 return false;
8017 }
8018 return true;
8019 }
8020
8021 /* Return true if the floating-point value computed by STMT is known
8022 to have an integer value. We also allow +Inf, -Inf and NaN to be
8023 considered integer values. Return false for signaling NaN.
8024
8025 DEPTH is the current nesting depth of the query. */
8026
8027 bool
8028 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
8029 {
8030 switch (gimple_code (stmt))
8031 {
8032 case GIMPLE_ASSIGN:
8033 return gimple_assign_integer_valued_real_p (stmt, depth);
8034 case GIMPLE_CALL:
8035 return gimple_call_integer_valued_real_p (stmt, depth);
8036 case GIMPLE_PHI:
8037 return gimple_phi_integer_valued_real_p (stmt, depth);
8038 default:
8039 return false;
8040 }
8041 }
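/* Example (sketch): for x_1 = (double) i_2 with integer i_2, the
   GIMPLE_UNARY_RHS case asks integer_valued_real_unary_p about
   FLOAT_EXPR, which holds trivially: converting an integer to a
   floating type always yields an integer value.  */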