/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2019 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "gimple-ssa-warn-restrict.h"
#include "fold-const.h"
#include "stmt.h"
#include "expr.h"
#include "stor-layout.h"
#include "dumpfile.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-object-size.h"
#include "tree-ssa.h"
#include "tree-ssa-propagate.h"
#include "ipa-utils.h"
#include "tree-ssa-address.h"
#include "langhooks.h"
#include "gimplify-me.h"
#include "dbgcnt.h"
#include "builtins.h"
#include "tree-eh.h"
#include "gimple-match.h"
#include "gomp-constants.h"
#include "optabs-query.h"
#include "omp-general.h"
#include "tree-cfg.h"
#include "fold-const-call.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "calls.h"
#include "tree-vector-builder.h"
#include "tree-ssa-strlen.h"

enum strlen_range_kind {
  /* Compute the exact constant string length.  */
  SRK_STRLEN,
  /* Compute the maximum constant string length.  */
  SRK_STRLENMAX,
  /* Compute a range of string lengths bounded by object sizes.  When
     the length of a string cannot be determined, consider as the upper
     bound the size of the enclosing object the string may be a member
     or element of.  Also determine the size of the largest character
     array the string may refer to.  */
  SRK_LENRANGE,
  /* Determine the integer value of the argument (not string length).  */
  SRK_INT_VALUE
};
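
/* Illustrative example, not part of the original source: for
     const char a[8] = "ab";
   a query on &a[0] yields 2 for SRK_STRLEN and SRK_STRLENMAX, while
   SRK_LENRANGE additionally records 7 (sizeof a - 1) as the size of
   the largest array the string may occupy.  */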

static bool
get_range_strlen (tree, bitmap *, strlen_range_kind, c_strlen_data *, unsigned);

/* Return true when DECL can be referenced from the current unit.
   FROM_DECL (if non-null) specifies the variable from whose constructor
   DECL was taken.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to another compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to a reference
	to a method that was partitioned elsewhere.
	In this case we have a static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred to by external vtables that
	we devirtualize only during the final compilation stage.
	At this point we have already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || !VAR_OR_FUNCTION_DECL_P (decl))
    return true;

  /* Static objects can be referred only if they are defined and not optimized
     out yet.  */
  if (!TREE_PUBLIC (decl))
    {
      if (DECL_EXTERNAL (decl))
	return false;
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      return !node || !node->inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from the initializer of
     an external var or a var that has been optimized out.  */
  if (!from_decl
      || !VAR_P (from_decl)
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding a reference from an external vtable.  The vtable may
     refer to a symbol keyed to another compilation unit.  The other
     compilation unit may be in a separate DSO and the symbol may be
     hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When a function is public, we can always introduce a new reference.
     The exception is COMDAT functions, where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have COMDAT.  We are going to check if we still have a definition
     or if the definition is going to be output in another partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when the corresponding vtable is output.
     This is however not possible - the ABI specifies that COMDATs are output
     in units where they are used, and when the other unit was compiled with
     LTO it is possible that the vtable was kept public while the function
     itself was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->inlined_to;
}

/* Create a temporary for TYPE for a statement STMT.  If the current
   function is in SSA form, an SSA name is created.  Otherwise a
   temporary register is made.  */

tree
create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
{
  if (gimple_in_ssa_p (cfun))
    return make_ssa_name (type, stmt);
  else
    return create_tmp_reg (type);
}

/* CVAL is a value taken from the DECL_INITIAL of a variable.  Try to
   transform it into a form acceptable for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies the variable whose constructor
   contains CVAL.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  if (CONSTANT_CLASS_P (cval))
    return cval;

  tree orig_cval = cval;
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      if (VAR_OR_FUNCTION_DECL_P (base)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_TYPE (base) == error_mark_node)
	return NULL_TREE;
      if (VAR_P (base))
	TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  /* In CONSTRUCTORs we may see unfolded constants like (int (*) ()) 0.  */
  if (TREE_CODE (cval) == INTEGER_CST)
    {
      if (TREE_OVERFLOW_P (cval))
	cval = drop_tree_overflow (cval);
      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  return orig_cval;
}
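
/* Illustrative sketch, not part of the original source: an initializer
   value such as
     (char *) &buf + 4
   (a POINTER_PLUS_EXPR) is canonicalized above to
     &MEM_REF [&buf, 4]
   i.e. the address of a memory reference at constant offset 4, a form
   that is_gimple_min_invariant accepts.  */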

/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  tree val = ctor_for_folding (sym);
  if (val != error_mark_node)
    {
      if (val)
	{
	  val = canonicalize_constructor_val (unshare_expr (val), sym);
	  if (val && is_gimple_min_invariant (val))
	    return val;
	  else
	    return NULL_TREE;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && is_gimple_reg_type (TREE_TYPE (sym)))
	return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}
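
/* Example, not part of the original source: given
     static const int answer = 42;
   get_symbol_constant_value returns the INTEGER_CST 42, while for an
   extern const variable without a visible initializer it returns
   NULL_TREE.  */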


/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree result;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
			   TREE_CODE (expr),
			   TREE_TYPE (expr),
			   TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
			     TREE_CODE (expr),
			     TREE_TYPE (expr),
			     TREE_OPERAND (expr, 0),
			     TREE_OPERAND (expr, 1),
			     TREE_OPERAND (expr, 2));

  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  return NULL_TREE;
}
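
/* Example, not part of the original source: a read of
     REALPART_EXPR <COMPLEX_CST (1.0, 2.0)>
   folds above to the REAL_CST 1.0 via fold_unary_loc, and a
   BIT_FIELD_REF of a VECTOR_CST folds to the selected element via
   fold_ternary_loc.  */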


/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We cannot use __builtin_unreachable here because it
			 cannot have address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }
	  }

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));

	    if (result)
	      {
		/* Strip away useless type conversions.  Both the
		   NON_LVALUE_EXPR that may have been added by fold, and
		   "useless" type conversions that might now be apparent
		   due to propagation.  */
		STRIP_USELESS_TYPE_CONVERSION (result);

		if (result != rhs && valid_gimple_rhs_p (result))
		  return result;
	      }
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (! CONSTANT_CLASS_P (val))
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return get_symbol_constant_value (rhs);
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
				 TREE_TYPE (gimple_assign_lhs (stmt)),
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}


/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts' location and virtual operands.
   If the statement has a lhs, the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First, iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second, iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with the exact
	 SSA name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store, release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
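
/* Illustrative sketch, not part of the original source: replacing a
   call with VUSE .MEM_3 and VDEF .MEM_4 by a two-store sequence wires
   the virtual operands as
     store1: VUSE .MEM_3, VDEF .MEM_5   (fresh SSA name)
     store2: VUSE .MEM_5, VDEF .MEM_4   (reuses the original VDEF)
   so statements after the replacement keep seeing the same reaching
   definition.  */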

/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), false);
	  return;
	}
    }
  else
    {
      tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  gsi_replace_with_seq_vops (si_p, stmts);
}


/* Replace the call at *GSI with the gimple value VAL.  */

void
replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  gimple *repl;
  if (lhs)
    {
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
	val = fold_convert (TREE_TYPE (lhs), val);
      repl = gimple_build_assign (lhs, val);
    }
  else
    repl = gimple_build_nop ();
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      unlink_stmt_vdef (stmt);
      release_ssa_name (vdef);
    }
  gsi_replace (gsi, repl, false);
}

/* Replace the call at *GSI with the new call REPL and fold that
   again.  */

static void
replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
{
  gimple *stmt = gsi_stmt (*gsi);
  gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
  gimple_set_location (repl, gimple_location (stmt));
  gimple_move_vops (repl, stmt);
  gsi_replace (gsi, repl, false);
  fold_stmt (gsi);
}

/* Return true if VAR is a VAR_DECL or a component thereof.  */

static bool
var_decl_component_p (tree var)
{
  tree inner = var;
  while (handled_component_p (inner))
    inner = TREE_OPERAND (inner, 0);
  return (DECL_P (inner)
	  || (TREE_CODE (inner) == MEM_REF
	      && TREE_CODE (TREE_OPERAND (inner, 0)) == ADDR_EXPR));
}

/* Return TRUE if the SIZE argument, representing the size of an
   object, is in a range of values of which exactly zero is valid.  */

static bool
size_must_be_zero_p (tree size)
{
  if (integer_zerop (size))
    return true;

  if (TREE_CODE (size) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (size)))
    return false;

  tree type = TREE_TYPE (size);
  int prec = TYPE_PRECISION (type);

  /* Compute the value of SSIZE_MAX, the largest positive value that
     can be stored in ssize_t, the signed counterpart of size_t.  */
  wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
  value_range valid_range (build_int_cst (type, 0),
			   wide_int_to_tree (type, ssize_max));
  value_range vr;
  get_range_info (size, vr);
  vr.intersect (&valid_range);
  return vr.zero_p ();
}
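
/* Worked example, not part of the original source: with a 64-bit
   size_t, SSIZE_MAX is 2^63 - 1 and the valid range is [0, 2^63 - 1].
   If SIZE is known to be either 0 or at least 2^63 (e.g. a converted
   nonpositive signed value), intersecting its range with the valid
   range leaves only zero and the function returns true.  */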

/* Fold function call to builtin mem{{,p}cpy,move}.  Try to detect and
   diagnose (otherwise undefined) overlapping copies without preventing
   folding.  When folded, GCC guarantees that overlapping memcpy has
   the same semantics as memmove.  A call to the library memcpy need not
   provide the same guarantee.  Return false if no simplification can
   be made.  */

static bool
gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
			       tree dest, tree src, enum built_in_function code)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree len = gimple_call_arg (stmt, 2);
  tree destvar, srcvar;
  location_t loc = gimple_location (stmt);

  /* If the LEN parameter is a constant zero or in a range where
     the only valid value is zero, return DEST.  */
  if (size_must_be_zero_p (len))
    {
      gimple *repl;
      if (gimple_call_lhs (stmt))
	repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
      else
	repl = gimple_build_nop ();
      tree vdef = gimple_vdef (stmt);
      if (vdef && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
      gsi_replace (gsi, repl, false);
      return true;
    }

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    {
      /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
	 It's safe and may even be emitted by GCC itself (see bug
	 32667).  */
      unlink_stmt_vdef (stmt);
      if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	release_ssa_name (gimple_vdef (stmt));
      if (!lhs)
	{
	  gsi_replace (gsi, gimple_build_nop (), false);
	  return true;
	}
      goto done;
    }
  else
    {
      tree srctype, desttype;
      unsigned int src_align, dest_align;
      tree off0;
      const char *tmp_str;
      unsigned HOST_WIDE_INT tmp_len;

      /* Build accesses at offset zero with a ref-all character type.  */
      off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
							 ptr_mode, true), 0);

      /* If we can perform the copy efficiently with first doing all loads
	 and then all stores, inline it that way.  Currently efficiently
	 means that we can load all the memory into a single integer
	 register, which is what MOVE_MAX gives us.  */
      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (tree_fits_uhwi_p (len)
	  && compare_tree_int (len, MOVE_MAX) <= 0
	  /* FIXME: Don't transform copies from strings with known length.
	     Until GCC 9 this prevented a case in gcc.dg/strlenopt-8.c
	     from being handled, and the case was XFAILed for that reason.
	     Now that it is handled and the XFAIL removed, as soon as other
	     strlenopt tests that rely on it for passing are adjusted, this
	     hack can be removed.  */
	  && !c_strlen (src, 1)
	  && !((tmp_str = c_getstr (src, &tmp_len)) != NULL
	       && memchr (tmp_str, 0, tmp_len) == NULL))
	{
	  unsigned ilen = tree_to_uhwi (len);
	  if (pow2p_hwi (ilen))
	    {
	      /* Detect out-of-bounds accesses without issuing warnings.
		 Avoid folding out-of-bounds copies, but to avoid false
		 positives for unreachable code defer the warning until
		 after DCE has worked its magic.
		 -Wrestrict is still diagnosed.  */
	      if (int warning = check_bounds_or_overlap (as_a <gcall *> (stmt),
							 dest, src, len, len,
							 false, false))
		if (warning != OPT_Wrestrict)
		  return false;

	      scalar_int_mode mode;
	      tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
	      if (type
		  && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
		  && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
		  /* If the destination pointer is not aligned we must be able
		     to emit an unaligned store.  */
		  && (dest_align >= GET_MODE_ALIGNMENT (mode)
		      || !targetm.slow_unaligned_access (mode, dest_align)
		      || (optab_handler (movmisalign_optab, mode)
			  != CODE_FOR_nothing)))
		{
		  tree srctype = type;
		  tree desttype = type;
		  if (src_align < GET_MODE_ALIGNMENT (mode))
		    srctype = build_aligned_type (type, src_align);
		  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
		  tree tem = fold_const_aggregate_ref (srcmem);
		  if (tem)
		    srcmem = tem;
		  else if (src_align < GET_MODE_ALIGNMENT (mode)
			   && targetm.slow_unaligned_access (mode, src_align)
			   && (optab_handler (movmisalign_optab, mode)
			       == CODE_FOR_nothing))
		    srcmem = NULL_TREE;
		  if (srcmem)
		    {
		      gimple *new_stmt;
		      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
			{
			  new_stmt = gimple_build_assign (NULL_TREE, srcmem);
			  srcmem
			    = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
							  new_stmt);
			  gimple_assign_set_lhs (new_stmt, srcmem);
			  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      if (dest_align < GET_MODE_ALIGNMENT (mode))
			desttype = build_aligned_type (type, dest_align);
		      new_stmt
			= gimple_build_assign (fold_build2 (MEM_REF, desttype,
							    dest, off0),
					       srcmem);
		      gimple_move_vops (new_stmt, stmt);
		      if (!lhs)
			{
			  gsi_replace (gsi, new_stmt, false);
			  return true;
			}
		      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
		      goto done;
		    }
		}
	    }
	}

      if (code == BUILT_IN_MEMMOVE)
	{
	  /* Both DEST and SRC must be pointer types.
	     ??? This is what old code did.  Is the testing for pointer types
	     really mandatory?

	     If either SRC is readonly or length is 1, we can use memcpy.  */
	  if (!dest_align || !src_align)
	    return false;
	  if (readonly_data_expr (src)
	      || (tree_fits_uhwi_p (len)
		  && (MIN (src_align, dest_align) / BITS_PER_UNIT
		      >= tree_to_uhwi (len))))
	    {
	      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If *src and *dest can't overlap, optimize into memcpy as well.  */
	  if (TREE_CODE (src) == ADDR_EXPR
	      && TREE_CODE (dest) == ADDR_EXPR)
	    {
	      tree src_base, dest_base, fn;
	      poly_int64 src_offset = 0, dest_offset = 0;
	      poly_uint64 maxsize;

	      srcvar = TREE_OPERAND (src, 0);
	      src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
	      if (src_base == NULL)
		src_base = srcvar;
	      destvar = TREE_OPERAND (dest, 0);
	      dest_base = get_addr_base_and_unit_offset (destvar,
							 &dest_offset);
	      if (dest_base == NULL)
		dest_base = destvar;
	      if (!poly_int_tree_p (len, &maxsize))
		maxsize = -1;
	      if (SSA_VAR_P (src_base)
		  && SSA_VAR_P (dest_base))
		{
		  if (operand_equal_p (src_base, dest_base, 0)
		      && ranges_maybe_overlap_p (src_offset, maxsize,
						 dest_offset, maxsize))
		    return false;
		}
	      else if (TREE_CODE (src_base) == MEM_REF
		       && TREE_CODE (dest_base) == MEM_REF)
		{
		  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
					 TREE_OPERAND (dest_base, 0), 0))
		    return false;
		  poly_offset_int full_src_offset
		    = mem_ref_offset (src_base) + src_offset;
		  poly_offset_int full_dest_offset
		    = mem_ref_offset (dest_base) + dest_offset;
		  if (ranges_maybe_overlap_p (full_src_offset, maxsize,
					      full_dest_offset, maxsize))
		    return false;
		}
	      else
		return false;

	      fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If the destination and source do not alias, optimize into
	     memcpy as well.  */
	  if ((is_gimple_min_invariant (dest)
	       || TREE_CODE (dest) == SSA_NAME)
	      && (is_gimple_min_invariant (src)
		  || TREE_CODE (src) == SSA_NAME))
	    {
	      ao_ref destr, srcr;
	      ao_ref_init_from_ptr_and_size (&destr, dest, len);
	      ao_ref_init_from_ptr_and_size (&srcr, src, len);
	      if (!refs_may_alias_p_1 (&destr, &srcr, false))
		{
		  tree fn;
		  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
		  if (!fn)
		    return false;
		  gimple_call_set_fndecl (stmt, fn);
		  gimple_call_set_arg (stmt, 0, dest);
		  gimple_call_set_arg (stmt, 1, src);
		  fold_stmt (gsi);
		  return true;
		}
	    }

	  return false;
	}

      if (!tree_fits_shwi_p (len))
	return false;
      if (!POINTER_TYPE_P (TREE_TYPE (src))
	  || !POINTER_TYPE_P (TREE_TYPE (dest)))
	return false;
      /* In the following try to find a type that is most natural to be
	 used for the memcpy source and destination and that allows
	 the most optimization when memcpy is turned into a plain assignment
	 using that type.  In theory we could always use a char[len] type
	 but that only gains us that the destination and source possibly
	 no longer will have their address taken.  */
      srctype = TREE_TYPE (TREE_TYPE (src));
      if (TREE_CODE (srctype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	srctype = TREE_TYPE (srctype);
      desttype = TREE_TYPE (TREE_TYPE (dest));
      if (TREE_CODE (desttype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	desttype = TREE_TYPE (desttype);
      if (TREE_ADDRESSABLE (srctype)
	  || TREE_ADDRESSABLE (desttype))
	return false;

      /* Make sure we are not copying using a floating-point mode or
	 a type whose size possibly does not match its precision.  */
      if (FLOAT_MODE_P (TYPE_MODE (desttype))
	  || TREE_CODE (desttype) == BOOLEAN_TYPE
	  || TREE_CODE (desttype) == ENUMERAL_TYPE)
	desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
      if (FLOAT_MODE_P (TYPE_MODE (srctype))
	  || TREE_CODE (srctype) == BOOLEAN_TYPE
	  || TREE_CODE (srctype) == ENUMERAL_TYPE)
	srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
      if (!srctype)
	srctype = desttype;
      if (!desttype)
	desttype = srctype;
      if (!srctype)
	return false;

      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (dest_align < TYPE_ALIGN (desttype)
	  || src_align < TYPE_ALIGN (srctype))
	return false;

      destvar = NULL_TREE;
      if (TREE_CODE (dest) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (dest, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	destvar = fold_build2 (MEM_REF, desttype, dest, off0);

      srcvar = NULL_TREE;
      if (TREE_CODE (src) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (src, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	{
	  if (!destvar
	      || src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
				  src, off0);
	  else if (!STRICT_ALIGNMENT)
	    {
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	    }
	}

      if (srcvar == NULL_TREE && destvar == NULL_TREE)
	return false;

      if (srcvar == NULL_TREE)
	{
	  if (src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, desttype, src, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return false;
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	    }
	}
      else if (destvar == NULL_TREE)
	{
	  if (dest_align >= TYPE_ALIGN (srctype))
	    destvar = fold_build2 (MEM_REF, srctype, dest, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return false;
	      desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
					     dest_align);
	      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
	    }
	}

      /* Same as above, detect out-of-bounds accesses without issuing
	 warnings.  Avoid folding out-of-bounds copies, but to avoid
	 false positives for unreachable code defer the warning until
	 after DCE has worked its magic.
	 -Wrestrict is still diagnosed.  */
      if (int warning = check_bounds_or_overlap (as_a <gcall *> (stmt),
						 dest, src, len, len,
						 false, false))
	if (warning != OPT_Wrestrict)
	  return false;

      gimple *new_stmt;
      if (is_gimple_reg_type (TREE_TYPE (srcvar)))
	{
	  tree tem = fold_const_aggregate_ref (srcvar);
	  if (tem)
	    srcvar = tem;
	  if (! is_gimple_min_invariant (srcvar))
	    {
	      new_stmt = gimple_build_assign (NULL_TREE, srcvar);
	      srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
						   new_stmt);
	      gimple_assign_set_lhs (new_stmt, srcvar);
	      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
	    }
	  new_stmt = gimple_build_assign (destvar, srcvar);
	  goto set_vop_and_replace;
	}

      /* We get an aggregate copy.  Use an unsigned char[] type to
	 perform the copying to preserve padding and to avoid any issues
	 with TREE_ADDRESSABLE types or float modes behavior on copying.  */
      desttype = build_array_type_nelts (unsigned_char_type_node,
					 tree_to_uhwi (len));
      srctype = desttype;
      if (src_align > TYPE_ALIGN (srctype))
	srctype = build_aligned_type (srctype, src_align);
      if (dest_align > TYPE_ALIGN (desttype))
	desttype = build_aligned_type (desttype, dest_align);
      new_stmt
	= gimple_build_assign (fold_build2 (MEM_REF, desttype, dest, off0),
			       fold_build2 (MEM_REF, srctype, src, off0));
set_vop_and_replace:
      gimple_move_vops (new_stmt, stmt);
      if (!lhs)
	{
	  gsi_replace (gsi, new_stmt, false);
	  return true;
	}
      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
    }

done:
  gimple_seq stmts = NULL;
  if (code == BUILT_IN_MEMCPY || code == BUILT_IN_MEMMOVE)
    len = NULL_TREE;
  else if (code == BUILT_IN_MEMPCPY)
    {
      len = gimple_convert_to_ptrofftype (&stmts, loc, len);
      dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
			   TREE_TYPE (dest), dest, len);
    }
  else
    gcc_unreachable ();

  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gimple *repl = gimple_build_assign (lhs, dest);
  gsi_replace (gsi, repl, false);
  return true;
}
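
/* Illustrative example, not part of the original source: for
     int s, d;
     memcpy (&d, &s, 4);
   the load/store path above produces
     tmp_1 = MEM <unsigned int> [(char *)&s];
     MEM <unsigned int> [(char *)&d] = tmp_1;
   and for mempcpy the result DEST + LEN is substituted for the lhs at
   the "done" label.  */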

/* Transform a call to built-in bcmp (a, b, len) at *GSI into one
   to built-in memcmp (a, b, len).  */

static bool
gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);

  if (!fn)
    return false;

  /* Transform bcmp (a, b, len) into memcmp (a, b, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree a = gimple_call_arg (stmt, 0);
  tree b = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  gimple *repl = gimple_build_call (fn, 3, a, b, len);
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}

/* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
   to built-in memmove (dest, src, len).  */

static bool
gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);

  if (!fn)
    return false;

  /* bcopy has been removed from POSIX in Issue 7, but Issue 6 specifies
     it's equivalent to memmove (not memcpy).  Transform bcopy (src, dest,
     len) into memmove (dest, src, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree src = gimple_call_arg (stmt, 0);
  tree dest = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}

/* Transform a call to built-in bzero (dest, len) at *GSI into one
   to built-in memset (dest, 0, len).  */

static bool
gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);

  if (!fn)
    return false;

  /* Transform bzero (dest, len) into memset (dest, 0, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree len = gimple_call_arg (stmt, 1);

  gimple_seq seq = NULL;
  gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
  gimple_seq_add_stmt_without_update (&seq, repl);
  gsi_replace_with_seq_vops (gsi, seq);
  fold_stmt (gsi);

  return true;
}

/* Fold function call to builtin memset or bzero at *GSI setting the
   memory of size LEN to C.  Return whether a simplification was made.  */

static bool
gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree etype;
  unsigned HOST_WIDE_INT length, cval;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
      return true;
    }

  if (! tree_fits_uhwi_p (len))
    return false;

  if (TREE_CODE (c) != INTEGER_CST)
    return false;

  tree dest = gimple_call_arg (stmt, 0);
  tree var = dest;
  if (TREE_CODE (var) != ADDR_EXPR)
    return false;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return false;

  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  if (!INTEGRAL_TYPE_P (etype)
      && !POINTER_TYPE_P (etype))
    return false;

  if (! var_decl_component_p (var))
    return false;

  length = tree_to_uhwi (len);
  if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
      || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
    return false;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return false;

  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
	return false;

      cval = TREE_INT_CST_LOW (c);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }

  var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
  gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
  gimple_move_vops (store, stmt);
  gsi_insert_before (gsi, store, GSI_SAME_STMT);
  if (gimple_call_lhs (stmt))
    {
      gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
      gsi_replace (gsi, asgn, false);
    }
  else
    {
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (gsi);
      gsi_remove (&gsi2, true);
    }

  return true;
}
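
/* Worked example, not part of the original source: for
     int i;
     memset (&i, 0xab, 4);
   the shifts above replicate the low byte through the whole
   HOST_WIDE_INT (the (cval << 31) << 1 step covers the upper half
   without shifting by the full word width), and build_int_cst_type
   truncates that to the 4-byte value, folding the call to the single
   store i = 0xabababab.  */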

/* Helper of get_range_strlen for ARG that is not an SSA_NAME.  */

static bool
get_range_strlen_tree (tree arg, bitmap *visited, strlen_range_kind rkind,
		       c_strlen_data *pdata, unsigned eltsize)
{
  gcc_assert (TREE_CODE (arg) != SSA_NAME);

  /* The length computed by this invocation of the function.  */
  tree val = NULL_TREE;

  /* True if VAL is an optimistic (tight) bound determined from
     the size of the character array in which the string may be
     stored.  In that case, the computed VAL is used to set
     PDATA->MAXBOUND.  */
  bool tight_bound = false;

  /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
    {
      tree op = TREE_OPERAND (arg, 0);
      if (integer_zerop (TREE_OPERAND (op, 1)))
	{
	  tree aop0 = TREE_OPERAND (op, 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_range_strlen (TREE_OPERAND (aop0, 0), visited, rkind,
				     pdata, eltsize);
	}
      else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF
	       && rkind == SRK_LENRANGE)
	{
	  /* Fail if an array is the last member of a struct object
	     since it could be treated as a (fake) flexible array
	     member.  */
	  tree idx = TREE_OPERAND (op, 1);

	  arg = TREE_OPERAND (op, 0);
	  tree optype = TREE_TYPE (arg);
	  if (tree dom = TYPE_DOMAIN (optype))
	    if (tree bound = TYPE_MAX_VALUE (dom))
	      if (TREE_CODE (bound) == INTEGER_CST
		  && TREE_CODE (idx) == INTEGER_CST
		  && tree_int_cst_lt (bound, idx))
		return false;
	}
    }

  if (rkind == SRK_INT_VALUE)
    {
      /* We are computing the maximum value (not string length).  */
      val = arg;
      if (TREE_CODE (val) != INTEGER_CST
	  || tree_int_cst_sgn (val) < 0)
	return false;
    }
  else
    {
      c_strlen_data lendata = { };
      val = c_strlen (arg, 1, &lendata, eltsize);

      if (!val && lendata.decl)
	{
	  /* ARG refers to the unterminated constant character array
	     LENDATA.DECL, of size LENDATA.LEN.  */
	  val = lendata.minlen;
	  pdata->decl = lendata.decl;
	}
    }

  /* Set if VAL represents the maximum length based on array size (set
     when exact length cannot be determined).  */
  bool maxbound = false;

  if (!val && rkind == SRK_LENRANGE)
    {
      if (TREE_CODE (arg) == ADDR_EXPR)
	return get_range_strlen (TREE_OPERAND (arg, 0), visited, rkind,
				 pdata, eltsize);

      if (TREE_CODE (arg) == ARRAY_REF)
	{
	  tree optype = TREE_TYPE (TREE_OPERAND (arg, 0));

	  /* Determine the "innermost" array type.  */
	  while (TREE_CODE (optype) == ARRAY_TYPE
		 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
	    optype = TREE_TYPE (optype);

	  /* Avoid arrays of pointers.  */
	  tree eltype = TREE_TYPE (optype);
	  if (TREE_CODE (optype) != ARRAY_TYPE
	      || !INTEGRAL_TYPE_P (eltype))
	    return false;

	  /* Fail when the array bound is unknown or zero.  */
	  val = TYPE_SIZE_UNIT (optype);
	  if (!val || integer_zerop (val))
	    return false;

	  val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
			     integer_one_node);

	  /* Set the minimum size to zero since the string in
	     the array could have zero length.  */
	  pdata->minlen = ssize_int (0);

	  tight_bound = true;
	}
      else if (TREE_CODE (arg) == COMPONENT_REF
	       && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
		   == ARRAY_TYPE))
	{
	  /* Use the type of the member array to determine the upper
	     bound on the length of the array.  This may be overly
	     optimistic if the array itself isn't NUL-terminated and
	     the caller relies on the subsequent member to contain
	     the NUL but that would only be considered valid if
	     the array were the last member of a struct.  */

	  tree fld = TREE_OPERAND (arg, 1);

	  tree optype = TREE_TYPE (fld);

	  /* Determine the "innermost" array type.  */
	  while (TREE_CODE (optype) == ARRAY_TYPE
		 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
	    optype = TREE_TYPE (optype);

	  /* Fail when the array bound is unknown or zero.  */
	  val = TYPE_SIZE_UNIT (optype);
	  if (!val || integer_zerop (val))
	    return false;
	  val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
			     integer_one_node);

	  /* Set the minimum size to zero since the string in
	     the array could have zero length.  */
	  pdata->minlen = ssize_int (0);

	  /* The array size determined above is an optimistic bound
	     on the length.  If the array isn't nul-terminated the
	     length computed by the library function would be greater.
	     Even though using strlen to cross the subobject boundary
	     is undefined, avoid drawing conclusions from the member
	     type about the length here.  */
	  tight_bound = true;
	}
      else if (VAR_P (arg))
	{
	  /* Avoid handling pointers to arrays.  GCC might misuse
	     a pointer to an array of one bound to point to an array
	     object of a greater bound.  */
	  tree argtype = TREE_TYPE (arg);
	  if (TREE_CODE (argtype) == ARRAY_TYPE)
	    {
	      val = TYPE_SIZE_UNIT (argtype);
	      if (!val
		  || TREE_CODE (val) != INTEGER_CST
		  || integer_zerop (val))
		return false;
	      val = wide_int_to_tree (TREE_TYPE (val),
				      wi::sub (wi::to_wide (val), 1));

	      /* Set the minimum size to zero since the string in
		 the array could have zero length.  */
	      pdata->minlen = ssize_int (0);
	    }
	}
      maxbound = true;
    }

  if (!val)
    return false;

  /* Adjust the lower bound on the string length as necessary.  */
  if (!pdata->minlen
      || (rkind != SRK_STRLEN
	  && TREE_CODE (pdata->minlen) == INTEGER_CST
	  && TREE_CODE (val) == INTEGER_CST
	  && tree_int_cst_lt (val, pdata->minlen)))
    pdata->minlen = val;

  if (pdata->maxbound && TREE_CODE (pdata->maxbound) == INTEGER_CST)
    {
      /* Adjust the tighter (more optimistic) string length bound
	 if necessary and proceed to adjust the more conservative
	 bound.  */
      if (TREE_CODE (val) == INTEGER_CST)
	{
	  if (tree_int_cst_lt (pdata->maxbound, val))
	    pdata->maxbound = val;
	}
      else
	pdata->maxbound = val;
    }
  else if (pdata->maxbound || maxbound)
    /* Set PDATA->MAXBOUND only if it either isn't INTEGER_CST or
       if VAL corresponds to the maximum length determined based
       on the type of the object.  */
    pdata->maxbound = val;

  if (tight_bound)
    {
      /* VAL computed above represents an optimistically tight bound
	 on the length of the string based on the referenced object's
	 or subobject's type.  Determine the conservative upper bound
	 based on the enclosing object's size if possible.  */
      if (rkind == SRK_LENRANGE)
	{
	  poly_int64 offset;
	  tree base = get_addr_base_and_unit_offset (arg, &offset);
	  if (!base)
	    {
	      /* When the call above fails due to a non-constant offset
		 assume the offset is zero and use the size of the whole
		 enclosing object instead.  */
	      base = get_base_address (arg);
	      offset = 0;
	    }
	  /* If the base object is a pointer no upper bound on the length
	     can be determined.  Otherwise the maximum length is equal to
	     the size of the enclosing object minus the offset of
	     the referenced subobject minus 1 (for the terminating nul).  */
	  tree type = TREE_TYPE (base);
	  if (TREE_CODE (type) == POINTER_TYPE
	      || !VAR_P (base) || !(val = DECL_SIZE_UNIT (base)))
	    val = build_all_ones_cst (size_type_node);
	  else
	    {
	      val = DECL_SIZE_UNIT (base);
	      val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
				 size_int (offset + 1));
	    }
	}
      else
	return false;
    }

  if (pdata->maxlen)
    {
      /* Adjust the more conservative bound if possible/necessary
	 and fail otherwise.  */
      if (rkind != SRK_STRLEN)
	{
	  if (TREE_CODE (pdata->maxlen) != INTEGER_CST
	      || TREE_CODE (val) != INTEGER_CST)
	    return false;

	  if (tree_int_cst_lt (pdata->maxlen, val))
	    pdata->maxlen = val;
	  return true;
	}
      else if (simple_cst_equal (val, pdata->maxlen) != 1)
	{
	  /* Fail if the length of this ARG is different from that
	     previously determined from another ARG.  */
	  return false;
	}
    }

  pdata->maxlen = val;
  return rkind == SRK_LENRANGE || !integer_all_onesp (val);
}
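
/* Illustrative example, not part of the original source: for
     struct S { char a[8]; int n; } s;
   a SRK_LENRANGE query on s.a sets minlen = 0 and the optimistic
   (tight) bound 7 (sizeof s.a - 1); the tight_bound handling above
   then derives the conservative maximum from the enclosing object,
   sizeof s minus the member offset minus 1.  */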

/* For an ARG referencing one or more strings, try to obtain the range
   of their lengths, or the size of the largest array ARG refers to if
   the range of lengths cannot be determined, and store all in *PDATA.
   For an integer ARG (when RKIND == SRK_INT_VALUE), try to determine
   the maximum constant value.
   If ARG is an SSA_NAME, follow its use-def chains.  When RKIND ==
   SRK_STRLEN, then if PDATA->MAXLEN is not equal to the determined
   length or if we are unable to determine the length, return false.
   VISITED is a bitmap of visited variables.
   RKIND determines the kind of value or range to obtain (see
   strlen_range_kind).
   Set PDATA->DECL if ARG refers to an unterminated constant array.
   On input, set ELTSIZE to 1 for normal single byte character strings,
   and either 2 or 4 for wide character strings (the size of wchar_t).
   Return true if *PDATA was successfully populated and false
   otherwise.  */

static bool
get_range_strlen (tree arg, bitmap *visited,
		  strlen_range_kind rkind,
		  c_strlen_data *pdata, unsigned eltsize)
{

  if (TREE_CODE (arg) != SSA_NAME)
    return get_range_strlen_tree (arg, visited, rkind, pdata, eltsize);

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  tree var = arg;
  gimple *def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
	 constant length or come from another SSA_NAME with a constant
	 length.  */
      if (gimple_assign_single_p (def_stmt)
	  || gimple_assign_unary_nop_p (def_stmt))
	{
	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  return get_range_strlen (rhs, visited, rkind, pdata, eltsize);
	}
      else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	{
	  tree ops[2] = { gimple_assign_rhs2 (def_stmt),
			  gimple_assign_rhs3 (def_stmt) };

	  for (unsigned int i = 0; i < 2; i++)
	    if (!get_range_strlen (ops[i], visited, rkind, pdata, eltsize))
	      {
		if (rkind != SRK_LENRANGE)
		  return false;
		/* Set the upper bound to the maximum to prevent
		   it from being adjusted in the next iteration but
		   leave MINLEN and the more conservative MAXBOUND
		   determined so far alone (or leave them null if
		   they haven't been set yet).  That the MINLEN is
		   in fact zero can be determined from MAXLEN being
		   unbounded but the discovered minimum is used for
		   diagnostics.  */
		pdata->maxlen = build_all_ones_cst (size_type_node);
	      }
	  return true;
	}
      return false;

    case GIMPLE_PHI:
      /* Unless RKIND == SRK_LENRANGE, all arguments of the PHI node
	 must have a constant length.  */
      for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
	{
	  tree arg = gimple_phi_arg (def_stmt, i)->def;

	  /* If this PHI has itself as an argument, we cannot
	     determine the string length of this argument.  However,
	     if we can find a constant string length for the other
	     PHI args then we can still be sure that this is a
	     constant string length.  So be optimistic and just
	     continue with the next argument.  */
	  if (arg == gimple_phi_result (def_stmt))
	    continue;

	  if (!get_range_strlen (arg, visited, rkind, pdata, eltsize))
	    {
	      if (rkind != SRK_LENRANGE)
		return false;
	      /* Set the upper bound to the maximum to prevent
		 it from being adjusted in the next iteration but
		 leave MINLEN and the more conservative MAXBOUND
		 determined so far alone (or leave them null if
		 they haven't been set yet).  That the MINLEN is
		 in fact zero can be determined from MAXLEN being
		 unbounded but the discovered minimum is used for
		 diagnostics.  */
	      pdata->maxlen = build_all_ones_cst (size_type_node);
	    }
	}
      return true;

    default:
      return false;
    }
}

/* Try to obtain the range of the lengths of the string(s) referenced
   by ARG, or the size of the largest array ARG refers to if the range
   of lengths cannot be determined, and store all in *PDATA, which must
   be zero-initialized on input except PDATA->MAXBOUND may be set to
   a non-null tree node other than INTEGER_CST to request to have it
   set to the length of the longest string in a PHI.  ELTSIZE is
   the expected size of the string element in bytes: 1 for char and
   some power of 2 for wide characters.
   Return true if the range [PDATA->MINLEN, PDATA->MAXLEN] is suitable
   for optimization.  Returning false means that a nonzero PDATA->MINLEN
   doesn't reflect the true lower bound of the range when PDATA->MAXLEN
   is -1 (in that case, the actual range is indeterminate, i.e.,
   [0, PTRDIFF_MAX - 2]).  */

bool
get_range_strlen (tree arg, c_strlen_data *pdata, unsigned eltsize)
{
  bitmap visited = NULL;
  tree maxbound = pdata->maxbound;

  if (!get_range_strlen (arg, &visited, SRK_LENRANGE, pdata, eltsize))
    {
      /* On failure extend the length range to an impossible maximum
	 (a valid MAXLEN must be less than PTRDIFF_MAX - 1).  Other
	 members can stay unchanged regardless.  */
      pdata->minlen = ssize_int (0);
      pdata->maxlen = build_all_ones_cst (size_type_node);
    }
  else if (!pdata->minlen)
    pdata->minlen = ssize_int (0);

  /* If it's unchanged from its initial non-null value, set the
     conservative MAXBOUND to SIZE_MAX.  Otherwise leave it null (if
     it is null).  */
  if (maxbound && pdata->maxbound == maxbound)
    pdata->maxbound = build_all_ones_cst (size_type_node);

  if (visited)
    BITMAP_FREE (visited);

  return !integer_all_onesp (pdata->maxlen);
}
1699
1700 /* Return the maximum value for ARG given RKIND (see strlen_range_kind).
1701 For ARG of pointer types, NONSTR indicates if the caller is prepared
1702 to handle unterminated strings. For integer ARG and when RKIND ==
1703 SRK_INT_VALUE, NONSTR must be null.
1704
1705 If an unterminated array is discovered and our caller handles
1706 unterminated arrays, then bubble up the offending DECL and
1707 return the maximum size. Otherwise return NULL. */
1708
1709 static tree
1710 get_maxval_strlen (tree arg, strlen_range_kind rkind, tree *nonstr = NULL)
1711 {
1712 /* A non-null NONSTR is meaningless when determining the maximum
1713 value of an integer ARG. */
1714 gcc_assert (rkind != SRK_INT_VALUE || nonstr == NULL);
1715 /* ARG must have an integral type when RKIND says so. */
1716 gcc_assert (rkind != SRK_INT_VALUE || INTEGRAL_TYPE_P (TREE_TYPE (arg)));
1717
1718 bitmap visited = NULL;
1719
1720 /* Reset DATA.MAXLEN if the call fails or when DATA.MAXLEN
1721 is unbounded. */
1722 c_strlen_data lendata = { };
1723 if (!get_range_strlen (arg, &visited, rkind, &lendata, /* eltsize = */1))
1724 lendata.maxlen = NULL_TREE;
1725 else if (lendata.maxlen && integer_all_onesp (lendata.maxlen))
1726 lendata.maxlen = NULL_TREE;
1727
1728 if (visited)
1729 BITMAP_FREE (visited);
1730
1731 if (nonstr)
1732 {
1733 /* For callers prepared to handle unterminated arrays set
1734 *NONSTR to point to the declaration of the array and return
1735 the maximum length/size. */
1736 *nonstr = lendata.decl;
1737 return lendata.maxlen;
1738 }
1739
1740 /* Fail if the constant array isn't nul-terminated. */
1741 return lendata.decl ? NULL_TREE : lendata.maxlen;
1742 }
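
/* Illustration of the above: get_maxval_strlen ("abc", SRK_STRLEN)
   yields 3. For an unterminated 'const char a[4] = "abcd";', a caller
   that passes a non-null NONSTR would get A's DECL in *NONSTR together
   with the maximum size, while a caller passing none gets NULL_TREE.  */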
1743
1744
1745 /* Fold function call to builtin strcpy with arguments DEST and SRC.
1746 Return false if no simplification can be made, true if the call
1747 was simplified. */
1748
1749 static bool
1750 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
1751 tree dest, tree src)
1752 {
1753 gimple *stmt = gsi_stmt (*gsi);
1754 location_t loc = gimple_location (stmt);
1755 tree fn;
1756
1757 /* If SRC and DEST are the same (and not volatile), return DEST. */
1758 if (operand_equal_p (src, dest, 0))
1759 {
1760 /* Issue -Wrestrict unless the pointers are null (those do
1761 not point to objects and so do not indicate an overlap;
1762 such calls could be the result of sanitization and jump
1763 threading). */
1764 if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
1765 {
1766 tree func = gimple_call_fndecl (stmt);
1767
1768 warning_at (loc, OPT_Wrestrict,
1769 "%qD source argument is the same as destination",
1770 func);
1771 }
1772
1773 replace_call_with_value (gsi, dest);
1774 return true;
1775 }
1776
1777 if (optimize_function_for_size_p (cfun))
1778 return false;
1779
1780 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1781 if (!fn)
1782 return false;
1783
1784 /* Set to non-null if SRC refers to an unterminated array. */
1785 tree nonstr = NULL;
1786 tree len = get_maxval_strlen (src, SRK_STRLEN, &nonstr);
1787
1788 if (nonstr)
1789 {
1790 /* Avoid folding calls with unterminated arrays. */
1791 if (!gimple_no_warning_p (stmt))
1792 warn_string_no_nul (loc, "strcpy", src, nonstr);
1793 gimple_set_no_warning (stmt, true);
1794 return false;
1795 }
1796
1797 if (!len)
1798 return false;
1799
1800 len = fold_convert_loc (loc, size_type_node, len);
1801 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1802 len = force_gimple_operand_gsi (gsi, len, true,
1803 NULL_TREE, true, GSI_SAME_STMT);
1804 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1805 replace_call_with_call_and_fold (gsi, repl);
1806 return true;
1807 }
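
/* In effect the folder above turns
     strcpy (d, "hello");
   into
     memcpy (d, "hello", 6);
   when not optimizing for size, since the source length 5 plus the
   terminating nul is known at compile time.  */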
1808
1809 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1810 Return false if no simplification can be made, true otherwise. */
1812
1813 static bool
1814 gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1815 tree dest, tree src, tree len)
1816 {
1817 gimple *stmt = gsi_stmt (*gsi);
1818 location_t loc = gimple_location (stmt);
1819 bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;
1820
1821 /* If the LEN parameter is zero, return DEST. */
1822 if (integer_zerop (len))
1823 {
1824 /* Avoid warning if the destination refers to an array/pointer
1825 decorated with attribute nonstring. */
1826 if (!nonstring)
1827 {
1828 tree fndecl = gimple_call_fndecl (stmt);
1829
1830 /* Warn about the lack of nul termination: the result is not
1831 a (nul-terminated) string. */
1832 tree slen = get_maxval_strlen (src, SRK_STRLEN);
1833 if (slen && !integer_zerop (slen))
1834 warning_at (loc, OPT_Wstringop_truncation,
1835 "%G%qD destination unchanged after copying no bytes "
1836 "from a string of length %E",
1837 stmt, fndecl, slen);
1838 else
1839 warning_at (loc, OPT_Wstringop_truncation,
1840 "%G%qD destination unchanged after copying no bytes",
1841 stmt, fndecl);
1842 }
1843
1844 replace_call_with_value (gsi, dest);
1845 return true;
1846 }
1847
1848 /* We can't compare slen with len as constants below if len is not a
1849 constant. */
1850 if (TREE_CODE (len) != INTEGER_CST)
1851 return false;
1852
1853 /* Now, we must be passed a constant src ptr parameter. */
1854 tree slen = get_maxval_strlen (src, SRK_STRLEN);
1855 if (!slen || TREE_CODE (slen) != INTEGER_CST)
1856 return false;
1857
1858 /* The size of the source string including the terminating nul. */
1859 tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1860
1861 /* We do not support simplification of this case, though we do
1862 support it when expanding trees into RTL. */
1863 /* FIXME: generate a call to __builtin_memset. */
1864 if (tree_int_cst_lt (ssize, len))
1865 return false;
1866
1867 /* Diagnose truncation that leaves the copy unterminated. */
1868 maybe_diag_stxncpy_trunc (*gsi, src, len);
1869
1870 /* OK transform into builtin memcpy. */
1871 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1872 if (!fn)
1873 return false;
1874
1875 len = fold_convert_loc (loc, size_type_node, len);
1876 len = force_gimple_operand_gsi (gsi, len, true,
1877 NULL_TREE, true, GSI_SAME_STMT);
1878 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1879 replace_call_with_call_and_fold (gsi, repl);
1880
1881 return true;
1882 }
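
/* For instance,
     strncpy (d, "ab", 3);
   becomes
     memcpy (d, "ab", 3);
   because the bound covers the string and its nul, whereas
   strncpy (d, "ab", 8) is left alone: the trailing zero padding
   would call for the memset noted in the FIXME above.  */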
1883
1884 /* Fold function call to builtin strchr or strrchr.
1885 If both arguments are constant, evaluate and fold the result,
1886 otherwise simplify str(r)chr (str, 0) into str + strlen (str).
1887 In general strlen is significantly faster than strchr
1888 due to being a simpler operation. */
1889 static bool
1890 gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
1891 {
1892 gimple *stmt = gsi_stmt (*gsi);
1893 tree str = gimple_call_arg (stmt, 0);
1894 tree c = gimple_call_arg (stmt, 1);
1895 location_t loc = gimple_location (stmt);
1896 const char *p;
1897 char ch;
1898
1899 if (!gimple_call_lhs (stmt))
1900 return false;
1901
1902 if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
1903 {
1904 const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);
1905
1906 if (p1 == NULL)
1907 {
1908 replace_call_with_value (gsi, integer_zero_node);
1909 return true;
1910 }
1911
1912 tree len = build_int_cst (size_type_node, p1 - p);
1913 gimple_seq stmts = NULL;
1914 gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1915 POINTER_PLUS_EXPR, str, len);
1916 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1917 gsi_replace_with_seq_vops (gsi, stmts);
1918 return true;
1919 }
1920
1921 if (!integer_zerop (c))
1922 return false;
1923
1924 /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size. */
1925 if (is_strrchr && optimize_function_for_size_p (cfun))
1926 {
1927 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1928
1929 if (strchr_fn)
1930 {
1931 gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
1932 replace_call_with_call_and_fold (gsi, repl);
1933 return true;
1934 }
1935
1936 return false;
1937 }
1938
1939 tree len;
1940 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1941
1942 if (!strlen_fn)
1943 return false;
1944
1945 /* Create newstr = strlen (str). */
1946 gimple_seq stmts = NULL;
1947 gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
1948 gimple_set_location (new_stmt, loc);
1949 len = create_tmp_reg_or_ssa_name (size_type_node);
1950 gimple_call_set_lhs (new_stmt, len);
1951 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1952
1953 /* Create (str p+ strlen (str)). */
1954 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1955 POINTER_PLUS_EXPR, str, len);
1956 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1957 gsi_replace_with_seq_vops (gsi, stmts);
1958 /* gsi now points at the assignment to the lhs, get a
1959 stmt iterator to the strlen.
1960 ??? We can't use gsi_for_stmt as that doesn't work when the
1961 CFG isn't built yet. */
1962 gimple_stmt_iterator gsi2 = *gsi;
1963 gsi_prev (&gsi2);
1964 fold_stmt (&gsi2);
1965 return true;
1966 }
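
/* Examples of the above:
     strchr ("hello", 'l')  =>  "hello" + 2
     strchr (s, 0)          =>  s + strlen (s)
   and, when optimizing for size,
     strrchr (s, 0)         =>  strchr (s, 0).  */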
1967
1968 /* Fold function call to builtin strstr.
1969 If both arguments are constant, evaluate and fold the result,
1970 additionally fold strstr (x, "") into x and strstr (x, "c")
1971 into strchr (x, 'c'). */
1972 static bool
1973 gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
1974 {
1975 gimple *stmt = gsi_stmt (*gsi);
1976 tree haystack = gimple_call_arg (stmt, 0);
1977 tree needle = gimple_call_arg (stmt, 1);
1978 const char *p, *q;
1979
1980 if (!gimple_call_lhs (stmt))
1981 return false;
1982
1983 q = c_getstr (needle);
1984 if (q == NULL)
1985 return false;
1986
1987 if ((p = c_getstr (haystack)))
1988 {
1989 const char *r = strstr (p, q);
1990
1991 if (r == NULL)
1992 {
1993 replace_call_with_value (gsi, integer_zero_node);
1994 return true;
1995 }
1996
1997 tree len = build_int_cst (size_type_node, r - p);
1998 gimple_seq stmts = NULL;
1999 gimple *new_stmt
2000 = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
2001 haystack, len);
2002 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2003 gsi_replace_with_seq_vops (gsi, stmts);
2004 return true;
2005 }
2006
2007 /* For strstr (x, "") return x. */
2008 if (q[0] == '\0')
2009 {
2010 replace_call_with_value (gsi, haystack);
2011 return true;
2012 }
2013
2014 /* Transform strstr (x, "c") into strchr (x, 'c'). */
2015 if (q[1] == '\0')
2016 {
2017 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
2018 if (strchr_fn)
2019 {
2020 tree c = build_int_cst (integer_type_node, q[0]);
2021 gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
2022 replace_call_with_call_and_fold (gsi, repl);
2023 return true;
2024 }
2025 }
2026
2027 return false;
2028 }
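
/* Examples of the above:
     strstr ("abcde", "cd")  =>  "abcde" + 2
     strstr (x, "")          =>  x
     strstr (x, "c")         =>  strchr (x, 'c').  */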
2029
2030 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
2031 to the call.
2032
2033 Return false if no simplification was possible, otherwise true.
2034
2035 The call is replaced in place with a more efficient sequence that
2036 computes the same value (possibly including calls to other builtin
2037 functions). */
2047
2048 static bool
2049 gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
2050 {
2051 gimple *stmt = gsi_stmt (*gsi);
2052 location_t loc = gimple_location (stmt);
2053
2054 const char *p = c_getstr (src);
2055
2056 /* If the string length is zero, return the dst parameter. */
2057 if (p && *p == '\0')
2058 {
2059 replace_call_with_value (gsi, dst);
2060 return true;
2061 }
2062
2063 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
2064 return false;
2065
2066 /* See if we can store by pieces into (dst + strlen(dst)). */
2067 tree newdst;
2068 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
2069 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2070
2071 if (!strlen_fn || !memcpy_fn)
2072 return false;
2073
2074 /* If the length of the source string isn't computable don't
2075 split strcat into strlen and memcpy. */
2076 tree len = get_maxval_strlen (src, SRK_STRLEN);
2077 if (! len)
2078 return false;
2079
2080 /* Create strlen (dst). */
2081 gimple_seq stmts = NULL, stmts2;
2082 gimple *repl = gimple_build_call (strlen_fn, 1, dst);
2083 gimple_set_location (repl, loc);
2084 newdst = create_tmp_reg_or_ssa_name (size_type_node);
2085 gimple_call_set_lhs (repl, newdst);
2086 gimple_seq_add_stmt_without_update (&stmts, repl);
2087
2088 /* Create (dst p+ strlen (dst)). */
2089 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
2090 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
2091 gimple_seq_add_seq_without_update (&stmts, stmts2);
2092
2093 len = fold_convert_loc (loc, size_type_node, len);
2094 len = size_binop_loc (loc, PLUS_EXPR, len,
2095 build_int_cst (size_type_node, 1));
2096 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
2097 gimple_seq_add_seq_without_update (&stmts, stmts2);
2098
2099 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
2100 gimple_seq_add_stmt_without_update (&stmts, repl);
2101 if (gimple_call_lhs (stmt))
2102 {
2103 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
2104 gimple_seq_add_stmt_without_update (&stmts, repl);
2105 gsi_replace_with_seq_vops (gsi, stmts);
2106 /* gsi now points at the assignment to the lhs, get a
2107 stmt iterator to the memcpy call.
2108 ??? We can't use gsi_for_stmt as that doesn't work when the
2109 CFG isn't built yet. */
2110 gimple_stmt_iterator gsi2 = *gsi;
2111 gsi_prev (&gsi2);
2112 fold_stmt (&gsi2);
2113 }
2114 else
2115 {
2116 gsi_replace_with_seq_vops (gsi, stmts);
2117 fold_stmt (gsi);
2118 }
2119 return true;
2120 }
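
/* The transformation above expands, e.g.,
     strcat (d, "xy");
   into the equivalent of
     tmp = strlen (d);
     memcpy (d + tmp, "xy", 3);
   when optimizing for speed and the source length is known.  */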
2121
2122 /* Fold a call to the __strcat_chk builtin with arguments DEST, SRC,
2123 and SIZE. */
2124
2125 static bool
2126 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
2127 {
2128 gimple *stmt = gsi_stmt (*gsi);
2129 tree dest = gimple_call_arg (stmt, 0);
2130 tree src = gimple_call_arg (stmt, 1);
2131 tree size = gimple_call_arg (stmt, 2);
2132 tree fn;
2133 const char *p;
2134
2135
2136 p = c_getstr (src);
2137 /* If the SRC parameter is "", return DEST. */
2138 if (p && *p == '\0')
2139 {
2140 replace_call_with_value (gsi, dest);
2141 return true;
2142 }
2143
2144 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
2145 return false;
2146
2147 /* If __builtin_strcat_chk is used, assume strcat is available. */
2148 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
2149 if (!fn)
2150 return false;
2151
2152 gimple *repl = gimple_build_call (fn, 2, dest, src);
2153 replace_call_with_call_and_fold (gsi, repl);
2154 return true;
2155 }
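
/* I.e., with an unknown destination size,
     __strcat_chk (d, s, (size_t)-1)  =>  strcat (d, s).  */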
2156
2157 /* Simplify a call to the strncat builtin. */
2158
2159 static bool
2160 gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
2161 {
2162 gimple *stmt = gsi_stmt (*gsi);
2163 tree dst = gimple_call_arg (stmt, 0);
2164 tree src = gimple_call_arg (stmt, 1);
2165 tree len = gimple_call_arg (stmt, 2);
2166
2167 const char *p = c_getstr (src);
2168
2169 /* If the requested length is zero, or the src parameter string
2170 length is zero, return the dst parameter. */
2171 if (integer_zerop (len) || (p && *p == '\0'))
2172 {
2173 replace_call_with_value (gsi, dst);
2174 return true;
2175 }
2176
2177 if (TREE_CODE (len) != INTEGER_CST || !p)
2178 return false;
2179
2180 unsigned srclen = strlen (p);
2181
2182 int cmpsrc = compare_tree_int (len, srclen);
2183
2184 /* Return early if the requested len is less than the string length.
2185 Warnings will be issued elsewhere later. */
2186 if (cmpsrc < 0)
2187 return false;
2188
2189 unsigned HOST_WIDE_INT dstsize;
2190
2191 bool nowarn = gimple_no_warning_p (stmt);
2192
2193 if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
2194 {
2195 int cmpdst = compare_tree_int (len, dstsize);
2196
2197 if (cmpdst >= 0)
2198 {
2199 tree fndecl = gimple_call_fndecl (stmt);
2200
2201 /* Strncat copies (at most) LEN bytes and always appends
2202 the terminating NUL so the specified bound should never
2203 be equal to (or greater than) the size of the destination.
2204 If it is, the copy could overflow. */
2205 location_t loc = gimple_location (stmt);
2206 nowarn = warning_at (loc, OPT_Wstringop_overflow_,
2207 cmpdst == 0
2208 ? G_("%G%qD specified bound %E equals "
2209 "destination size")
2210 : G_("%G%qD specified bound %E exceeds "
2211 "destination size %wu"),
2212 stmt, fndecl, len, dstsize);
2213 if (nowarn)
2214 gimple_set_no_warning (stmt, true);
2215 }
2216 }
2217
2218 if (!nowarn && cmpsrc == 0)
2219 {
2220 tree fndecl = gimple_call_fndecl (stmt);
2221 location_t loc = gimple_location (stmt);
2222
2223 /* To avoid possible overflow the specified bound should also
2224 not be equal to the length of the source, even when the size
2225 of the destination is unknown (it is not an uncommon mistake
2226 to pass the length of the source as the bound to strncat). */
2227 if (warning_at (loc, OPT_Wstringop_overflow_,
2228 "%G%qD specified bound %E equals source length",
2229 stmt, fndecl, len))
2230 gimple_set_no_warning (stmt, true);
2231 }
2232
2233 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
2234
2235 /* If the replacement _DECL isn't initialized, don't do the
2236 transformation. */
2237 if (!fn)
2238 return false;
2239
2240 /* Otherwise, emit a call to strcat. */
2241 gcall *repl = gimple_build_call (fn, 2, dst, src);
2242 replace_call_with_call_and_fold (gsi, repl);
2243 return true;
2244 }
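
/* For example, with 'char d[8]',
     strncat (d, "abc", 4)  =>  strcat (d, "abc")
   since the bound exceeds the known source length, while
   strncat (d, "abc", 3) draws the "bound equals source length"
   warning above before the same transformation.  */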
2245
2246 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2247 LEN, and SIZE. */
2248
2249 static bool
2250 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
2251 {
2252 gimple *stmt = gsi_stmt (*gsi);
2253 tree dest = gimple_call_arg (stmt, 0);
2254 tree src = gimple_call_arg (stmt, 1);
2255 tree len = gimple_call_arg (stmt, 2);
2256 tree size = gimple_call_arg (stmt, 3);
2257 tree fn;
2258 const char *p;
2259
2260 p = c_getstr (src);
2261 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2262 if ((p && *p == '\0')
2263 || integer_zerop (len))
2264 {
2265 replace_call_with_value (gsi, dest);
2266 return true;
2267 }
2268
2269 if (! tree_fits_uhwi_p (size))
2270 return false;
2271
2272 if (! integer_all_onesp (size))
2273 {
2274 tree src_len = c_strlen (src, 1);
2275 if (src_len
2276 && tree_fits_uhwi_p (src_len)
2277 && tree_fits_uhwi_p (len)
2278 && ! tree_int_cst_lt (len, src_len))
2279 {
2280 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2281 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
2282 if (!fn)
2283 return false;
2284
2285 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2286 replace_call_with_call_and_fold (gsi, repl);
2287 return true;
2288 }
2289 return false;
2290 }
2291
2292 /* If __builtin_strncat_chk is used, assume strncat is available. */
2293 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2294 if (!fn)
2295 return false;
2296
2297 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2298 replace_call_with_call_and_fold (gsi, repl);
2299 return true;
2300 }
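
/* Illustrations of the above (names are generic):
     __strncat_chk (d, s, n, (size_t)-1)  =>  strncat (d, s, n)
     __strncat_chk (d, "ab", 5, dstsize)  =>  __strcat_chk (d, "ab", dstsize)
   the latter because the bound 5 covers the whole source.  */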
2301
2302 /* Build and append gimple statements to STMTS that load the first
2303 character of the memory location identified by STR. LOC is the
2304 location of the statement. */
2305
2306 static tree
2307 gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2308 {
2309 tree var;
2310
2311 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2312 tree cst_uchar_ptr_node
2313 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2314 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2315
2316 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2317 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2318 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2319
2320 gimple_assign_set_lhs (stmt, var);
2321 gimple_seq_add_stmt_without_update (stmts, stmt);
2322
2323 return var;
2324 }
2325
2326 /* Fold a call to the str{n}{case}cmp builtin pointed to by the GSI iterator. */
2327
2328 static bool
2329 gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
2330 {
2331 gimple *stmt = gsi_stmt (*gsi);
2332 tree callee = gimple_call_fndecl (stmt);
2333 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2334
2335 tree type = integer_type_node;
2336 tree str1 = gimple_call_arg (stmt, 0);
2337 tree str2 = gimple_call_arg (stmt, 1);
2338 tree lhs = gimple_call_lhs (stmt);
2339 tree len = NULL_TREE;
2340 unsigned HOST_WIDE_INT bound = HOST_WIDE_INT_M1U;
2341
2342 /* Handle strncmp and strncasecmp functions. */
2343 if (gimple_call_num_args (stmt) == 3)
2344 {
2345 len = gimple_call_arg (stmt, 2);
2346 if (tree_fits_uhwi_p (len))
2347 bound = tree_to_uhwi (len);
2348 }
2349
2350 /* If the LEN parameter is zero, return zero. */
2351 if (bound == 0)
2352 {
2353 replace_call_with_value (gsi, integer_zero_node);
2354 return true;
2355 }
2356
2357 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
2358 if (operand_equal_p (str1, str2, 0))
2359 {
2360 replace_call_with_value (gsi, integer_zero_node);
2361 return true;
2362 }
2363
2364 /* Initially set to the number of characters, including the terminating
2365 nul if each array has one. LENx == strnlen (Sx, LENx) implies that
2366 the array Sx is not terminated by a nul.
2367 For nul-terminated strings LENx is then adjusted down to the string
2368 length so that LENx == NULPOSx holds. */
2369 unsigned HOST_WIDE_INT len1 = HOST_WIDE_INT_MAX, len2 = len1;
2370 const char *p1 = c_getstr (str1, &len1);
2371 const char *p2 = c_getstr (str2, &len2);
2372
2373 /* The position of the terminating nul character if one exists, otherwise
2374 a value greater than LENx. */
2375 unsigned HOST_WIDE_INT nulpos1 = HOST_WIDE_INT_MAX, nulpos2 = nulpos1;
2376
2377 if (p1)
2378 {
2379 size_t n = strnlen (p1, len1);
2380 if (n < len1)
2381 len1 = nulpos1 = n;
2382 }
2383
2384 if (p2)
2385 {
2386 size_t n = strnlen (p2, len2);
2387 if (n < len2)
2388 len2 = nulpos2 = n;
2389 }
2390
2391 /* For known strings, return an immediate value. */
2392 if (p1 && p2)
2393 {
2394 int r = 0;
2395 bool known_result = false;
2396
2397 switch (fcode)
2398 {
2399 case BUILT_IN_STRCMP:
2400 case BUILT_IN_STRCMP_EQ:
2401 if (len1 != nulpos1 || len2 != nulpos2)
2402 break;
2403
2404 r = strcmp (p1, p2);
2405 known_result = true;
2406 break;
2407
2408 case BUILT_IN_STRNCMP:
2409 case BUILT_IN_STRNCMP_EQ:
2410 {
2411 /* Reduce the bound to be no more than the length
2412 of the shorter of the two strings, or the sizes
2413 of the unterminated arrays. */
2414 unsigned HOST_WIDE_INT n = bound;
2415
2416 if (len1 == nulpos1 && len1 < n)
2417 n = len1 + 1;
2418 if (len2 == nulpos2 && len2 < n)
2419 n = len2 + 1;
2420
2421 if (MIN (nulpos1, nulpos2) + 1 < n)
2422 break;
2423
2424 r = strncmp (p1, p2, n);
2425 known_result = true;
2426 break;
2427 }
2428 /* The only handleable situation is where the strings are equal (result
2429 0), which is already handled by the operand_equal_p case above. */
2430 case BUILT_IN_STRCASECMP:
2431 break;
2432 case BUILT_IN_STRNCASECMP:
2433 {
2434 if (bound == HOST_WIDE_INT_M1U)
2435 break;
2436 r = strncmp (p1, p2, bound);
2437 if (r == 0)
2438 known_result = true;
2439 break;
2440 }
2441 default:
2442 gcc_unreachable ();
2443 }
2444
2445 if (known_result)
2446 {
2447 replace_call_with_value (gsi, build_cmp_result (type, r));
2448 return true;
2449 }
2450 }
2451
2452 bool nonzero_bound = (bound >= 1 && bound < HOST_WIDE_INT_M1U)
2453 || fcode == BUILT_IN_STRCMP
2454 || fcode == BUILT_IN_STRCMP_EQ
2455 || fcode == BUILT_IN_STRCASECMP;
2456
2457 location_t loc = gimple_location (stmt);
2458
2459 /* If the second arg is "", return *(const unsigned char*)arg1. */
2460 if (p2 && *p2 == '\0' && nonzero_bound)
2461 {
2462 gimple_seq stmts = NULL;
2463 tree var = gimple_load_first_char (loc, str1, &stmts);
2464 if (lhs)
2465 {
2466 stmt = gimple_build_assign (lhs, NOP_EXPR, var);
2467 gimple_seq_add_stmt_without_update (&stmts, stmt);
2468 }
2469
2470 gsi_replace_with_seq_vops (gsi, stmts);
2471 return true;
2472 }
2473
2474 /* If the first arg is "", return -*(const unsigned char*)arg2. */
2475 if (p1 && *p1 == '\0' && nonzero_bound)
2476 {
2477 gimple_seq stmts = NULL;
2478 tree var = gimple_load_first_char (loc, str2, &stmts);
2479
2480 if (lhs)
2481 {
2482 tree c = create_tmp_reg_or_ssa_name (integer_type_node);
2483 stmt = gimple_build_assign (c, NOP_EXPR, var);
2484 gimple_seq_add_stmt_without_update (&stmts, stmt);
2485
2486 stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
2487 gimple_seq_add_stmt_without_update (&stmts, stmt);
2488 }
2489
2490 gsi_replace_with_seq_vops (gsi, stmts);
2491 return true;
2492 }
2493
2494 /* If BOUND is one, return an expression corresponding to
2495 (*(const unsigned char*)arg2 - *(const unsigned char*)arg1). */
2496 if (fcode == BUILT_IN_STRNCMP && bound == 1)
2497 {
2498 gimple_seq stmts = NULL;
2499 tree temp1 = gimple_load_first_char (loc, str1, &stmts);
2500 tree temp2 = gimple_load_first_char (loc, str2, &stmts);
2501
2502 if (lhs)
2503 {
2504 tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
2505 gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
2506 gimple_seq_add_stmt_without_update (&stmts, convert1);
2507
2508 tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
2509 gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
2510 gimple_seq_add_stmt_without_update (&stmts, convert2);
2511
2512 stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
2513 gimple_seq_add_stmt_without_update (&stmts, stmt);
2514 }
2515
2516 gsi_replace_with_seq_vops (gsi, stmts);
2517 return true;
2518 }
2519
2520 /* If BOUND is greater than the length of one constant string,
2521 and the other argument is also a nul-terminated string, replace
2522 strncmp with strcmp. */
2523 if (fcode == BUILT_IN_STRNCMP
2524 && bound > 0 && bound < HOST_WIDE_INT_M1U
2525 && ((p2 && len2 < bound && len2 == nulpos2)
2526 || (p1 && len1 < bound && len1 == nulpos1)))
2527 {
2528 tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
2529 if (!fn)
2530 return false;
2531 gimple *repl = gimple_build_call (fn, 2, str1, str2);
2532 replace_call_with_call_and_fold (gsi, repl);
2533 return true;
2534 }
2535
2536 return false;
2537 }
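
/* A few instances of the foldings above:
     strcmp (s, s)          =>  0
     strncmp (s, t, 0)      =>  0
     strcmp ("abc", "abd")  =>  a negative constant
     strcmp (s, "")         =>  *(const unsigned char *) s
     strncmp (s, t, 1)      =>  *(const unsigned char *) s
                                - *(const unsigned char *) t
     strncmp (s, "abc", 8)  =>  strcmp (s, "abc").  */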
2538
2539 /* Fold a call to the memchr builtin pointed to by the GSI iterator. */
2540
2541 static bool
2542 gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
2543 {
2544 gimple *stmt = gsi_stmt (*gsi);
2545 tree lhs = gimple_call_lhs (stmt);
2546 tree arg1 = gimple_call_arg (stmt, 0);
2547 tree arg2 = gimple_call_arg (stmt, 1);
2548 tree len = gimple_call_arg (stmt, 2);
2549
2550 /* If the LEN parameter is zero, return a null pointer. */
2551 if (integer_zerop (len))
2552 {
2553 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2554 return true;
2555 }
2556
2557 char c;
2558 if (TREE_CODE (arg2) != INTEGER_CST
2559 || !tree_fits_uhwi_p (len)
2560 || !target_char_cst_p (arg2, &c))
2561 return false;
2562
2563 unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
2564 unsigned HOST_WIDE_INT string_length;
2565 const char *p1 = c_getstr (arg1, &string_length);
2566
2567 if (p1)
2568 {
2569 const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
2570 if (r == NULL)
2571 {
2572 tree mem_size, offset_node;
2573 string_constant (arg1, &offset_node, &mem_size, NULL);
2574 unsigned HOST_WIDE_INT offset = (offset_node == NULL_TREE)
2575 ? 0 : tree_to_uhwi (offset_node);
2576 /* MEM_SIZE is the size of the array the string literal
2577 is stored in. */
2578 unsigned HOST_WIDE_INT string_size = tree_to_uhwi (mem_size) - offset;
2579 gcc_checking_assert (string_length <= string_size);
2580 if (length <= string_size)
2581 {
2582 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2583 return true;
2584 }
2585 }
2586 else
2587 {
2588 unsigned HOST_WIDE_INT offset = r - p1;
2589 gimple_seq stmts = NULL;
2590 if (lhs != NULL_TREE)
2591 {
2592 tree offset_cst = build_int_cst (TREE_TYPE (len), offset);
2593 gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
2594 arg1, offset_cst);
2595 gimple_seq_add_stmt_without_update (&stmts, stmt);
2596 }
2597 else
2598 gimple_seq_add_stmt_without_update (&stmts,
2599 gimple_build_nop ());
2600
2601 gsi_replace_with_seq_vops (gsi, stmts);
2602 return true;
2603 }
2604 }
2605
2606 return false;
2607 }
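
/* Examples: with a constant bound N,
     memchr ("abcd", 'c', N)  =>  "abcd" + 2  when N >= 3
   and a null pointer when N <= 2; memchr (s, c, 0) is always folded
   to a null pointer.  */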
2608
2609 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
2610 to the call. UNLOCKED is true if this is actually a call to
2611 fputs_unlocked. Return false if no simplification was possible,
2612 true otherwise. */
2615
2616 static bool
2617 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
2618 tree arg0, tree arg1,
2619 bool unlocked)
2620 {
2621 gimple *stmt = gsi_stmt (*gsi);
2622
2623 /* If we're using an unlocked function, assume the other unlocked
2624 functions exist explicitly. */
2625 tree const fn_fputc = (unlocked
2626 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
2627 : builtin_decl_implicit (BUILT_IN_FPUTC));
2628 tree const fn_fwrite = (unlocked
2629 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
2630 : builtin_decl_implicit (BUILT_IN_FWRITE));
2631
2632 /* If the return value is used, don't do the transformation. */
2633 if (gimple_call_lhs (stmt))
2634 return false;
2635
2636 /* Get the length of the string passed to fputs. If the length
2637 can't be determined, punt. */
2638 tree len = get_maxval_strlen (arg0, SRK_STRLEN);
2639 if (!len
2640 || TREE_CODE (len) != INTEGER_CST)
2641 return false;
2642
2643 switch (compare_tree_int (len, 1))
2644 {
2645 case -1: /* length is 0, delete the call entirely. */
2646 replace_call_with_value (gsi, integer_zero_node);
2647 return true;
2648
2649 case 0: /* length is 1, call fputc. */
2650 {
2651 const char *p = c_getstr (arg0);
2652 if (p != NULL)
2653 {
2654 if (!fn_fputc)
2655 return false;
2656
2657 gimple *repl = gimple_build_call (fn_fputc, 2,
2658 build_int_cst
2659 (integer_type_node, p[0]), arg1);
2660 replace_call_with_call_and_fold (gsi, repl);
2661 return true;
2662 }
2663 }
2664 /* FALLTHROUGH */
2665 case 1: /* length is greater than 1, call fwrite. */
2666 {
2667 /* If optimizing for size keep fputs. */
2668 if (optimize_function_for_size_p (cfun))
2669 return false;
2670 /* New argument list transforming fputs(string, stream) to
2671 fwrite(string, 1, len, stream). */
2672 if (!fn_fwrite)
2673 return false;
2674
2675 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
2676 size_one_node, len, arg1);
2677 replace_call_with_call_and_fold (gsi, repl);
2678 return true;
2679 }
2680 default:
2681 gcc_unreachable ();
2682 }
2683 return false;
2684 }
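
/* I.e., fputs ("", f) is removed outright, fputs ("x", f) becomes
   fputc ('x', f), and fputs ("xy", f) becomes fwrite ("xy", 1, 2, f)
   when optimizing for speed, all provided the result is unused.  */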
2685
2686 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
2687 DEST, SRC, LEN, and SIZE are the arguments to the call.
2688 FCODE is the BUILT_IN_* code of the builtin. Return false if
2689 no simplification can be made. */
2691
2692 static bool
2693 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
2694 tree dest, tree src, tree len, tree size,
2695 enum built_in_function fcode)
2696 {
2697 gimple *stmt = gsi_stmt (*gsi);
2698 location_t loc = gimple_location (stmt);
2699 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2700 tree fn;
2701
2702 /* If SRC and DEST are the same (and not volatile), return DEST
2703 (resp. DEST+LEN for __mempcpy_chk). */
2704 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
2705 {
2706 if (fcode != BUILT_IN_MEMPCPY_CHK)
2707 {
2708 replace_call_with_value (gsi, dest);
2709 return true;
2710 }
2711 else
2712 {
2713 gimple_seq stmts = NULL;
2714 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
2715 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
2716 TREE_TYPE (dest), dest, len);
2717 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2718 replace_call_with_value (gsi, temp);
2719 return true;
2720 }
2721 }
2722
2723 if (! tree_fits_uhwi_p (size))
2724 return false;
2725
2726 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
2727 if (! integer_all_onesp (size))
2728 {
2729 if (! tree_fits_uhwi_p (len))
2730 {
2731 /* If LEN is not constant, try MAXLEN too.
2732 For MAXLEN only allow optimizing into the non-_chk function
2733 if SIZE is >= MAXLEN; never convert to __chk_fail (). */
2734 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2735 {
2736 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
2737 {
2738 /* (void) __mempcpy_chk () can be optimized into
2739 (void) __memcpy_chk (). */
2740 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2741 if (!fn)
2742 return false;
2743
2744 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2745 replace_call_with_call_and_fold (gsi, repl);
2746 return true;
2747 }
2748 return false;
2749 }
2750 }
2751 else
2752 maxlen = len;
2753
2754 if (tree_int_cst_lt (size, maxlen))
2755 return false;
2756 }
2757
2758 fn = NULL_TREE;
2759 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
2760 mem{cpy,pcpy,move,set} is available. */
2761 switch (fcode)
2762 {
2763 case BUILT_IN_MEMCPY_CHK:
2764 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
2765 break;
2766 case BUILT_IN_MEMPCPY_CHK:
2767 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
2768 break;
2769 case BUILT_IN_MEMMOVE_CHK:
2770 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
2771 break;
2772 case BUILT_IN_MEMSET_CHK:
2773 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
2774 break;
2775 default:
2776 break;
2777 }
2778
2779 if (!fn)
2780 return false;
2781
2782 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2783 replace_call_with_call_and_fold (gsi, repl);
2784 return true;
2785 }
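
/* For example, when the object size is unknown,
     __memcpy_chk (d, s, n, (size_t)-1)  =>  memcpy (d, s, n);
   the same replacement is made when LEN (or its maximum value) is
   known not to exceed a constant SIZE.  */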
2786
2787 /* Fold a call to the __st[rp]cpy_chk builtin.
2788 DEST, SRC, and SIZE are the arguments to the call.
2789 FCODE is the BUILT_IN_* code of the builtin. Return false if
2790 no simplification can be made. */
2792
2793 static bool
2794 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
2795 tree dest,
2796 tree src, tree size,
2797 enum built_in_function fcode)
2798 {
2799 gimple *stmt = gsi_stmt (*gsi);
2800 location_t loc = gimple_location (stmt);
2801 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2802 tree len, fn;
2803
2804 /* If SRC and DEST are the same (and not volatile), return DEST. */
2805 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
2806 {
2807 /* Issue -Wrestrict unless the pointers are null (those do
2808 not point to objects and so do not indicate an overlap;
2809 such calls could be the result of sanitization and jump
2810 threading). */
2811 if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
2812 {
2813 tree func = gimple_call_fndecl (stmt);
2814
2815 warning_at (loc, OPT_Wrestrict,
2816 "%qD source argument is the same as destination",
2817 func);
2818 }
2819
2820 replace_call_with_value (gsi, dest);
2821 return true;
2822 }
2823
2824 if (! tree_fits_uhwi_p (size))
2825 return false;
2826
2827 tree maxlen = get_maxval_strlen (src, SRK_STRLENMAX);
2828 if (! integer_all_onesp (size))
2829 {
2830 len = c_strlen (src, 1);
2831 if (! len || ! tree_fits_uhwi_p (len))
2832 {
2833 /* If LEN is not constant, try MAXLEN too.
2834 For MAXLEN only allow optimizing into the non-_chk function
2835 if SIZE is >= MAXLEN; never convert to __chk_fail (). */
2836 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2837 {
2838 if (fcode == BUILT_IN_STPCPY_CHK)
2839 {
2840 if (! ignore)
2841 return false;
2842
2843 /* If return value of __stpcpy_chk is ignored,
2844 optimize into __strcpy_chk. */
2845 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
2846 if (!fn)
2847 return false;
2848
2849 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2850 replace_call_with_call_and_fold (gsi, repl);
2851 return true;
2852 }
2853
2854 if (! len || TREE_SIDE_EFFECTS (len))
2855 return false;
2856
2857 /* If c_strlen returned something, but not a constant,
2858 transform __strcpy_chk into __memcpy_chk. */
2859 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2860 if (!fn)
2861 return false;
2862
2863 gimple_seq stmts = NULL;
2864 len = force_gimple_operand (len, &stmts, true, NULL_TREE);
2865 len = gimple_convert (&stmts, loc, size_type_node, len);
2866 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
2867 build_int_cst (size_type_node, 1));
2868 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2869 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2870 replace_call_with_call_and_fold (gsi, repl);
2871 return true;
2872 }
2873 }
2874 else
2875 maxlen = len;
2876
2877 if (! tree_int_cst_lt (maxlen, size))
2878 return false;
2879 }
2880
2881 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
2882 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
2883 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
2884 if (!fn)
2885 return false;
2886
2887 gimple *repl = gimple_build_call (fn, 2, dest, src);
2888 replace_call_with_call_and_fold (gsi, repl);
2889 return true;
2890 }
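
/* For instance,
     __strcpy_chk (d, "abc", 8)  =>  strcpy (d, "abc")
   since strlen ("abc") < 8, while a nonconstant but bounded source
   length turns __strcpy_chk into __memcpy_chk as above.  */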
2891
2892 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and
2893 SIZE are the arguments to the call. FCODE is the BUILT_IN_* code of
2894 the builtin. Return false if no simplification can be made. */
2896
2897 static bool
2898 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
2899 tree dest, tree src,
2900 tree len, tree size,
2901 enum built_in_function fcode)
2902 {
2903 gimple *stmt = gsi_stmt (*gsi);
2904 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2905 tree fn;
2906
2907 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
2908 {
2909 /* If return value of __stpncpy_chk is ignored,
2910 optimize into __strncpy_chk. */
2911 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
2912 if (fn)
2913 {
2914 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2915 replace_call_with_call_and_fold (gsi, repl);
2916 return true;
2917 }
2918 }
2919
2920 if (! tree_fits_uhwi_p (size))
2921 return false;
2922
2923 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
2924 if (! integer_all_onesp (size))
2925 {
2926 if (! tree_fits_uhwi_p (len))
2927 {
2928 /* If LEN is not constant, try MAXLEN too.
2929 For MAXLEN only allow optimizing into the non-_chk function
2930 if SIZE is >= MAXLEN; never convert to __chk_fail (). */
2931 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2932 return false;
2933 }
2934 else
2935 maxlen = len;
2936
2937 if (tree_int_cst_lt (size, maxlen))
2938 return false;
2939 }
2940
2941 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2942 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
2943 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
2944 if (!fn)
2945 return false;
2946
2947 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2948 replace_call_with_call_and_fold (gsi, repl);
2949 return true;
2950 }
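
/* Examples of the above:
     (void) __stpncpy_chk (d, s, n, sz)   =>  __strncpy_chk (d, s, n, sz)
     __strncpy_chk (d, s, n, (size_t)-1)  =>  strncpy (d, s, n).  */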
2951
2952 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
2953 Return false if no simplification can be made, true otherwise. */
2954
2955 static bool
2956 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
2957 {
2958 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2959 location_t loc = gimple_location (stmt);
2960 tree dest = gimple_call_arg (stmt, 0);
2961 tree src = gimple_call_arg (stmt, 1);
2962 tree fn, lenp1;
2963
2964 /* If the result is unused, replace stpcpy with strcpy. */
2965 if (gimple_call_lhs (stmt) == NULL_TREE)
2966 {
2967 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2968 if (!fn)
2969 return false;
2970 gimple_call_set_fndecl (stmt, fn);
2971 fold_stmt (gsi);
2972 return true;
2973 }
2974
2975 /* DATA.DECL is set to non-null if SRC refers to an unterminated array. */
2976 c_strlen_data data = { };
2977 tree len = c_strlen (src, 1, &data, 1);
2978 if (!len
2979 || TREE_CODE (len) != INTEGER_CST)
2980 {
2981 data.decl = unterminated_array (src);
2982 if (!data.decl)
2983 return false;
2984 }
2985
2986 if (data.decl)
2987 {
2988 /* Avoid folding calls with unterminated arrays. */
2989 if (!gimple_no_warning_p (stmt))
2990 warn_string_no_nul (loc, "stpcpy", src, data.decl);
2991 gimple_set_no_warning (stmt, true);
2992 return false;
2993 }
2994
2995 if (optimize_function_for_size_p (cfun)
2996 /* If length is zero it's small enough. */
2997 && !integer_zerop (len))
2998 return false;
2999
3000 /* If the source has a known length replace stpcpy with memcpy. */
3001 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3002 if (!fn)
3003 return false;
3004
3005 gimple_seq stmts = NULL;
3006 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
3007 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
3008 tem, build_int_cst (size_type_node, 1));
3009 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3010 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
3011 gimple_move_vops (repl, stmt);
3012 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
3013 /* Replace the result with dest + len. */
3014 stmts = NULL;
3015 tem = gimple_convert (&stmts, loc, sizetype, len);
3016 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3017 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
3018 POINTER_PLUS_EXPR, dest, tem);
3019 gsi_replace (gsi, ret, false);
3020 /* Finally fold the memcpy call. */
3021 gimple_stmt_iterator gsi2 = *gsi;
3022 gsi_prev (&gsi2);
3023 fold_stmt (&gsi2);
3024 return true;
3025 }
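
/* In effect, 'p = stpcpy (d, "ab");' becomes
     memcpy (d, "ab", 3);
     p = d + 2;
   using the known source length 2.  */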
3026
3027 /* Fold the call to __{,v}snprintf_chk pointed to by GSI. Return
3028 false if a normal call should be emitted rather than simplifying
3029 the call inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
3030 BUILT_IN_VSNPRINTF_CHK. */
3032
3033 static bool
3034 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
3035 enum built_in_function fcode)
3036 {
3037 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3038 tree dest, size, len, fn, fmt, flag;
3039 const char *fmt_str;
3040
3041 /* Verify the required arguments in the original call. */
3042 if (gimple_call_num_args (stmt) < 5)
3043 return false;
3044
3045 dest = gimple_call_arg (stmt, 0);
3046 len = gimple_call_arg (stmt, 1);
3047 flag = gimple_call_arg (stmt, 2);
3048 size = gimple_call_arg (stmt, 3);
3049 fmt = gimple_call_arg (stmt, 4);
3050
3051 if (! tree_fits_uhwi_p (size))
3052 return false;
3053
3054 if (! integer_all_onesp (size))
3055 {
3056 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
3057 if (! tree_fits_uhwi_p (len))
3058 {
3059 /* If LEN is not constant, try MAXLEN too.
3060 For MAXLEN only allow optimizing into the non-_chk function
3061 if SIZE is >= MAXLEN; never convert to __chk_fail (). */
3062 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
3063 return false;
3064 }
3065 else
3066 maxlen = len;
3067
3068 if (tree_int_cst_lt (size, maxlen))
3069 return false;
3070 }
3071
3072 if (!init_target_chars ())
3073 return false;
3074
3075 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
3076 or if format doesn't contain % chars or is "%s". */
3077 if (! integer_zerop (flag))
3078 {
3079 fmt_str = c_getstr (fmt);
3080 if (fmt_str == NULL)
3081 return false;
3082 if (strchr (fmt_str, target_percent) != NULL
3083 && strcmp (fmt_str, target_percent_s))
3084 return false;
3085 }
3086
3087 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
3088 available. */
3089 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
3090 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
3091 if (!fn)
3092 return false;
3093
3094 /* Replace the called function and the first 5 arguments with 3,
3095 retaining the trailing varargs. */
3096 gimple_call_set_fndecl (stmt, fn);
3097 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
3098 gimple_call_set_arg (stmt, 0, dest);
3099 gimple_call_set_arg (stmt, 1, len);
3100 gimple_call_set_arg (stmt, 2, fmt);
3101 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
3102 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
3103 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
3104 fold_stmt (gsi);
3105 return true;
3106 }
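
/* E.g., with an unknown object size,
     __snprintf_chk (d, n, 0, (size_t)-1, "%d", i)
       =>  snprintf (d, n, "%d", i)
   dropping the flag and size arguments.  */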
3107
3108 /* Fold the call to __{,v}sprintf_chk pointed to by GSI.
3109 Return false if a normal call should be emitted rather than
3110 simplifying the call inline. FCODE is either BUILT_IN_SPRINTF_CHK
3111 or BUILT_IN_VSPRINTF_CHK. */
3112
3113 static bool
3114 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
3115 enum built_in_function fcode)
3116 {
3117 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3118 tree dest, size, len, fn, fmt, flag;
3119 const char *fmt_str;
3120 unsigned nargs = gimple_call_num_args (stmt);
3121
3122 /* Verify the required arguments in the original call. */
3123 if (nargs < 4)
3124 return false;
3125 dest = gimple_call_arg (stmt, 0);
3126 flag = gimple_call_arg (stmt, 1);
3127 size = gimple_call_arg (stmt, 2);
3128 fmt = gimple_call_arg (stmt, 3);
3129
3130 if (! tree_fits_uhwi_p (size))
3131 return false;
3132
3133 len = NULL_TREE;
3134
3135 if (!init_target_chars ())
3136 return false;
3137
3138 /* Check whether the format is a literal string constant. */
3139 fmt_str = c_getstr (fmt);
3140 if (fmt_str != NULL)
3141 {
3142 /* If the format doesn't contain % args or %%, we know the size. */
3143 if (strchr (fmt_str, target_percent) == 0)
3144 {
3145 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
3146 len = build_int_cstu (size_type_node, strlen (fmt_str));
3147 }
3148 /* If the format is "%s" and first ... argument is a string literal,
3149 we know the size too. */
3150 else if (fcode == BUILT_IN_SPRINTF_CHK
3151 && strcmp (fmt_str, target_percent_s) == 0)
3152 {
3153 tree arg;
3154
3155 if (nargs == 5)
3156 {
3157 arg = gimple_call_arg (stmt, 4);
3158 if (POINTER_TYPE_P (TREE_TYPE (arg)))
3159 {
3160 len = c_strlen (arg, 1);
3161 if (! len || ! tree_fits_uhwi_p (len))
3162 len = NULL_TREE;
3163 }
3164 }
3165 }
3166 }
3167
3168 if (! integer_all_onesp (size))
3169 {
3170 if (! len || ! tree_int_cst_lt (len, size))
3171 return false;
3172 }
3173
3174 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
3175 or if format doesn't contain % chars or is "%s". */
3176 if (! integer_zerop (flag))
3177 {
3178 if (fmt_str == NULL)
3179 return false;
3180 if (strchr (fmt_str, target_percent) != NULL
3181 && strcmp (fmt_str, target_percent_s))
3182 return false;
3183 }
3184
3185 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
3186 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
3187 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
3188 if (!fn)
3189 return false;
3190
3191 /* Replace the called function and the first 4 arguments with 2,
3192 retaining the trailing varargs. */
3193 gimple_call_set_fndecl (stmt, fn);
3194 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
3195 gimple_call_set_arg (stmt, 0, dest);
3196 gimple_call_set_arg (stmt, 1, fmt);
3197 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
3198 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
3199 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
3200 fold_stmt (gsi);
3201 return true;
3202 }
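
/* E.g., with an unknown object size,
     __sprintf_chk (d, 0, (size_t)-1, "%d", i)  =>  sprintf (d, "%d", i).  */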
3203
3204 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
3205 ORIG may be null if this is a 2-argument call. We don't attempt to
3206 simplify calls with more than 3 arguments.
3207
3208 Return true if simplification was possible, otherwise false. */
3209
3210 bool
3211 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
3212 {
3213 gimple *stmt = gsi_stmt (*gsi);
3214 tree dest = gimple_call_arg (stmt, 0);
3215 tree fmt = gimple_call_arg (stmt, 1);
3216 tree orig = NULL_TREE;
3217 const char *fmt_str = NULL;
3218
3219 /* Verify the required arguments in the original call. We deal with two
3220 types of sprintf() calls: 'sprintf (str, fmt)' and
3221 'sprintf (dest, "%s", orig)'. */
3222 if (gimple_call_num_args (stmt) > 3)
3223 return false;
3224
3225 if (gimple_call_num_args (stmt) == 3)
3226 orig = gimple_call_arg (stmt, 2);
3227
3228 /* Check whether the format is a literal string constant. */
3229 fmt_str = c_getstr (fmt);
3230 if (fmt_str == NULL)
3231 return false;
3232
3233 if (!init_target_chars ())
3234 return false;
3235
3236 /* If the format doesn't contain % args or %%, use strcpy. */
3237 if (strchr (fmt_str, target_percent) == NULL)
3238 {
3239 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3240
3241 if (!fn)
3242 return false;
3243
3244 /* Don't optimize sprintf (buf, "abc", ptr++). */
3245 if (orig)
3246 return false;
3247
3248 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
3249 'format' is known to contain no % formats. */
3250 gimple_seq stmts = NULL;
3251 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3252
3253 /* Propagate the NO_WARNING bit to avoid issuing the same
3254 warning more than once. */
3255 if (gimple_no_warning_p (stmt))
3256 gimple_set_no_warning (repl, true);
3257
3258 gimple_seq_add_stmt_without_update (&stmts, repl);
3259 if (tree lhs = gimple_call_lhs (stmt))
3260 {
3261 repl = gimple_build_assign (lhs, build_int_cst (TREE_TYPE (lhs),
3262 strlen (fmt_str)));
3263 gimple_seq_add_stmt_without_update (&stmts, repl);
3264 gsi_replace_with_seq_vops (gsi, stmts);
3265 /* gsi now points at the assignment to the lhs, get a
3266 stmt iterator to the strcpy call.
3267 ??? We can't use gsi_for_stmt as that doesn't work when the
3268 CFG isn't built yet. */
3269 gimple_stmt_iterator gsi2 = *gsi;
3270 gsi_prev (&gsi2);
3271 fold_stmt (&gsi2);
3272 }
3273 else
3274 {
3275 gsi_replace_with_seq_vops (gsi, stmts);
3276 fold_stmt (gsi);
3277 }
3278 return true;
3279 }
3280
3281 /* If the format is "%s", use strcpy; if the result is used, the length of ORIG must be known. */
3282 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3283 {
3284 tree fn;
3285 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3286
3287 if (!fn)
3288 return false;
3289
3290 /* Don't crash on sprintf (str1, "%s"). */
3291 if (!orig)
3292 return false;
3293
3294 tree orig_len = NULL_TREE;
3295 if (gimple_call_lhs (stmt))
3296 {
3297 orig_len = get_maxval_strlen (orig, SRK_STRLEN);
3298 if (!orig_len)
3299 return false;
3300 }
3301
3302 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
3303 gimple_seq stmts = NULL;
3304 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3305
3306 /* Propagate the NO_WARNING bit to avoid issuing the same
3307 warning more than once. */
3308 if (gimple_no_warning_p (stmt))
3309 gimple_set_no_warning (repl, true);
3310
3311 gimple_seq_add_stmt_without_update (&stmts, repl);
3312 if (tree lhs = gimple_call_lhs (stmt))
3313 {
3314 if (!useless_type_conversion_p (TREE_TYPE (lhs),
3315 TREE_TYPE (orig_len)))
3316 orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
3317 repl = gimple_build_assign (lhs, orig_len);
3318 gimple_seq_add_stmt_without_update (&stmts, repl);
3319 gsi_replace_with_seq_vops (gsi, stmts);
3320 /* gsi now points at the assignment to the lhs, get a
3321 stmt iterator to the strcpy call.
3322 ??? We can't use gsi_for_stmt as that doesn't work when the
3323 CFG isn't built yet. */
3324 gimple_stmt_iterator gsi2 = *gsi;
3325 gsi_prev (&gsi2);
3326 fold_stmt (&gsi2);
3327 }
3328 else
3329 {
3330 gsi_replace_with_seq_vops (gsi, stmts);
3331 fold_stmt (gsi);
3332 }
3333 return true;
3334 }
3335 return false;
3336 }
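
/* Examples of the above:
     sprintf (d, "abc")    =>  strcpy (d, "abc"), any result set to 3
     sprintf (d, "%s", s)  =>  strcpy (d, s), provided the length of S
                               is known whenever the result is used.  */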
3337
3338 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
3339 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
3340 attempt to simplify calls with more than 4 arguments.
3341
3342 Return true if simplification was possible, otherwise false. */
3343
3344 bool
3345 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
3346 {
3347 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3348 tree dest = gimple_call_arg (stmt, 0);
3349 tree destsize = gimple_call_arg (stmt, 1);
3350 tree fmt = gimple_call_arg (stmt, 2);
3351 tree orig = NULL_TREE;
3352 const char *fmt_str = NULL;
3353
3354 if (gimple_call_num_args (stmt) > 4)
3355 return false;
3356
3357 if (gimple_call_num_args (stmt) == 4)
3358 orig = gimple_call_arg (stmt, 3);
3359
3360 if (!tree_fits_uhwi_p (destsize))
3361 return false;
3362 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
3363
3364 /* Check whether the format is a literal string constant. */
3365 fmt_str = c_getstr (fmt);
3366 if (fmt_str == NULL)
3367 return false;
3368
3369 if (!init_target_chars ())
3370 return false;
3371
3372 /* If the format doesn't contain % args or %%, use strcpy. */
3373 if (strchr (fmt_str, target_percent) == NULL)
3374 {
3375 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3376 if (!fn)
3377 return false;
3378
3379 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
3380 if (orig)
3381 return false;
3382
3383 /* We could expand this as
3384 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
3385 or to
3386 memcpy (str, fmt_with_nul_at_cstm1, cst);
3387 but in the former case that might increase code size
3388 and in the latter case grow .rodata section too much.
3389 So punt for now. */
3390 size_t len = strlen (fmt_str);
3391 if (len >= destlen)
3392 return false;
3393
3394 gimple_seq stmts = NULL;
3395 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3396 gimple_seq_add_stmt_without_update (&stmts, repl);
3397 if (tree lhs = gimple_call_lhs (stmt))
3398 {
3399 repl = gimple_build_assign (lhs,
3400 build_int_cst (TREE_TYPE (lhs), len));
3401 gimple_seq_add_stmt_without_update (&stmts, repl);
3402 gsi_replace_with_seq_vops (gsi, stmts);
3403 /* gsi now points at the assignment to the lhs, get a
3404 stmt iterator to the strcpy call.
3405 ??? We can't use gsi_for_stmt as that doesn't work when the
3406 CFG isn't built yet. */
3407 gimple_stmt_iterator gsi2 = *gsi;
3408 gsi_prev (&gsi2);
3409 fold_stmt (&gsi2);
3410 }
3411 else
3412 {
3413 gsi_replace_with_seq_vops (gsi, stmts);
3414 fold_stmt (gsi);
3415 }
3416 return true;
3417 }
3418
3419 /* If the format is "%s", use strcpy when strlen (ORIG) is known to be less than DESTSIZE. */
3420 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3421 {
3422 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3423 if (!fn)
3424 return false;
3425
3426 /* Don't crash on snprintf (str1, cst, "%s"). */
3427 if (!orig)
3428 return false;
3429
3430 tree orig_len = get_maxval_strlen (orig, SRK_STRLEN);
3431 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
3432 return false;
3433
3434 /* We could expand this as
3435 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
3436 or to
3437 memcpy (str1, str2_with_nul_at_cstm1, cst);
3438 but in the former case that might increase code size
3439 and in the latter case grow .rodata section too much.
3440 So punt for now. */
3441 if (compare_tree_int (orig_len, destlen) >= 0)
3442 return false;
3443
3444 /* Convert snprintf (str1, cst, "%s", str2) into
3445 strcpy (str1, str2) if strlen (str2) < cst. */
3446 gimple_seq stmts = NULL;
3447 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3448 gimple_seq_add_stmt_without_update (&stmts, repl);
3449 if (tree lhs = gimple_call_lhs (stmt))
3450 {
3451 if (!useless_type_conversion_p (TREE_TYPE (lhs),
3452 TREE_TYPE (orig_len)))
3453 orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
3454 repl = gimple_build_assign (lhs, orig_len);
3455 gimple_seq_add_stmt_without_update (&stmts, repl);
3456 gsi_replace_with_seq_vops (gsi, stmts);
3457 /* gsi now points at the assignment to the lhs, get a
3458 stmt iterator to the strcpy call.
3459 ??? We can't use gsi_for_stmt as that doesn't work when the
3460 CFG isn't built yet. */
3461 gimple_stmt_iterator gsi2 = *gsi;
3462 gsi_prev (&gsi2);
3463 fold_stmt (&gsi2);
3464 }
3465 else
3466 {
3467 gsi_replace_with_seq_vops (gsi, stmts);
3468 fold_stmt (gsi);
3469 }
3470 return true;
3471 }
3472 return false;
3473 }
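
/* For example, with 'char d[6]',
     snprintf (d, 6, "hello")   =>  strcpy (d, "hello"), result 5,
   and snprintf (d, 6, "%s", s) becomes strcpy (d, s) when
   strlen (S) is known to be less than 6.  */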
3474
3475 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk
3476 builtins. FP, FMT, and ARG are the arguments to the call. We don't
3477 fold calls with more than 3 arguments, and ARG may be null in the
3478 2-argument case.
3479
3480 Return false if no simplification was possible, true otherwise.
3481 FCODE is the BUILT_IN_* code of the function to be simplified. */
3482
3483 static bool
3484 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
3485 tree fp, tree fmt, tree arg,
3486 enum built_in_function fcode)
3487 {
3488 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3489 tree fn_fputc, fn_fputs;
3490 const char *fmt_str = NULL;
3491
3492 /* If the return value is used, don't do the transformation. */
3493 if (gimple_call_lhs (stmt) != NULL_TREE)
3494 return false;
3495
3496 /* Check whether the format is a literal string constant. */
3497 fmt_str = c_getstr (fmt);
3498 if (fmt_str == NULL)
3499 return false;
3500
3501 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
3502 {
3503 /* If we're using an unlocked function, assume the other
3504 unlocked functions exist explicitly. */
3505 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
3506 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
3507 }
3508 else
3509 {
3510 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
3511 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
3512 }
3513
3514 if (!init_target_chars ())
3515 return false;
3516
3517 /* If the format doesn't contain % args or %%, transform to fputs. */
3518 if (strchr (fmt_str, target_percent) == NULL)
3519 {
3520 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
3521 && arg)
3522 return false;
3523
3524 /* If the format specifier was "", fprintf does nothing. */
3525 if (fmt_str[0] == '\0')
3526 {
3527 replace_call_with_value (gsi, NULL_TREE);
3528 return true;
3529 }
3530
3531 /* When "string" doesn't contain %, replace all cases of
3532 fprintf (fp, string) with fputs (string, fp). The fputs
3533 builtin will take care of special cases like length == 1. */
3534 if (fn_fputs)
3535 {
3536 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
3537 replace_call_with_call_and_fold (gsi, repl);
3538 return true;
3539 }
3540 }
3541
3542 /* The other optimizations can be done only on the non-va_list variants. */
3543 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
3544 return false;
3545
3546 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3547 else if (strcmp (fmt_str, target_percent_s) == 0)
3548 {
3549 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3550 return false;
3551 if (fn_fputs)
3552 {
3553 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
3554 replace_call_with_call_and_fold (gsi, repl);
3555 return true;
3556 }
3557 }
3558
3559 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3560 else if (strcmp (fmt_str, target_percent_c) == 0)
3561 {
3562 if (!arg
3563 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
3564 return false;
3565 if (fn_fputc)
3566 {
3567 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
3568 replace_call_with_call_and_fold (gsi, repl);
3569 return true;
3570 }
3571 }
3572
3573 return false;
3574 }
3575
3576 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3577 FMT and ARG are the arguments to the call; we don't fold cases with
3578 more than 2 arguments, and ARG may be null if this is a 1-argument case.
3579
3580 Return true if a simplification was made and the call at *GSI was
3581 replaced, false otherwise. FCODE is the BUILT_IN_*
3582 code of the function to be simplified. */
3583
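/* As a rough source-level sketch of the folds below (str and c are
   illustrative names), the transformations performed are, e.g.:
     printf ("")           => call removed
     printf ("x")          => putchar ('x')
     printf ("hello\n")    => puts ("hello")
     printf ("%s\n", str)  => puts (str)
     printf ("%c", c)      => putchar (c)
   again only when the return value of the call is unused. */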
3584 static bool
3585 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
3586 tree arg, enum built_in_function fcode)
3587 {
3588 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3589 tree fn_putchar, fn_puts, newarg;
3590 const char *fmt_str = NULL;
3591
3592 /* If the return value is used, don't do the transformation. */
3593 if (gimple_call_lhs (stmt) != NULL_TREE)
3594 return false;
3595
3596 /* Check whether the format is a literal string constant. */
3597 fmt_str = c_getstr (fmt);
3598 if (fmt_str == NULL)
3599 return false;
3600
3601 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
3602 {
3603 /* If we're using an unlocked function, assume that the other
3604 unlocked functions have been declared explicitly as well. */
3605 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
3606 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
3607 }
3608 else
3609 {
3610 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
3611 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
3612 }
3613
3614 if (!init_target_chars ())
3615 return false;
3616
3617 if (strcmp (fmt_str, target_percent_s) == 0
3618 || strchr (fmt_str, target_percent) == NULL)
3619 {
3620 const char *str;
3621
3622 if (strcmp (fmt_str, target_percent_s) == 0)
3623 {
3624 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3625 return false;
3626
3627 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3628 return false;
3629
3630 str = c_getstr (arg);
3631 if (str == NULL)
3632 return false;
3633 }
3634 else
3635 {
3636 /* The format specifier doesn't contain any '%' characters. */
3637 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
3638 && arg)
3639 return false;
3640 str = fmt_str;
3641 }
3642
3643 /* If the string was "", printf does nothing. */
3644 if (str[0] == '\0')
3645 {
3646 replace_call_with_value (gsi, NULL_TREE);
3647 return true;
3648 }
3649
3650 /* If the string has length 1, call putchar. */
3651 if (str[1] == '\0')
3652 {
3653 /* Given printf("c") (where c is any one character),
3654 convert "c"[0] to an int and pass that to the replacement
3655 function. */
3656 newarg = build_int_cst (integer_type_node, str[0]);
3657 if (fn_putchar)
3658 {
3659 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
3660 replace_call_with_call_and_fold (gsi, repl);
3661 return true;
3662 }
3663 }
3664 else
3665 {
3666 /* If the string was "string\n", call puts("string"). */
3667 size_t len = strlen (str);
3668 if ((unsigned char)str[len - 1] == target_newline
3669 && (size_t) (int) len == len
3670 && (int) len > 0)
3671 {
3672 char *newstr;
3673
3674 /* Create a NUL-terminated string that's one char shorter
3675 than the original, stripping off the trailing '\n'. */
3676 newstr = xstrdup (str);
3677 newstr[len - 1] = '\0';
3678 newarg = build_string_literal (len, newstr);
3679 free (newstr);
3680 if (fn_puts)
3681 {
3682 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
3683 replace_call_with_call_and_fold (gsi, repl);
3684 return true;
3685 }
3686 }
3687 else
3688 /* We'd like to arrange to call fputs(string,stdout) here,
3689 but we need stdout and don't have a way to get it yet. */
3690 return false;
3691 }
3692 }
3693
3694 /* The other optimizations can be done only on the non-va_list variants. */
3695 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3696 return false;
3697
3698 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
3699 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
3700 {
3701 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3702 return false;
3703 if (fn_puts)
3704 {
3705 gcall *repl = gimple_build_call (fn_puts, 1, arg);
3706 replace_call_with_call_and_fold (gsi, repl);
3707 return true;
3708 }
3709 }
3710
3711 /* If the format specifier was "%c", call __builtin_putchar(arg). */
3712 else if (strcmp (fmt_str, target_percent_c) == 0)
3713 {
3714 if (!arg || ! useless_type_conversion_p (integer_type_node,
3715 TREE_TYPE (arg)))
3716 return false;
3717 if (fn_putchar)
3718 {
3719 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
3720 replace_call_with_call_and_fold (gsi, repl);
3721 return true;
3722 }
3723 }
3724
3725 return false;
3726 }
3727
3728
3729
3730 /* Fold a call to __builtin_strlen to a constant if possible, otherwise set the range of lengths on its result. */
3731
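/* E.g., when the argument is a string constant, the call folds to a
   constant:
     n = strlen ("hello");   =>   n = 5;
   When only bounds are known, the call is kept and the range
   [MINLEN, MAXLEN] is recorded on its result instead. */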
3732 static bool
3733 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
3734 {
3735 gimple *stmt = gsi_stmt (*gsi);
3736 tree arg = gimple_call_arg (stmt, 0);
3737
3738 wide_int minlen;
3739 wide_int maxlen;
3740
3741 c_strlen_data lendata = { };
3742 if (get_range_strlen (arg, &lendata, /* eltsize = */ 1)
3743 && !lendata.decl
3744 && lendata.minlen && TREE_CODE (lendata.minlen) == INTEGER_CST
3745 && lendata.maxlen && TREE_CODE (lendata.maxlen) == INTEGER_CST)
3746 {
3747 /* The range of lengths refers to either a single constant
3748 string or to the longest and shortest constant string
3749 referenced by the argument of the strlen() call, or to
3750 the strings that can possibly be stored in the arrays
3751 the argument refers to. */
3752 minlen = wi::to_wide (lendata.minlen);
3753 maxlen = wi::to_wide (lendata.maxlen);
3754 }
3755 else
3756 {
3757 unsigned prec = TYPE_PRECISION (sizetype);
3758
3759 minlen = wi::shwi (0, prec);
3760 maxlen = wi::to_wide (max_object_size (), prec) - 2;
3761 }
3762
3763 if (minlen == maxlen)
3764 {
3765 /* Fold the strlen call to a constant. */
3766 tree type = TREE_TYPE (lendata.minlen);
3767 tree len = force_gimple_operand_gsi (gsi,
3768 wide_int_to_tree (type, minlen),
3769 true, NULL, true, GSI_SAME_STMT);
3770 replace_call_with_value (gsi, len);
3771 return true;
3772 }
3773
3774 /* Set the strlen() range to [MINLEN, MAXLEN]. */
3775 if (tree lhs = gimple_call_lhs (stmt))
3776 set_strlen_range (lhs, minlen, maxlen);
3777
3778 return false;
3779 }
3780
3781 /* Fold a call to __builtin_acc_on_device. */
3782
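/* A sketch of the expansion (t1, t2, and r stand for new SSA names):
     r = __builtin_acc_on_device (arg);
   becomes
     t1 = arg == val_host;
     t2 = arg == val_dev;
     r = t1 | t2;
   where val_host and val_dev depend on whether this is the host or
   the accelerator compiler. */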
3783 static bool
3784 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
3785 {
3786 /* Defer folding until we know which compiler we're in. */
3787 if (symtab->state != EXPANSION)
3788 return false;
3789
3790 unsigned val_host = GOMP_DEVICE_HOST;
3791 unsigned val_dev = GOMP_DEVICE_NONE;
3792
3793 #ifdef ACCEL_COMPILER
3794 val_host = GOMP_DEVICE_NOT_HOST;
3795 val_dev = ACCEL_COMPILER_acc_device;
3796 #endif
3797
3798 location_t loc = gimple_location (gsi_stmt (*gsi));
3799
3800 tree host_eq = make_ssa_name (boolean_type_node);
3801 gimple *host_ass = gimple_build_assign
3802 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
3803 gimple_set_location (host_ass, loc);
3804 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
3805
3806 tree dev_eq = make_ssa_name (boolean_type_node);
3807 gimple *dev_ass = gimple_build_assign
3808 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
3809 gimple_set_location (dev_ass, loc);
3810 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
3811
3812 tree result = make_ssa_name (boolean_type_node);
3813 gimple *result_ass = gimple_build_assign
3814 (result, BIT_IOR_EXPR, host_eq, dev_eq);
3815 gimple_set_location (result_ass, loc);
3816 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
3817
3818 replace_call_with_value (gsi, result);
3819
3820 return true;
3821 }
3822
3823 /* Fold realloc (0, n) -> malloc (n). */
3824
3825 static bool
3826 gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
3827 {
3828 gimple *stmt = gsi_stmt (*gsi);
3829 tree arg = gimple_call_arg (stmt, 0);
3830 tree size = gimple_call_arg (stmt, 1);
3831
3832 if (operand_equal_p (arg, null_pointer_node, 0))
3833 {
3834 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
3835 if (fn_malloc)
3836 {
3837 gcall *repl = gimple_build_call (fn_malloc, 1, size);
3838 replace_call_with_call_and_fold (gsi, repl);
3839 return true;
3840 }
3841 }
3842 return false;
3843 }
3844
3845 /* Fold the non-target builtin at *GSI and return whether any simplification
3846 was made. */
3847
3848 static bool
3849 gimple_fold_builtin (gimple_stmt_iterator *gsi)
3850 {
3851 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
3852 tree callee = gimple_call_fndecl (stmt);
3853
3854 /* Give up for always_inline inline builtins until they are
3855 inlined. */
3856 if (avoid_folding_inline_builtin (callee))
3857 return false;
3858
3859 unsigned n = gimple_call_num_args (stmt);
3860 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
3861 switch (fcode)
3862 {
3863 case BUILT_IN_BCMP:
3864 return gimple_fold_builtin_bcmp (gsi);
3865 case BUILT_IN_BCOPY:
3866 return gimple_fold_builtin_bcopy (gsi);
3867 case BUILT_IN_BZERO:
3868 return gimple_fold_builtin_bzero (gsi);
3869
3870 case BUILT_IN_MEMSET:
3871 return gimple_fold_builtin_memset (gsi,
3872 gimple_call_arg (stmt, 1),
3873 gimple_call_arg (stmt, 2));
3874 case BUILT_IN_MEMCPY:
3875 case BUILT_IN_MEMPCPY:
3876 case BUILT_IN_MEMMOVE:
3877 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3878 gimple_call_arg (stmt, 1), fcode);
3879 case BUILT_IN_SPRINTF_CHK:
3880 case BUILT_IN_VSPRINTF_CHK:
3881 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
3882 case BUILT_IN_STRCAT_CHK:
3883 return gimple_fold_builtin_strcat_chk (gsi);
3884 case BUILT_IN_STRNCAT_CHK:
3885 return gimple_fold_builtin_strncat_chk (gsi);
3886 case BUILT_IN_STRLEN:
3887 return gimple_fold_builtin_strlen (gsi);
3888 case BUILT_IN_STRCPY:
3889 return gimple_fold_builtin_strcpy (gsi,
3890 gimple_call_arg (stmt, 0),
3891 gimple_call_arg (stmt, 1));
3892 case BUILT_IN_STRNCPY:
3893 return gimple_fold_builtin_strncpy (gsi,
3894 gimple_call_arg (stmt, 0),
3895 gimple_call_arg (stmt, 1),
3896 gimple_call_arg (stmt, 2));
3897 case BUILT_IN_STRCAT:
3898 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
3899 gimple_call_arg (stmt, 1));
3900 case BUILT_IN_STRNCAT:
3901 return gimple_fold_builtin_strncat (gsi);
3902 case BUILT_IN_INDEX:
3903 case BUILT_IN_STRCHR:
3904 return gimple_fold_builtin_strchr (gsi, false);
3905 case BUILT_IN_RINDEX:
3906 case BUILT_IN_STRRCHR:
3907 return gimple_fold_builtin_strchr (gsi, true);
3908 case BUILT_IN_STRSTR:
3909 return gimple_fold_builtin_strstr (gsi);
3910 case BUILT_IN_STRCMP:
3911 case BUILT_IN_STRCMP_EQ:
3912 case BUILT_IN_STRCASECMP:
3913 case BUILT_IN_STRNCMP:
3914 case BUILT_IN_STRNCMP_EQ:
3915 case BUILT_IN_STRNCASECMP:
3916 return gimple_fold_builtin_string_compare (gsi);
3917 case BUILT_IN_MEMCHR:
3918 return gimple_fold_builtin_memchr (gsi);
3919 case BUILT_IN_FPUTS:
3920 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3921 gimple_call_arg (stmt, 1), false);
3922 case BUILT_IN_FPUTS_UNLOCKED:
3923 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3924 gimple_call_arg (stmt, 1), true);
3925 case BUILT_IN_MEMCPY_CHK:
3926 case BUILT_IN_MEMPCPY_CHK:
3927 case BUILT_IN_MEMMOVE_CHK:
3928 case BUILT_IN_MEMSET_CHK:
3929 return gimple_fold_builtin_memory_chk (gsi,
3930 gimple_call_arg (stmt, 0),
3931 gimple_call_arg (stmt, 1),
3932 gimple_call_arg (stmt, 2),
3933 gimple_call_arg (stmt, 3),
3934 fcode);
3935 case BUILT_IN_STPCPY:
3936 return gimple_fold_builtin_stpcpy (gsi);
3937 case BUILT_IN_STRCPY_CHK:
3938 case BUILT_IN_STPCPY_CHK:
3939 return gimple_fold_builtin_stxcpy_chk (gsi,
3940 gimple_call_arg (stmt, 0),
3941 gimple_call_arg (stmt, 1),
3942 gimple_call_arg (stmt, 2),
3943 fcode);
3944 case BUILT_IN_STRNCPY_CHK:
3945 case BUILT_IN_STPNCPY_CHK:
3946 return gimple_fold_builtin_stxncpy_chk (gsi,
3947 gimple_call_arg (stmt, 0),
3948 gimple_call_arg (stmt, 1),
3949 gimple_call_arg (stmt, 2),
3950 gimple_call_arg (stmt, 3),
3951 fcode);
3952 case BUILT_IN_SNPRINTF_CHK:
3953 case BUILT_IN_VSNPRINTF_CHK:
3954 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
3955
3956 case BUILT_IN_FPRINTF:
3957 case BUILT_IN_FPRINTF_UNLOCKED:
3958 case BUILT_IN_VFPRINTF:
3959 if (n == 2 || n == 3)
3960 return gimple_fold_builtin_fprintf (gsi,
3961 gimple_call_arg (stmt, 0),
3962 gimple_call_arg (stmt, 1),
3963 n == 3
3964 ? gimple_call_arg (stmt, 2)
3965 : NULL_TREE,
3966 fcode);
3967 break;
3968 case BUILT_IN_FPRINTF_CHK:
3969 case BUILT_IN_VFPRINTF_CHK:
3970 if (n == 3 || n == 4)
3971 return gimple_fold_builtin_fprintf (gsi,
3972 gimple_call_arg (stmt, 0),
3973 gimple_call_arg (stmt, 2),
3974 n == 4
3975 ? gimple_call_arg (stmt, 3)
3976 : NULL_TREE,
3977 fcode);
3978 break;
3979 case BUILT_IN_PRINTF:
3980 case BUILT_IN_PRINTF_UNLOCKED:
3981 case BUILT_IN_VPRINTF:
3982 if (n == 1 || n == 2)
3983 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
3984 n == 2
3985 ? gimple_call_arg (stmt, 1)
3986 : NULL_TREE, fcode);
3987 break;
3988 case BUILT_IN_PRINTF_CHK:
3989 case BUILT_IN_VPRINTF_CHK:
3990 if (n == 2 || n == 3)
3991 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
3992 n == 3
3993 ? gimple_call_arg (stmt, 2)
3994 : NULL_TREE, fcode);
3995 break;
3996 case BUILT_IN_ACC_ON_DEVICE:
3997 return gimple_fold_builtin_acc_on_device (gsi,
3998 gimple_call_arg (stmt, 0));
3999 case BUILT_IN_REALLOC:
4000 return gimple_fold_builtin_realloc (gsi);
4001
4002 default:;
4003 }
4004
4005 /* Try the generic builtin folder. */
4006 bool ignore = (gimple_call_lhs (stmt) == NULL);
4007 tree result = fold_call_stmt (stmt, ignore);
4008 if (result)
4009 {
4010 if (ignore)
4011 STRIP_NOPS (result);
4012 else
4013 result = fold_convert (gimple_call_return_type (stmt), result);
4014 if (!update_call_from_tree (gsi, result))
4015 gimplify_and_update_call_from_tree (gsi, result);
4016 return true;
4017 }
4018
4019 return false;
4020 }
4021
4022 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
4023 function calls to constants, where possible. */
4024
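/* E.g., if the size of an axis is known to be 1, GOACC_DIM_POS for
   that axis must be 0; if the size is a compile-time constant N,
   GOACC_DIM_SIZE folds to N. A dynamic size (recorded as 0) leaves
   the call alone. */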
4025 static tree
4026 fold_internal_goacc_dim (const gimple *call)
4027 {
4028 int axis = oacc_get_ifn_dim_arg (call);
4029 int size = oacc_get_fn_dim_size (current_function_decl, axis);
4030 tree result = NULL_TREE;
4031 tree type = TREE_TYPE (gimple_call_lhs (call));
4032
4033 switch (gimple_call_internal_fn (call))
4034 {
4035 case IFN_GOACC_DIM_POS:
4036 /* If the size is 1, we know the answer. */
4037 if (size == 1)
4038 result = build_int_cst (type, 0);
4039 break;
4040 case IFN_GOACC_DIM_SIZE:
4041 /* If the size is not dynamic, we know the answer. */
4042 if (size)
4043 result = build_int_cst (type, size);
4044 break;
4045 default:
4046 break;
4047 }
4048
4049 return result;
4050 }
4051
4052 /* Return true if STMT is a __atomic_compare_exchange_N call which is suitable
4053 for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
4054 &var where var is only addressable because of such calls. */
4055
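/* A source-level sketch of the pattern this predicate accepts, where
   expected is a local scalar whose address is taken only by such
   calls:
     bool r = __atomic_compare_exchange_n (p, &expected, desired,
                                           weak, success, failure);
   with weak a literal 0 or 1 and a size that maps to one of the
   _1 .. _16 builtins handled below. */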
4056 bool
4057 optimize_atomic_compare_exchange_p (gimple *stmt)
4058 {
4059 if (gimple_call_num_args (stmt) != 6
4060 || !flag_inline_atomics
4061 || !optimize
4062 || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
4063 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
4064 || !gimple_vdef (stmt)
4065 || !gimple_vuse (stmt))
4066 return false;
4067
4068 tree fndecl = gimple_call_fndecl (stmt);
4069 switch (DECL_FUNCTION_CODE (fndecl))
4070 {
4071 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
4072 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
4073 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
4074 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
4075 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
4076 break;
4077 default:
4078 return false;
4079 }
4080
4081 tree expected = gimple_call_arg (stmt, 1);
4082 if (TREE_CODE (expected) != ADDR_EXPR
4083 || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
4084 return false;
4085
4086 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
4087 if (!is_gimple_reg_type (etype)
4088 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
4089 || TREE_THIS_VOLATILE (etype)
4090 || VECTOR_TYPE_P (etype)
4091 || TREE_CODE (etype) == COMPLEX_TYPE
4092 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
4093 might not preserve all the bits. See PR71716. */
4094 || SCALAR_FLOAT_TYPE_P (etype)
4095 || maybe_ne (TYPE_PRECISION (etype),
4096 GET_MODE_BITSIZE (TYPE_MODE (etype))))
4097 return false;
4098
4099 tree weak = gimple_call_arg (stmt, 3);
4100 if (!integer_zerop (weak) && !integer_onep (weak))
4101 return false;
4102
4103 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
4104 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
4105 machine_mode mode = TYPE_MODE (itype);
4106
4107 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
4108 == CODE_FOR_nothing
4109 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
4110 return false;
4111
4112 if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
4113 return false;
4114
4115 return true;
4116 }
4117
4118 /* Fold
4119 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
4120 into
4121 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
4122 i = IMAGPART_EXPR <t>;
4123 r = (_Bool) i;
4124 e = REALPART_EXPR <t>; */
4125
4126 void
4127 fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
4128 {
4129 gimple *stmt = gsi_stmt (*gsi);
4130 tree fndecl = gimple_call_fndecl (stmt);
4131 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
4132 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
4133 tree ctype = build_complex_type (itype);
4134 tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
4135 bool throws = false;
4136 edge e = NULL;
4137 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
4138 expected);
4139 gsi_insert_before (gsi, g, GSI_SAME_STMT);
4140 gimple_stmt_iterator gsiret = gsi_for_stmt (g);
4141 if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
4142 {
4143 g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
4144 build1 (VIEW_CONVERT_EXPR, itype,
4145 gimple_assign_lhs (g)));
4146 gsi_insert_before (gsi, g, GSI_SAME_STMT);
4147 }
4148 int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
4149 + int_size_in_bytes (itype);
4150 g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
4151 gimple_call_arg (stmt, 0),
4152 gimple_assign_lhs (g),
4153 gimple_call_arg (stmt, 2),
4154 build_int_cst (integer_type_node, flag),
4155 gimple_call_arg (stmt, 4),
4156 gimple_call_arg (stmt, 5));
4157 tree lhs = make_ssa_name (ctype);
4158 gimple_call_set_lhs (g, lhs);
4159 gimple_move_vops (g, stmt);
4160 tree oldlhs = gimple_call_lhs (stmt);
4161 if (stmt_can_throw_internal (cfun, stmt))
4162 {
4163 throws = true;
4164 e = find_fallthru_edge (gsi_bb (*gsi)->succs);
4165 }
4166 gimple_call_set_nothrow (as_a <gcall *> (g),
4167 gimple_call_nothrow_p (as_a <gcall *> (stmt)));
4168 gimple_call_set_lhs (stmt, NULL_TREE);
4169 gsi_replace (gsi, g, true);
4170 if (oldlhs)
4171 {
4172 g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
4173 build1 (IMAGPART_EXPR, itype, lhs));
4174 if (throws)
4175 {
4176 gsi_insert_on_edge_immediate (e, g);
4177 *gsi = gsi_for_stmt (g);
4178 }
4179 else
4180 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4181 g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
4182 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4183 }
4184 g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
4185 build1 (REALPART_EXPR, itype, lhs));
4186 if (throws && oldlhs == NULL_TREE)
4187 {
4188 gsi_insert_on_edge_immediate (e, g);
4189 *gsi = gsi_for_stmt (g);
4190 }
4191 else
4192 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4193 if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
4194 {
4195 g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
4196 VIEW_CONVERT_EXPR,
4197 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
4198 gimple_assign_lhs (g)));
4199 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4200 }
4201 g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
4202 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4203 *gsi = gsiret;
4204 }
4205
4206 /* Return true if the result of ARG0 CODE ARG1, computed in infinite
4207 signed precision, doesn't fit into TYPE. The overflow test is done
4208 regardless of -fwrapv, and even for unsigned types. */
4209
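/* E.g., with TYPE = unsigned char (precision 8):
     200 + 100 = 300 needs 9 bits => true (overflow)
     200 - 100 = 100 fits in 8 bits => false
   and with TYPE = signed char, 100 + 100 = 200 needs 9 bits of signed
   precision, so it overflows as well. */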
4210 bool
4211 arith_overflowed_p (enum tree_code code, const_tree type,
4212 const_tree arg0, const_tree arg1)
4213 {
4214 widest2_int warg0 = widest2_int_cst (arg0);
4215 widest2_int warg1 = widest2_int_cst (arg1);
4216 widest2_int wres;
4217 switch (code)
4218 {
4219 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
4220 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
4221 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
4222 default: gcc_unreachable ();
4223 }
4224 signop sign = TYPE_SIGN (type);
4225 if (sign == UNSIGNED && wi::neg_p (wres))
4226 return true;
4227 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
4228 }
4229
4230 /* If IFN_MASK_LOAD/STORE call CALL is unconditional, return a MEM_REF
4231 for the memory it references, otherwise return null. VECTYPE is the
4232 type of the memory vector. */
4233
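/* E.g., a load whose mask is known to be all ones, such as
     lhs = IFN_MASK_LOAD (ptr, align, { -1, -1, -1, -1 });
   is really an unconditional vector load, so it can be represented by
   the plain memory reference MEM_REF <vectype> [ptr, 0]. */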
4234 static tree
4235 gimple_fold_mask_load_store_mem_ref (gcall *call, tree vectype)
4236 {
4237 tree ptr = gimple_call_arg (call, 0);
4238 tree alias_align = gimple_call_arg (call, 1);
4239 tree mask = gimple_call_arg (call, 2);
4240 if (!tree_fits_uhwi_p (alias_align) || !integer_all_onesp (mask))
4241 return NULL_TREE;
4242
4243 unsigned HOST_WIDE_INT align = tree_to_uhwi (alias_align) * BITS_PER_UNIT;
4244 if (TYPE_ALIGN (vectype) != align)
4245 vectype = build_aligned_type (vectype, align);
4246 tree offset = build_zero_cst (TREE_TYPE (alias_align));
4247 return fold_build2 (MEM_REF, vectype, ptr, offset);
4248 }
4249
4250 /* Try to fold IFN_MASK_LOAD call CALL. Return true on success. */
4251
4252 static bool
4253 gimple_fold_mask_load (gimple_stmt_iterator *gsi, gcall *call)
4254 {
4255 tree lhs = gimple_call_lhs (call);
4256 if (!lhs)
4257 return false;
4258
4259 if (tree rhs = gimple_fold_mask_load_store_mem_ref (call, TREE_TYPE (lhs)))
4260 {
4261 gassign *new_stmt = gimple_build_assign (lhs, rhs);
4262 gimple_set_location (new_stmt, gimple_location (call));
4263 gimple_move_vops (new_stmt, call);
4264 gsi_replace (gsi, new_stmt, false);
4265 return true;
4266 }
4267 return false;
4268 }
4269
4270 /* Try to fold IFN_MASK_STORE call CALL. Return true on success. */
4271
4272 static bool
4273 gimple_fold_mask_store (gimple_stmt_iterator *gsi, gcall *call)
4274 {
4275 tree rhs = gimple_call_arg (call, 3);
4276 if (tree lhs = gimple_fold_mask_load_store_mem_ref (call, TREE_TYPE (rhs)))
4277 {
4278 gassign *new_stmt = gimple_build_assign (lhs, rhs);
4279 gimple_set_location (new_stmt, gimple_location (call));
4280 gimple_move_vops (new_stmt, call);
4281 gsi_replace (gsi, new_stmt, false);
4282 return true;
4283 }
4284 return false;
4285 }
4286
4287 /* Attempt to fold a call statement referenced by the statement iterator GSI.
4288 The statement may be replaced by another statement, e.g., if the call
4289 simplifies to a constant value. Return true if any changes were made.
4290 It is assumed that the operands have been previously folded. */
4291
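/* Among other things this performs devirtualization: when type
   analysis proves that a virtual call has exactly one possible
   target, the OBJ_TYPE_REF call is rewritten into a direct call to
   that target, and a call with no possible targets is replaced by a
   call to __builtin_unreachable. */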
4292 static bool
4293 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
4294 {
4295 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
4296 tree callee;
4297 bool changed = false;
4298 unsigned i;
4299
4300 /* Fold *& in call arguments. */
4301 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4302 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
4303 {
4304 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
4305 if (tmp)
4306 {
4307 gimple_call_set_arg (stmt, i, tmp);
4308 changed = true;
4309 }
4310 }
4311
4312 /* Check for virtual calls that became direct calls. */
4313 callee = gimple_call_fn (stmt);
4314 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
4315 {
4316 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
4317 {
4318 if (dump_file && virtual_method_call_p (callee)
4319 && !possible_polymorphic_call_target_p
4320 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
4321 (OBJ_TYPE_REF_EXPR (callee)))))
4322 {
4323 fprintf (dump_file,
4324 "Type inheritance inconsistent devirtualization of ");
4325 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
4326 fprintf (dump_file, " to ");
4327 print_generic_expr (dump_file, callee, TDF_SLIM);
4328 fprintf (dump_file, "\n");
4329 }
4330
4331 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
4332 changed = true;
4333 }
4334 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
4335 {
4336 bool final;
4337 vec <cgraph_node *>targets
4338 = possible_polymorphic_call_targets (callee, stmt, &final);
4339 if (final && targets.length () <= 1 && dbg_cnt (devirt))
4340 {
4341 tree lhs = gimple_call_lhs (stmt);
4342 if (dump_enabled_p ())
4343 {
4344 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
4345 "folding virtual function call to %s\n",
4346 targets.length () == 1
4347 ? targets[0]->name ()
4348 : "__builtin_unreachable");
4349 }
4350 if (targets.length () == 1)
4351 {
4352 tree fndecl = targets[0]->decl;
4353 gimple_call_set_fndecl (stmt, fndecl);
4354 changed = true;
4355 /* If changing the call to __cxa_pure_virtual
4356 or a similar noreturn function, adjust gimple_call_fntype
4357 too. */
4358 if (gimple_call_noreturn_p (stmt)
4359 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
4360 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
4361 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
4362 == void_type_node))
4363 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
4364 /* If the call becomes noreturn, remove the lhs. */
4365 if (lhs
4366 && gimple_call_noreturn_p (stmt)
4367 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
4368 || should_remove_lhs_p (lhs)))
4369 {
4370 if (TREE_CODE (lhs) == SSA_NAME)
4371 {
4372 tree var = create_tmp_var (TREE_TYPE (lhs));
4373 tree def = get_or_create_ssa_default_def (cfun, var);
4374 gimple *new_stmt = gimple_build_assign (lhs, def);
4375 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
4376 }
4377 gimple_call_set_lhs (stmt, NULL_TREE);
4378 }
4379 maybe_remove_unused_call_args (cfun, stmt);
4380 }
4381 else
4382 {
4383 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
4384 gimple *new_stmt = gimple_build_call (fndecl, 0);
4385 gimple_set_location (new_stmt, gimple_location (stmt));
4386 /* If the call had an SSA name as its lhs, morph that into
4387 an uninitialized value. */
4388 if (lhs && TREE_CODE (lhs) == SSA_NAME)
4389 {
4390 tree var = create_tmp_var (TREE_TYPE (lhs));
4391 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
4392 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
4393 set_ssa_default_def (cfun, var, lhs);
4394 }
4395 gimple_move_vops (new_stmt, stmt);
4396 gsi_replace (gsi, new_stmt, false);
4397 return true;
4398 }
4399 }
4400 }
4401 }
4402
4403 /* Check for indirect calls that became direct calls, and then
4404 no longer require a static chain. */
4405 if (gimple_call_chain (stmt))
4406 {
4407 tree fn = gimple_call_fndecl (stmt);
4408 if (fn && !DECL_STATIC_CHAIN (fn))
4409 {
4410 gimple_call_set_chain (stmt, NULL);
4411 changed = true;
4412 }
4413 else
4414 {
4415 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
4416 if (tmp)
4417 {
4418 gimple_call_set_chain (stmt, tmp);
4419 changed = true;
4420 }
4421 }
4422 }
4423
4424 if (inplace)
4425 return changed;
4426
4427 /* Check for builtins that CCP can handle using information not
4428 available in the generic fold routines. */
4429 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4430 {
4431 if (gimple_fold_builtin (gsi))
4432 changed = true;
4433 }
4434 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
4435 {
4436 changed |= targetm.gimple_fold_builtin (gsi);
4437 }
4438 else if (gimple_call_internal_p (stmt))
4439 {
4440 enum tree_code subcode = ERROR_MARK;
4441 tree result = NULL_TREE;
4442 bool cplx_result = false;
4443 tree overflow = NULL_TREE;
4444 switch (gimple_call_internal_fn (stmt))
4445 {
4446 case IFN_BUILTIN_EXPECT:
4447 result = fold_builtin_expect (gimple_location (stmt),
4448 gimple_call_arg (stmt, 0),
4449 gimple_call_arg (stmt, 1),
4450 gimple_call_arg (stmt, 2),
4451 NULL_TREE);
4452 break;
4453 case IFN_UBSAN_OBJECT_SIZE:
4454 {
4455 tree offset = gimple_call_arg (stmt, 1);
4456 tree objsize = gimple_call_arg (stmt, 2);
4457 if (integer_all_onesp (objsize)
4458 || (TREE_CODE (offset) == INTEGER_CST
4459 && TREE_CODE (objsize) == INTEGER_CST
4460 && tree_int_cst_le (offset, objsize)))
4461 {
4462 replace_call_with_value (gsi, NULL_TREE);
4463 return true;
4464 }
4465 }
4466 break;
4467 case IFN_UBSAN_PTR:
4468 if (integer_zerop (gimple_call_arg (stmt, 1)))
4469 {
4470 replace_call_with_value (gsi, NULL_TREE);
4471 return true;
4472 }
4473 break;
4474 case IFN_UBSAN_BOUNDS:
4475 {
4476 tree index = gimple_call_arg (stmt, 1);
4477 tree bound = gimple_call_arg (stmt, 2);
4478 if (TREE_CODE (index) == INTEGER_CST
4479 && TREE_CODE (bound) == INTEGER_CST)
4480 {
4481 index = fold_convert (TREE_TYPE (bound), index);
4482 if (TREE_CODE (index) == INTEGER_CST
4483 && tree_int_cst_le (index, bound))
4484 {
4485 replace_call_with_value (gsi, NULL_TREE);
4486 return true;
4487 }
4488 }
4489 }
4490 break;
4491 case IFN_GOACC_DIM_SIZE:
4492 case IFN_GOACC_DIM_POS:
4493 result = fold_internal_goacc_dim (stmt);
4494 break;
4495 case IFN_UBSAN_CHECK_ADD:
4496 subcode = PLUS_EXPR;
4497 break;
4498 case IFN_UBSAN_CHECK_SUB:
4499 subcode = MINUS_EXPR;
4500 break;
4501 case IFN_UBSAN_CHECK_MUL:
4502 subcode = MULT_EXPR;
4503 break;
4504 case IFN_ADD_OVERFLOW:
4505 subcode = PLUS_EXPR;
4506 cplx_result = true;
4507 break;
4508 case IFN_SUB_OVERFLOW:
4509 subcode = MINUS_EXPR;
4510 cplx_result = true;
4511 break;
4512 case IFN_MUL_OVERFLOW:
4513 subcode = MULT_EXPR;
4514 cplx_result = true;
4515 break;
4516 case IFN_MASK_LOAD:
4517 changed |= gimple_fold_mask_load (gsi, stmt);
4518 break;
4519 case IFN_MASK_STORE:
4520 changed |= gimple_fold_mask_store (gsi, stmt);
4521 break;
4522 default:
4523 break;
4524 }
4525 if (subcode != ERROR_MARK)
4526 {
4527 tree arg0 = gimple_call_arg (stmt, 0);
4528 tree arg1 = gimple_call_arg (stmt, 1);
4529 tree type = TREE_TYPE (arg0);
4530 if (cplx_result)
4531 {
4532 tree lhs = gimple_call_lhs (stmt);
4533 if (lhs == NULL_TREE)
4534 type = NULL_TREE;
4535 else
4536 type = TREE_TYPE (TREE_TYPE (lhs));
4537 }
4538 if (type == NULL_TREE)
4539 ;
4540 /* x = y + 0; x = y - 0; x = y * 0; */
4541 else if (integer_zerop (arg1))
4542 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
4543 /* x = 0 + y; x = 0 * y; */
4544 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
4545 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
4546 /* x = y - y; */
4547 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
4548 result = integer_zero_node;
4549 /* x = y * 1; x = 1 * y; */
4550 else if (subcode == MULT_EXPR && integer_onep (arg1))
4551 result = arg0;
4552 else if (subcode == MULT_EXPR && integer_onep (arg0))
4553 result = arg1;
4554 else if (TREE_CODE (arg0) == INTEGER_CST
4555 && TREE_CODE (arg1) == INTEGER_CST)
4556 {
4557 if (cplx_result)
4558 result = int_const_binop (subcode, fold_convert (type, arg0),
4559 fold_convert (type, arg1));
4560 else
4561 result = int_const_binop (subcode, arg0, arg1);
4562 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
4563 {
4564 if (cplx_result)
4565 overflow = build_one_cst (type);
4566 else
4567 result = NULL_TREE;
4568 }
4569 }
4570 if (result)
4571 {
4572 if (result == integer_zero_node)
4573 result = build_zero_cst (type);
4574 else if (cplx_result && TREE_TYPE (result) != type)
4575 {
4576 if (TREE_CODE (result) == INTEGER_CST)
4577 {
4578 if (arith_overflowed_p (PLUS_EXPR, type, result,
4579 integer_zero_node))
4580 overflow = build_one_cst (type);
4581 }
4582 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
4583 && TYPE_UNSIGNED (type))
4584 || (TYPE_PRECISION (type)
4585 < (TYPE_PRECISION (TREE_TYPE (result))
4586 + (TYPE_UNSIGNED (TREE_TYPE (result))
4587 && !TYPE_UNSIGNED (type)))))
4588 result = NULL_TREE;
4589 if (result)
4590 result = fold_convert (type, result);
4591 }
4592 }
4593 }
4594
4595 if (result)
4596 {
4597 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
4598 result = drop_tree_overflow (result);
4599 if (cplx_result)
4600 {
4601 if (overflow == NULL_TREE)
4602 overflow = build_zero_cst (TREE_TYPE (result));
4603 tree ctype = build_complex_type (TREE_TYPE (result));
4604 if (TREE_CODE (result) == INTEGER_CST
4605 && TREE_CODE (overflow) == INTEGER_CST)
4606 result = build_complex (ctype, result, overflow);
4607 else
4608 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
4609 ctype, result, overflow);
4610 }
4611 if (!update_call_from_tree (gsi, result))
4612 gimplify_and_update_call_from_tree (gsi, result);
4613 changed = true;
4614 }
4615 }
4616
4617 return changed;
4618 }
4619
4620
4621 /* Return true if NAME has a use on STMT. */
4622
4623 static bool
4624 has_use_on_stmt (tree name, gimple *stmt)
4625 {
4626 imm_use_iterator iter;
4627 use_operand_p use_p;
4628 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
4629 if (USE_STMT (use_p) == stmt)
4630 return true;
4631 return false;
4632 }
4633
4634 /* Worker for fold_stmt_1 dispatch to pattern based folding with
4635 gimple_simplify.
4636
4637 Replaces *GSI with the simplification result in RES_OP
4638 and the associated statements in *SEQ. Does the replacement
4639 according to INPLACE and returns true if the operation succeeded. */
4640
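/* E.g., for a GIMPLE_COND the simplified result may be a comparison
   code with two operands, a boolean SSA name (rewritten as
   NAME != 0), or an integer constant, in which case the condition is
   replaced by an always-true or always-false one. */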
4641 static bool
4642 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
4643 gimple_match_op *res_op,
4644 gimple_seq *seq, bool inplace)
4645 {
4646 gimple *stmt = gsi_stmt (*gsi);
4647 tree *ops = res_op->ops;
4648 unsigned int num_ops = res_op->num_ops;
4649
4650 /* Play safe and do not allow abnormals to be mentioned in
4651 newly created statements. See also maybe_push_res_to_seq.
4652 As an exception allow such uses if there was a use of the
4653 same SSA name on the old stmt. */
4654 for (unsigned int i = 0; i < num_ops; ++i)
4655 if (TREE_CODE (ops[i]) == SSA_NAME
4656 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])
4657 && !has_use_on_stmt (ops[i], stmt))
4658 return false;
4659
4660 if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
4661 for (unsigned int i = 0; i < 2; ++i)
4662 if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
4663 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))
4664 && !has_use_on_stmt (TREE_OPERAND (ops[0], i), stmt))
4665 return false;
4666
4667 /* Don't insert new statements when INPLACE is true, even if we could
4668 reuse STMT for the final statement. */
4669 if (inplace && !gimple_seq_empty_p (*seq))
4670 return false;
4671
4672 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
4673 {
4674 gcc_assert (res_op->code.is_tree_code ());
4675 if (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison
4676 /* GIMPLE_CONDs condition may not throw. */
4677 && (!flag_exceptions
4678 || !cfun->can_throw_non_call_exceptions
4679 || !operation_could_trap_p (res_op->code,
4680 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
4681 false, NULL_TREE)))
4682 gimple_cond_set_condition (cond_stmt, res_op->code, ops[0], ops[1]);
4683 else if (res_op->code == SSA_NAME)
4684 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
4685 build_zero_cst (TREE_TYPE (ops[0])));
4686 else if (res_op->code == INTEGER_CST)
4687 {
4688 if (integer_zerop (ops[0]))
4689 gimple_cond_make_false (cond_stmt);
4690 else
4691 gimple_cond_make_true (cond_stmt);
4692 }
4693 else if (!inplace)
4694 {
4695 tree res = maybe_push_res_to_seq (res_op, seq);
4696 if (!res)
4697 return false;
4698 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
4699 build_zero_cst (TREE_TYPE (res)));
4700 }
4701 else
4702 return false;
4703 if (dump_file && (dump_flags & TDF_DETAILS))
4704 {
4705 fprintf (dump_file, "gimple_simplified to ");
4706 if (!gimple_seq_empty_p (*seq))
4707 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4708 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4709 0, TDF_SLIM);
4710 }
4711 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4712 return true;
4713 }
4714 else if (is_gimple_assign (stmt)
4715 && res_op->code.is_tree_code ())
4716 {
4717 if (!inplace
4718 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (res_op->code))
4719 {
4720 maybe_build_generic_op (res_op);
4721 gimple_assign_set_rhs_with_ops (gsi, res_op->code,
4722 res_op->op_or_null (0),
4723 res_op->op_or_null (1),
4724 res_op->op_or_null (2));
4725 if (dump_file && (dump_flags & TDF_DETAILS))
4726 {
4727 fprintf (dump_file, "gimple_simplified to ");
4728 if (!gimple_seq_empty_p (*seq))
4729 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4730 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4731 0, TDF_SLIM);
4732 }
4733 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4734 return true;
4735 }
4736 }
4737 else if (res_op->code.is_fn_code ()
4738 && gimple_call_combined_fn (stmt) == res_op->code)
4739 {
4740 gcc_assert (num_ops == gimple_call_num_args (stmt));
4741 for (unsigned int i = 0; i < num_ops; ++i)
4742 gimple_call_set_arg (stmt, i, ops[i]);
4743 if (dump_file && (dump_flags & TDF_DETAILS))
4744 {
4745 fprintf (dump_file, "gimple_simplified to ");
4746 if (!gimple_seq_empty_p (*seq))
4747 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4748 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
4749 }
4750 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4751 return true;
4752 }
4753 else if (!inplace)
4754 {
4755 if (gimple_has_lhs (stmt))
4756 {
4757 tree lhs = gimple_get_lhs (stmt);
4758 if (!maybe_push_res_to_seq (res_op, seq, lhs))
4759 return false;
4760 if (dump_file && (dump_flags & TDF_DETAILS))
4761 {
4762 fprintf (dump_file, "gimple_simplified to ");
4763 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4764 }
4765 gsi_replace_with_seq_vops (gsi, *seq);
4766 return true;
4767 }
4768 else
4769 gcc_unreachable ();
4770 }
4771
4772 return false;
4773 }
4774
4775 /* Canonicalize MEM_REFs invariant address operand after propagation. */
4776
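/* E.g., MEM[&foo.bar, 0] becomes plain foo.bar when the access
   semantics match, and an ARRAY_REF of a VIEW_CONVERT_EXPR of a
   vector with a constant in-range index is rewritten into a
   BIT_FIELD_REF of the vector itself. */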
4777 static bool
4778 maybe_canonicalize_mem_ref_addr (tree *t)
4779 {
4780 bool res = false;
4781
4782 if (TREE_CODE (*t) == ADDR_EXPR)
4783 t = &TREE_OPERAND (*t, 0);
4784
4785 /* The C and C++ frontends use an ARRAY_REF for indexing with their
4786 generic vector extension. The actual vector referenced is
4787 view-converted to an array type for this purpose. If the index
4788 is constant the canonical representation in the middle-end is a
4789 BIT_FIELD_REF so re-write the former to the latter here. */
4790 if (TREE_CODE (*t) == ARRAY_REF
4791 && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
4792 && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
4793 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
4794 {
4795 tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
4796 if (VECTOR_TYPE_P (vtype))
4797 {
4798 tree low = array_ref_low_bound (*t);
4799 if (TREE_CODE (low) == INTEGER_CST)
4800 {
4801 if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
4802 {
4803 widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
4804 wi::to_widest (low));
4805 idx = wi::mul (idx, wi::to_widest
4806 (TYPE_SIZE (TREE_TYPE (*t))));
4807 widest_int ext
4808 = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
4809 if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
4810 {
4811 *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
4812 TREE_TYPE (*t),
4813 TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
4814 TYPE_SIZE (TREE_TYPE (*t)),
4815 wide_int_to_tree (bitsizetype, idx));
4816 res = true;
4817 }
4818 }
4819 }
4820 }
4821 }
4822
4823 while (handled_component_p (*t))
4824 t = &TREE_OPERAND (*t, 0);
4825
4826 /* Canonicalize MEM [&foo.bar, 0], which appears after propagation
4827 of invariant addresses into an SSA name MEM_REF address. */
4828 if (TREE_CODE (*t) == MEM_REF
4829 || TREE_CODE (*t) == TARGET_MEM_REF)
4830 {
4831 tree addr = TREE_OPERAND (*t, 0);
4832 if (TREE_CODE (addr) == ADDR_EXPR
4833 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
4834 || handled_component_p (TREE_OPERAND (addr, 0))))
4835 {
4836 tree base;
4837 poly_int64 coffset;
4838 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
4839 &coffset);
4840 if (!base)
4841 gcc_unreachable ();
4842
4843 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
4844 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
4845 TREE_OPERAND (*t, 1),
4846 size_int (coffset));
4847 res = true;
4848 }
4849 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
4850 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
4851 }
4852
4853 /* Canonicalize back MEM_REFs to plain reference trees if the object
4854 accessed is a decl that has the same access semantics as the MEM_REF. */
4855 if (TREE_CODE (*t) == MEM_REF
4856 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
4857 && integer_zerop (TREE_OPERAND (*t, 1))
4858 && MR_DEPENDENCE_CLIQUE (*t) == 0)
4859 {
4860 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4861 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
4862 if (/* Same volatile qualification. */
4863 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
4864 /* Same TBAA behavior with -fstrict-aliasing. */
4865 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
4866 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
4867 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
4868 /* Same alignment. */
4869 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
4870 /* We have to take care here not to drop a required conversion
4871 from the rhs to the lhs if *t appears on the lhs or vice-versa
4872 if it appears on the rhs. Thus require strict type
4873 compatibility. */
4874 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
4875 {
4876 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4877 res = true;
4878 }
4879 }
4880
4881 /* Canonicalize TARGET_MEM_REF in particular with respect to
4882 the indexes becoming constant. */
4883 else if (TREE_CODE (*t) == TARGET_MEM_REF)
4884 {
4885 tree tem = maybe_fold_tmr (*t);
4886 if (tem)
4887 {
4888 *t = tem;
4889 res = true;
4890 }
4891 }
4892
4893 return res;
4894 }
4895
4896 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
4897 distinguishes the two cases. */
4898
4899 static bool
4900 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
4901 {
4902 bool changed = false;
4903 gimple *stmt = gsi_stmt (*gsi);
4904 bool nowarning = gimple_no_warning_p (stmt);
4905 unsigned i;
4906 fold_defer_overflow_warnings ();
4907
4908 /* First do required canonicalization of [TARGET_]MEM_REF addresses
4909 after propagation.
4910 ??? This shouldn't be done in generic folding but in the
4911 propagation helpers which also know whether an address was
4912 propagated.
4913 Also canonicalize operand order. */
4914 switch (gimple_code (stmt))
4915 {
4916 case GIMPLE_ASSIGN:
4917 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
4918 {
4919 tree *rhs = gimple_assign_rhs1_ptr (stmt);
4920 if ((REFERENCE_CLASS_P (*rhs)
4921 || TREE_CODE (*rhs) == ADDR_EXPR)
4922 && maybe_canonicalize_mem_ref_addr (rhs))
4923 changed = true;
4924 tree *lhs = gimple_assign_lhs_ptr (stmt);
4925 if (REFERENCE_CLASS_P (*lhs)
4926 && maybe_canonicalize_mem_ref_addr (lhs))
4927 changed = true;
4928 }
4929 else
4930 {
4931 /* Canonicalize operand order. */
4932 enum tree_code code = gimple_assign_rhs_code (stmt);
4933 if (TREE_CODE_CLASS (code) == tcc_comparison
4934 || commutative_tree_code (code)
4935 || commutative_ternary_tree_code (code))
4936 {
4937 tree rhs1 = gimple_assign_rhs1 (stmt);
4938 tree rhs2 = gimple_assign_rhs2 (stmt);
4939 if (tree_swap_operands_p (rhs1, rhs2))
4940 {
4941 gimple_assign_set_rhs1 (stmt, rhs2);
4942 gimple_assign_set_rhs2 (stmt, rhs1);
4943 if (TREE_CODE_CLASS (code) == tcc_comparison)
4944 gimple_assign_set_rhs_code (stmt,
4945 swap_tree_comparison (code));
4946 changed = true;
4947 }
4948 }
4949 }
4950 break;
4951 case GIMPLE_CALL:
4952 {
4953 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4954 {
4955 tree *arg = gimple_call_arg_ptr (stmt, i);
4956 if (REFERENCE_CLASS_P (*arg)
4957 && maybe_canonicalize_mem_ref_addr (arg))
4958 changed = true;
4959 }
4960 tree *lhs = gimple_call_lhs_ptr (stmt);
4961 if (*lhs
4962 && REFERENCE_CLASS_P (*lhs)
4963 && maybe_canonicalize_mem_ref_addr (lhs))
4964 changed = true;
4965 break;
4966 }
4967 case GIMPLE_ASM:
4968 {
4969 gasm *asm_stmt = as_a <gasm *> (stmt);
4970 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
4971 {
4972 tree link = gimple_asm_output_op (asm_stmt, i);
4973 tree op = TREE_VALUE (link);
4974 if (REFERENCE_CLASS_P (op)
4975 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4976 changed = true;
4977 }
4978 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
4979 {
4980 tree link = gimple_asm_input_op (asm_stmt, i);
4981 tree op = TREE_VALUE (link);
4982 if ((REFERENCE_CLASS_P (op)
4983 || TREE_CODE (op) == ADDR_EXPR)
4984 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4985 changed = true;
4986 }
4987 }
4988 break;
4989 case GIMPLE_DEBUG:
4990 if (gimple_debug_bind_p (stmt))
4991 {
4992 tree *val = gimple_debug_bind_get_value_ptr (stmt);
4993 if (*val
4994 && (REFERENCE_CLASS_P (*val)
4995 || TREE_CODE (*val) == ADDR_EXPR)
4996 && maybe_canonicalize_mem_ref_addr (val))
4997 changed = true;
4998 }
4999 break;
5000 case GIMPLE_COND:
5001 {
5002 /* Canonicalize operand order. */
5003 tree lhs = gimple_cond_lhs (stmt);
5004 tree rhs = gimple_cond_rhs (stmt);
5005 if (tree_swap_operands_p (lhs, rhs))
5006 {
5007 gcond *gc = as_a <gcond *> (stmt);
5008 gimple_cond_set_lhs (gc, rhs);
5009 gimple_cond_set_rhs (gc, lhs);
5010 gimple_cond_set_code (gc,
5011 swap_tree_comparison (gimple_cond_code (gc)));
5012 changed = true;
5013 }
5014 }
5015 default:;
5016 }
5017
5018 /* Dispatch to pattern-based folding. */
5019 if (!inplace
5020 || is_gimple_assign (stmt)
5021 || gimple_code (stmt) == GIMPLE_COND)
5022 {
5023 gimple_seq seq = NULL;
5024 gimple_match_op res_op;
5025 if (gimple_simplify (stmt, &res_op, inplace ? NULL : &seq,
5026 valueize, valueize))
5027 {
5028 if (replace_stmt_with_simplification (gsi, &res_op, &seq, inplace))
5029 changed = true;
5030 else
5031 gimple_seq_discard (seq);
5032 }
5033 }
5034
5035 stmt = gsi_stmt (*gsi);
5036
5037 /* Fold the main computation performed by the statement. */
5038 switch (gimple_code (stmt))
5039 {
5040 case GIMPLE_ASSIGN:
5041 {
5042 /* Try to canonicalize for boolean-typed X the comparisons
5043 X == 0, X == 1, X != 0, and X != 1. */
5044 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
5045 || gimple_assign_rhs_code (stmt) == NE_EXPR)
5046 {
5047 tree lhs = gimple_assign_lhs (stmt);
5048 tree op1 = gimple_assign_rhs1 (stmt);
5049 tree op2 = gimple_assign_rhs2 (stmt);
5050 tree type = TREE_TYPE (op1);
5051
5052 /* Check whether the comparison operands are of the same boolean
5053 type as the result type.
5054 Check that the second operand is an integer constant with value
5055 one or zero. */
5056 if (TREE_CODE (op2) == INTEGER_CST
5057 && (integer_zerop (op2) || integer_onep (op2))
5058 && useless_type_conversion_p (TREE_TYPE (lhs), type))
5059 {
5060 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
5061 bool is_logical_not = false;
5062
5063 /* X == 0 and X != 1 is a logical-not of X;
5064 X == 1 and X != 0 is X */
5065 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
5066 || (cmp_code == NE_EXPR && integer_onep (op2)))
5067 is_logical_not = true;
5068
5069 if (is_logical_not == false)
5070 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
5071 /* Only for one-bit precision typed X is the transformation
5072 !X -> ~X valid. */
5073 else if (TYPE_PRECISION (type) == 1)
5074 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
5075 /* Otherwise we use !X -> X ^ 1. */
5076 else
5077 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
5078 build_int_cst (type, 1));
5079 changed = true;
5080 break;
5081 }
5082 }
5083
5084 unsigned old_num_ops = gimple_num_ops (stmt);
5085 tree lhs = gimple_assign_lhs (stmt);
5086 tree new_rhs = fold_gimple_assign (gsi);
5087 if (new_rhs
5088 && !useless_type_conversion_p (TREE_TYPE (lhs),
5089 TREE_TYPE (new_rhs)))
5090 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
5091 if (new_rhs
5092 && (!inplace
5093 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
5094 {
5095 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
5096 changed = true;
5097 }
5098 break;
5099 }
5100
5101 case GIMPLE_CALL:
5102 changed |= gimple_fold_call (gsi, inplace);
5103 break;
5104
5105 case GIMPLE_ASM:
5106 /* Fold *& in asm operands. */
5107 {
5108 gasm *asm_stmt = as_a <gasm *> (stmt);
5109 size_t noutputs;
5110 const char **oconstraints;
5111 const char *constraint;
5112 bool allows_mem, allows_reg;
5113
5114 noutputs = gimple_asm_noutputs (asm_stmt);
5115 oconstraints = XALLOCAVEC (const char *, noutputs);
5116
5117 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
5118 {
5119 tree link = gimple_asm_output_op (asm_stmt, i);
5120 tree op = TREE_VALUE (link);
5121 oconstraints[i]
5122 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5123 if (REFERENCE_CLASS_P (op)
5124 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
5125 {
5126 TREE_VALUE (link) = op;
5127 changed = true;
5128 }
5129 }
5130 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
5131 {
5132 tree link = gimple_asm_input_op (asm_stmt, i);
5133 tree op = TREE_VALUE (link);
5134 constraint
5135 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5136 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
5137 oconstraints, &allows_mem, &allows_reg);
5138 if (REFERENCE_CLASS_P (op)
5139 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
5140 != NULL_TREE)
5141 {
5142 TREE_VALUE (link) = op;
5143 changed = true;
5144 }
5145 }
5146 }
5147 break;
5148
5149 case GIMPLE_DEBUG:
5150 if (gimple_debug_bind_p (stmt))
5151 {
5152 tree val = gimple_debug_bind_get_value (stmt);
5153 if (val
5154 && REFERENCE_CLASS_P (val))
5155 {
5156 tree tem = maybe_fold_reference (val, false);
5157 if (tem)
5158 {
5159 gimple_debug_bind_set_value (stmt, tem);
5160 changed = true;
5161 }
5162 }
5163 else if (val
5164 && TREE_CODE (val) == ADDR_EXPR)
5165 {
5166 tree ref = TREE_OPERAND (val, 0);
5167 tree tem = maybe_fold_reference (ref, false);
5168 if (tem)
5169 {
5170 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
5171 gimple_debug_bind_set_value (stmt, tem);
5172 changed = true;
5173 }
5174 }
5175 }
5176 break;
5177
5178 case GIMPLE_RETURN:
5179 {
5180 greturn *ret_stmt = as_a<greturn *> (stmt);
5181 tree ret = gimple_return_retval(ret_stmt);
5182
5183 if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
5184 {
5185 tree val = valueize (ret);
5186 if (val && val != ret
5187 && may_propagate_copy (ret, val))
5188 {
5189 gimple_return_set_retval (ret_stmt, val);
5190 changed = true;
5191 }
5192 }
5193 }
5194 break;
5195
5196 default:;
5197 }
5198
5199 stmt = gsi_stmt (*gsi);
5200
5201 /* Fold *& on the lhs. */
5202 if (gimple_has_lhs (stmt))
5203 {
5204 tree lhs = gimple_get_lhs (stmt);
5205 if (lhs && REFERENCE_CLASS_P (lhs))
5206 {
5207 tree new_lhs = maybe_fold_reference (lhs, true);
5208 if (new_lhs)
5209 {
5210 gimple_set_lhs (stmt, new_lhs);
5211 changed = true;
5212 }
5213 }
5214 }
5215
5216 fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
5217 return changed;
5218 }
5219
5220 /* Valueization callback that ends up not following SSA edges. */
5221
5222 tree
5223 no_follow_ssa_edges (tree)
5224 {
5225 return NULL_TREE;
5226 }
5227
5228 /* Valueization callback that ends up following single-use SSA edges only. */
5229
5230 tree
5231 follow_single_use_edges (tree val)
5232 {
5233 if (TREE_CODE (val) == SSA_NAME
5234 && !has_single_use (val))
5235 return NULL_TREE;
5236 return val;
5237 }
5238
5239 /* Valueization callback that follows all SSA edges. */
5240
5241 tree
5242 follow_all_ssa_edges (tree val)
5243 {
5244 return val;
5245 }
5246
5247 /* Fold the statement pointed to by GSI. In some cases, this function may
5248 replace the whole statement with a new one. Returns true iff folding
5249 makes any changes.
5250 The statement pointed to by GSI should be in valid gimple form but may
5251 be in an unfolded state resulting from, for example, constant
5252 propagation, which can produce *&x = 0. */
5253
5254 bool
5255 fold_stmt (gimple_stmt_iterator *gsi)
5256 {
5257 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
5258 }
5259
5260 bool
5261 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
5262 {
5263 return fold_stmt_1 (gsi, false, valueize);
5264 }
5265
5266 /* Perform the minimal folding on statement *GSI. Only operations like
5267 *&x created by constant propagation are handled. The statement cannot
5268 be replaced with a new one. Return true if the statement was
5269 changed, false otherwise.
5270 The statement *GSI should be in valid gimple form but may
5271 be in an unfolded state resulting from, for example, constant
5272 propagation, which can produce *&x = 0. */
5273
5274 bool
5275 fold_stmt_inplace (gimple_stmt_iterator *gsi)
5276 {
5277 gimple *stmt = gsi_stmt (*gsi);
5278 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
5279 gcc_assert (gsi_stmt (*gsi) == stmt);
5280 return changed;
5281 }
5282
5283 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
5284 if EXPR is null or we don't know how.
5285 If non-null, the result always has boolean type. */
5286
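/* E.g., an SSA name X of non-boolean type canonicalizes to X != 0
   (or X == 0 when INVERT is true), a nonzero integer constant
   canonicalizes to boolean_true_node, and a comparison is rebuilt,
   possibly inverted, with boolean type. */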
5287 static tree
5288 canonicalize_bool (tree expr, bool invert)
5289 {
5290 if (!expr)
5291 return NULL_TREE;
5292 else if (invert)
5293 {
5294 if (integer_nonzerop (expr))
5295 return boolean_false_node;
5296 else if (integer_zerop (expr))
5297 return boolean_true_node;
5298 else if (TREE_CODE (expr) == SSA_NAME)
5299 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
5300 build_int_cst (TREE_TYPE (expr), 0));
5301 else if (COMPARISON_CLASS_P (expr))
5302 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
5303 boolean_type_node,
5304 TREE_OPERAND (expr, 0),
5305 TREE_OPERAND (expr, 1));
5306 else
5307 return NULL_TREE;
5308 }
5309 else
5310 {
5311 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5312 return expr;
5313 if (integer_nonzerop (expr))
5314 return boolean_true_node;
5315 else if (integer_zerop (expr))
5316 return boolean_false_node;
5317 else if (TREE_CODE (expr) == SSA_NAME)
5318 return fold_build2 (NE_EXPR, boolean_type_node, expr,
5319 build_int_cst (TREE_TYPE (expr), 0));
5320 else if (COMPARISON_CLASS_P (expr))
5321 return fold_build2 (TREE_CODE (expr),
5322 boolean_type_node,
5323 TREE_OPERAND (expr, 0),
5324 TREE_OPERAND (expr, 1));
5325 else
5326 return NULL_TREE;
5327 }
5328 }
5329
5330 /* Check to see if a boolean expression EXPR is logically equivalent to the
5331 comparison (OP1 CODE OP2). Check for various identities involving
5332 SSA_NAMEs. */
5333
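/* E.g., if EXPR is the boolean SSA name b_1 defined by
     b_1 = x_2 < y_3;
   then same_bool_comparison_p (b_1, LT_EXPR, x_2, y_3) is true, and
   forms such as (b_1 != 0) are looked through recursively. */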
5334 static bool
5335 same_bool_comparison_p (const_tree expr, enum tree_code code,
5336 const_tree op1, const_tree op2)
5337 {
5338 gimple *s;
5339
5340 /* The obvious case. */
5341 if (TREE_CODE (expr) == code
5342 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
5343 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
5344 return true;
5345
5346 /* Check for comparing NAME against (NAME != 0), and for the case where
5347 EXPR is an SSA_NAME whose defining statement matches the comparison. */
5348 if (TREE_CODE (expr) == SSA_NAME
5349 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5350 {
5351 if (operand_equal_p (expr, op1, 0))
5352 return ((code == NE_EXPR && integer_zerop (op2))
5353 || (code == EQ_EXPR && integer_nonzerop (op2)));
5354 s = SSA_NAME_DEF_STMT (expr);
5355 if (is_gimple_assign (s)
5356 && gimple_assign_rhs_code (s) == code
5357 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
5358 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
5359 return true;
5360 }
5361
5362 /* If op1 is of the form (name != 0) or (name == 0), and the definition
5363 of name is a comparison, recurse. */
5364 if (TREE_CODE (op1) == SSA_NAME
5365 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
5366 {
5367 s = SSA_NAME_DEF_STMT (op1);
5368 if (is_gimple_assign (s)
5369 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
5370 {
5371 enum tree_code c = gimple_assign_rhs_code (s);
5372 if ((code == NE_EXPR && integer_zerop (op2))
5373 || (code == EQ_EXPR && integer_nonzerop (op2)))
5374 return same_bool_comparison_p (expr, c,
5375 gimple_assign_rhs1 (s),
5376 gimple_assign_rhs2 (s));
5377 if ((code == EQ_EXPR && integer_zerop (op2))
5378 || (code == NE_EXPR && integer_nonzerop (op2)))
5379 return same_bool_comparison_p (expr,
5380 invert_tree_comparison (c, false),
5381 gimple_assign_rhs1 (s),
5382 gimple_assign_rhs2 (s));
5383 }
5384 }
5385 return false;
5386 }
5387
5388 /* Check to see if two boolean expressions OP1 and OP2 are logically
5389 equivalent. */
5390
5391 static bool
5392 same_bool_result_p (const_tree op1, const_tree op2)
5393 {
5394 /* Simple cases first. */
5395 if (operand_equal_p (op1, op2, 0))
5396 return true;
5397
5398 /* Check the cases where at least one of the operands is a comparison.
5399 These are a bit smarter than operand_equal_p in that they apply some
5400 identities on SSA_NAMEs. */
5401 if (COMPARISON_CLASS_P (op2)
5402 && same_bool_comparison_p (op1, TREE_CODE (op2),
5403 TREE_OPERAND (op2, 0),
5404 TREE_OPERAND (op2, 1)))
5405 return true;
5406 if (COMPARISON_CLASS_P (op1)
5407 && same_bool_comparison_p (op2, TREE_CODE (op1),
5408 TREE_OPERAND (op1, 0),
5409 TREE_OPERAND (op1, 1)))
5410 return true;
5411
5412 /* Default case. */
5413 return false;
5414 }
5415
5416 /* Forward declarations for some mutually recursive functions. */
5417
5418 static tree
5419 and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
5420 enum tree_code code2, tree op2a, tree op2b);
5421 static tree
5422 and_var_with_comparison (tree type, tree var, bool invert,
5423 enum tree_code code2, tree op2a, tree op2b);
5424 static tree
5425 and_var_with_comparison_1 (tree type, gimple *stmt,
5426 enum tree_code code2, tree op2a, tree op2b);
5427 static tree
5428 or_comparisons_1 (tree, enum tree_code code1, tree op1a, tree op1b,
5429 enum tree_code code2, tree op2a, tree op2b);
5430 static tree
5431 or_var_with_comparison (tree, tree var, bool invert,
5432 enum tree_code code2, tree op2a, tree op2b);
5433 static tree
5434 or_var_with_comparison_1 (tree, gimple *stmt,
5435 enum tree_code code2, tree op2a, tree op2b);
5436
5437 /* Helper function for and_comparisons_1: try to simplify the AND of the
5438 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5439 If INVERT is true, invert the value of the VAR before doing the AND.
5440 Return NULL_TREE if we can't simplify this to a single expression. */
5441
5442 static tree
5443 and_var_with_comparison (tree type, tree var, bool invert,
5444 enum tree_code code2, tree op2a, tree op2b)
5445 {
5446 tree t;
5447 gimple *stmt = SSA_NAME_DEF_STMT (var);
5448
5449 /* We can only deal with variables whose definitions are assignments. */
5450 if (!is_gimple_assign (stmt))
5451 return NULL_TREE;
5452
5453 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5454 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
5455 Then we only have to consider the simpler non-inverted cases. */
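/* E.g. the AND of !var with (x < y) is computed here as !(var OR (x >= y)):
   or_var_with_comparison_1 is invoked with the inverted comparison and
   canonicalize_bool undoes the outer negation afterwards.  */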
5456 if (invert)
5457 t = or_var_with_comparison_1 (type, stmt,
5458 invert_tree_comparison (code2, false),
5459 op2a, op2b);
5460 else
5461 t = and_var_with_comparison_1 (type, stmt, code2, op2a, op2b);
5462 return canonicalize_bool (t, invert);
5463 }
5464
5465 /* Try to simplify the AND of the ssa variable defined by the assignment
5466 STMT with the comparison specified by (OP2A CODE2 OP2B).
5467 Return NULL_TREE if we can't simplify this to a single expression. */
5468
5469 static tree
5470 and_var_with_comparison_1 (tree type, gimple *stmt,
5471 enum tree_code code2, tree op2a, tree op2b)
5472 {
5473 tree var = gimple_assign_lhs (stmt);
5474 tree true_test_var = NULL_TREE;
5475 tree false_test_var = NULL_TREE;
5476 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5477
5478 /* Check for identities like (var AND (var == 0)) => false. */
5479 if (TREE_CODE (op2a) == SSA_NAME
5480 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5481 {
5482 if ((code2 == NE_EXPR && integer_zerop (op2b))
5483 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5484 {
5485 true_test_var = op2a;
5486 if (var == true_test_var)
5487 return var;
5488 }
5489 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5490 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5491 {
5492 false_test_var = op2a;
5493 if (var == false_test_var)
5494 return boolean_false_node;
5495 }
5496 }
5497
5498 /* If the definition is a comparison, recurse on it. */
5499 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5500 {
5501 tree t = and_comparisons_1 (type, innercode,
5502 gimple_assign_rhs1 (stmt),
5503 gimple_assign_rhs2 (stmt),
5504 code2,
5505 op2a,
5506 op2b);
5507 if (t)
5508 return t;
5509 }
5510
5511 /* If the definition is an AND or OR expression, we may be able to
5512 simplify by reassociating. */
5513 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5514 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
5515 {
5516 tree inner1 = gimple_assign_rhs1 (stmt);
5517 tree inner2 = gimple_assign_rhs2 (stmt);
5518 gimple *s;
5519 tree t;
5520 tree partial = NULL_TREE;
5521 bool is_and = (innercode == BIT_AND_EXPR);
5522
5523 /* Check for boolean identities that don't require recursive examination
5524 of inner1/inner2:
5525 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
5526 inner1 AND (inner1 OR inner2) => inner1
5527 !inner1 AND (inner1 AND inner2) => false
5528 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
5529 Likewise for similar cases involving inner2. */
5530 if (inner1 == true_test_var)
5531 return (is_and ? var : inner1);
5532 else if (inner2 == true_test_var)
5533 return (is_and ? var : inner2);
5534 else if (inner1 == false_test_var)
5535 return (is_and
5536 ? boolean_false_node
5537 : and_var_with_comparison (type, inner2, false, code2, op2a,
5538 op2b));
5539 else if (inner2 == false_test_var)
5540 return (is_and
5541 ? boolean_false_node
5542 : and_var_with_comparison (type, inner1, false, code2, op2a,
5543 op2b));
5544
5545 /* Next, redistribute/reassociate the AND across the inner tests.
5546 Compute the first partial result, (inner1 AND (op2a code op2b)) */
5547 if (TREE_CODE (inner1) == SSA_NAME
5548 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5549 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5550 && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
5551 gimple_assign_rhs1 (s),
5552 gimple_assign_rhs2 (s),
5553 code2, op2a, op2b)))
5554 {
5555 /* Handle the AND case, where we are reassociating:
5556 (inner1 AND inner2) AND (op2a code2 op2b)
5557 => (t AND inner2)
5558 If the partial result t is a constant, we win. Otherwise
5559 continue on to try reassociating with the other inner test. */
5560 if (is_and)
5561 {
5562 if (integer_onep (t))
5563 return inner2;
5564 else if (integer_zerop (t))
5565 return boolean_false_node;
5566 }
5567
5568 /* Handle the OR case, where we are redistributing:
5569 (inner1 OR inner2) AND (op2a code2 op2b)
5570 => (t OR (inner2 AND (op2a code2 op2b))) */
5571 else if (integer_onep (t))
5572 return boolean_true_node;
5573
5574 /* Save partial result for later. */
5575 partial = t;
5576 }
5577
5578 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
5579 if (TREE_CODE (inner2) == SSA_NAME
5580 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5581 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5582 && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
5583 gimple_assign_rhs1 (s),
5584 gimple_assign_rhs2 (s),
5585 code2, op2a, op2b)))
5586 {
5587 /* Handle the AND case, where we are reassociating:
5588 (inner1 AND inner2) AND (op2a code2 op2b)
5589 => (inner1 AND t) */
5590 if (is_and)
5591 {
5592 if (integer_onep (t))
5593 return inner1;
5594 else if (integer_zerop (t))
5595 return boolean_false_node;
5596 /* If both are the same, we can apply the identity
5597 (x AND x) == x. */
5598 else if (partial && same_bool_result_p (t, partial))
5599 return t;
5600 }
5601
5602 /* Handle the OR case, where we are redistributing:
5603 (inner1 OR inner2) AND (op2a code2 op2b)
5604 => (t OR (inner1 AND (op2a code2 op2b)))
5605 => (t OR partial) */
5606 else
5607 {
5608 if (integer_onep (t))
5609 return boolean_true_node;
5610 else if (partial)
5611 {
5612 /* We already got a simplification for the other
5613 operand to the redistributed OR expression. The
5614 interesting case is when at least one is false.
5615 Or, if both are the same, we can apply the identity
5616 (x OR x) == x. */
5617 if (integer_zerop (partial))
5618 return t;
5619 else if (integer_zerop (t))
5620 return partial;
5621 else if (same_bool_result_p (t, partial))
5622 return t;
5623 }
5624 }
5625 }
5626 }
5627 return NULL_TREE;
5628 }
5629
5630 /* Try to simplify the AND of two comparisons defined by
5631 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5632 If this can be done without constructing an intermediate value,
5633 return the resulting tree; otherwise NULL_TREE is returned.
5634 This function is deliberately asymmetric as it recurses on SSA_DEFs
5635 in the first comparison but not the second. */
5636
5637 static tree
5638 and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
5639 enum tree_code code2, tree op2a, tree op2b)
5640 {
5641 tree truth_type = truth_type_for (TREE_TYPE (op1a));
5642
5643 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
5644 if (operand_equal_p (op1a, op2a, 0)
5645 && operand_equal_p (op1b, op2b, 0))
5646 {
5647 /* Result will be either NULL_TREE, or a combined comparison. */
5648 tree t = combine_comparisons (UNKNOWN_LOCATION,
5649 TRUTH_ANDIF_EXPR, code1, code2,
5650 truth_type, op1a, op1b);
5651 if (t)
5652 return t;
5653 }
5654
5655 /* Likewise the swapped case of the above. */
5656 if (operand_equal_p (op1a, op2b, 0)
5657 && operand_equal_p (op1b, op2a, 0))
5658 {
5659 /* Result will be either NULL_TREE, or a combined comparison. */
5660 tree t = combine_comparisons (UNKNOWN_LOCATION,
5661 TRUTH_ANDIF_EXPR, code1,
5662 swap_tree_comparison (code2),
5663 truth_type, op1a, op1b);
5664 if (t)
5665 return t;
5666 }
5667
5668 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5669 NAME's definition is a truth value. See if there are any simplifications
5670 that can be done against the NAME's definition. */
5671 if (TREE_CODE (op1a) == SSA_NAME
5672 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5673 && (integer_zerop (op1b) || integer_onep (op1b)))
5674 {
5675 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5676 || (code1 == NE_EXPR && integer_onep (op1b)));
5677 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
5678 switch (gimple_code (stmt))
5679 {
5680 case GIMPLE_ASSIGN:
5681 /* Try to simplify by copy-propagating the definition. */
5682 return and_var_with_comparison (type, op1a, invert, code2, op2a,
5683 op2b);
5684
5685 case GIMPLE_PHI:
5686 /* If every argument to the PHI produces the same result when
5687 ANDed with the second comparison, we win.
5688 Do not do this unless the type is bool since we need a bool
5689 result here anyway. */
5690 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5691 {
5692 tree result = NULL_TREE;
5693 unsigned i;
5694 for (i = 0; i < gimple_phi_num_args (stmt); i++)
5695 {
5696 tree arg = gimple_phi_arg_def (stmt, i);
5697
5698 /* If this PHI has itself as an argument, ignore it.
5699 If all the other args produce the same result,
5700 we're still OK. */
5701 if (arg == gimple_phi_result (stmt))
5702 continue;
5703 else if (TREE_CODE (arg) == INTEGER_CST)
5704 {
5705 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
5706 {
5707 if (!result)
5708 result = boolean_false_node;
5709 else if (!integer_zerop (result))
5710 return NULL_TREE;
5711 }
5712 else if (!result)
5713 result = fold_build2 (code2, boolean_type_node,
5714 op2a, op2b);
5715 else if (!same_bool_comparison_p (result,
5716 code2, op2a, op2b))
5717 return NULL_TREE;
5718 }
5719 else if (TREE_CODE (arg) == SSA_NAME
5720 && !SSA_NAME_IS_DEFAULT_DEF (arg))
5721 {
5722 tree temp;
5723 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
5724 /* In simple cases we can look through PHI nodes,
5725 but we have to be careful with loops.
5726 See PR49073. */
5727 if (! dom_info_available_p (CDI_DOMINATORS)
5728 || gimple_bb (def_stmt) == gimple_bb (stmt)
5729 || dominated_by_p (CDI_DOMINATORS,
5730 gimple_bb (def_stmt),
5731 gimple_bb (stmt)))
5732 return NULL_TREE;
5733 temp = and_var_with_comparison (type, arg, invert, code2,
5734 op2a, op2b);
5735 if (!temp)
5736 return NULL_TREE;
5737 else if (!result)
5738 result = temp;
5739 else if (!same_bool_result_p (result, temp))
5740 return NULL_TREE;
5741 }
5742 else
5743 return NULL_TREE;
5744 }
5745 return result;
5746 }
5747
5748 default:
5749 break;
5750 }
5751 }
5752 return NULL_TREE;
5753 }
5754
5755 /* Helper function for maybe_fold_and_comparisons and
5756 maybe_fold_or_comparisons: try to simplify the AND/OR of the two
5757 comparisons specified by (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B)
5758 via match.pd. Return NULL_TREE if we can't simplify this to a single
5759 expression. To avoid the cost of building real SSA names and gimple
5760 stmts we allocate them on the stack; this makes the code a bit ugly. */
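/* In effect we fabricate two throwaway statements, lhs1 = op1a code1 op1b
   and lhs2 = op2a code2 op2b, entirely in automatic storage, and then ask
   the match.pd machinery to simplify (lhs1 code lhs2) without ever
   committing anything to the IL.  */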
5761
5762 static tree
5763 maybe_fold_comparisons_from_match_pd (tree type, enum tree_code code,
5764 enum tree_code code1,
5765 tree op1a, tree op1b,
5766 enum tree_code code2, tree op2a,
5767 tree op2b)
5768 {
5769 /* Allocate gimple stmt1 on the stack. */
5770 gassign *stmt1
5771 = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
5772 gimple_init (stmt1, GIMPLE_ASSIGN, 3);
5773 gimple_assign_set_rhs_code (stmt1, code1);
5774 gimple_assign_set_rhs1 (stmt1, op1a);
5775 gimple_assign_set_rhs2 (stmt1, op1b);
5776
5777 /* Allocate gimple stmt2 on the stack. */
5778 gassign *stmt2
5779 = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
5780 gimple_init (stmt2, GIMPLE_ASSIGN, 3);
5781 gimple_assign_set_rhs_code (stmt2, code2);
5782 gimple_assign_set_rhs1 (stmt2, op2a);
5783 gimple_assign_set_rhs2 (stmt2, op2b);
5784
5785 /* Allocate SSA names(lhs1) on the stack. */
5786 tree lhs1 = (tree)XALLOCA (tree_ssa_name);
5787 memset (lhs1, 0, sizeof (tree_ssa_name));
5788 TREE_SET_CODE (lhs1, SSA_NAME);
5789 TREE_TYPE (lhs1) = type;
5790 init_ssa_name_imm_use (lhs1);
5791
5792 /* Allocate SSA names(lhs2) on the stack. */
5793 tree lhs2 = (tree)XALLOCA (tree_ssa_name);
5794 memset (lhs2, 0, sizeof (tree_ssa_name));
5795 TREE_SET_CODE (lhs2, SSA_NAME);
5796 TREE_TYPE (lhs2) = type;
5797 init_ssa_name_imm_use (lhs2);
5798
5799 gimple_assign_set_lhs (stmt1, lhs1);
5800 gimple_assign_set_lhs (stmt2, lhs2);
5801
5802 gimple_match_op op (gimple_match_cond::UNCOND, code,
5803 type, gimple_assign_lhs (stmt1),
5804 gimple_assign_lhs (stmt2));
5805 if (op.resimplify (NULL, follow_all_ssa_edges))
5806 {
5807 if (gimple_simplified_result_is_gimple_val (&op))
5808 {
5809 tree res = op.ops[0];
5810 if (res == lhs1)
5811 return build2 (code1, type, op1a, op1b);
5812 else if (res == lhs2)
5813 return build2 (code2, type, op2a, op2b);
5814 else
5815 return res;
5816 }
5817 else if (op.code.is_tree_code ()
5818 && TREE_CODE_CLASS ((tree_code)op.code) == tcc_comparison)
5819 {
5820 tree op0 = op.ops[0];
5821 tree op1 = op.ops[1];
5822 if (op0 == lhs1 || op0 == lhs2 || op1 == lhs1 || op1 == lhs2)
5823 return NULL_TREE; /* not simple */
5824
5825 return build2 ((enum tree_code)op.code, op.type, op0, op1);
5826 }
5827 }
5828
5829 return NULL_TREE;
5830 }
5831
5832 /* Try to simplify the AND of two comparisons, specified by
5833 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5834 If this can be simplified to a single expression (without requiring
5835 introducing more SSA variables to hold intermediate values),
5836 return the resulting tree. Otherwise return NULL_TREE.
5837 If the result expression is non-null, it has boolean type. */
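/* For instance, with integer operands x and y, the AND of (x <= y) and
   (x >= y) can be folded by combine_comparisons into the single
   comparison x == y.  */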
5838
5839 tree
5840 maybe_fold_and_comparisons (tree type,
5841 enum tree_code code1, tree op1a, tree op1b,
5842 enum tree_code code2, tree op2a, tree op2b)
5843 {
5844 if (tree t = and_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b))
5845 return t;
5846
5847 if (tree t = and_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b))
5848 return t;
5849
5850 if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_AND_EXPR, code1,
5851 op1a, op1b, code2, op2a,
5852 op2b))
5853 return t;
5854
5855 return NULL_TREE;
5856 }
5857
5858 /* Helper function for or_comparisons_1: try to simplify the OR of the
5859 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5860 If INVERT is true, invert the value of VAR before doing the OR.
5861 Return NULL_TREE if we can't simplify this to a single expression. */
5862
5863 static tree
5864 or_var_with_comparison (tree type, tree var, bool invert,
5865 enum tree_code code2, tree op2a, tree op2b)
5866 {
5867 tree t;
5868 gimple *stmt = SSA_NAME_DEF_STMT (var);
5869
5870 /* We can only deal with variables whose definitions are assignments. */
5871 if (!is_gimple_assign (stmt))
5872 return NULL_TREE;
5873
5874 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5875 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
5876 Then we only have to consider the simpler non-inverted cases. */
5877 if (invert)
5878 t = and_var_with_comparison_1 (type, stmt,
5879 invert_tree_comparison (code2, false),
5880 op2a, op2b);
5881 else
5882 t = or_var_with_comparison_1 (type, stmt, code2, op2a, op2b);
5883 return canonicalize_bool (t, invert);
5884 }
5885
5886 /* Try to simplify the OR of the ssa variable defined by the assignment
5887 STMT with the comparison specified by (OP2A CODE2 OP2B).
5888 Return NULL_TREE if we can't simplify this to a single expression. */
5889
5890 static tree
5891 or_var_with_comparison_1 (tree type, gimple *stmt,
5892 enum tree_code code2, tree op2a, tree op2b)
5893 {
5894 tree var = gimple_assign_lhs (stmt);
5895 tree true_test_var = NULL_TREE;
5896 tree false_test_var = NULL_TREE;
5897 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5898
5899 /* Check for identities like (var OR (var != 0)) => true. */
5900 if (TREE_CODE (op2a) == SSA_NAME
5901 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5902 {
5903 if ((code2 == NE_EXPR && integer_zerop (op2b))
5904 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5905 {
5906 true_test_var = op2a;
5907 if (var == true_test_var)
5908 return var;
5909 }
5910 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5911 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5912 {
5913 false_test_var = op2a;
5914 if (var == false_test_var)
5915 return boolean_true_node;
5916 }
5917 }
5918
5919 /* If the definition is a comparison, recurse on it. */
5920 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5921 {
5922 tree t = or_comparisons_1 (type, innercode,
5923 gimple_assign_rhs1 (stmt),
5924 gimple_assign_rhs2 (stmt),
5925 code2,
5926 op2a,
5927 op2b);
5928 if (t)
5929 return t;
5930 }
5931
5932 /* If the definition is an AND or OR expression, we may be able to
5933 simplify by reassociating. */
5934 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5935 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
5936 {
5937 tree inner1 = gimple_assign_rhs1 (stmt);
5938 tree inner2 = gimple_assign_rhs2 (stmt);
5939 gimple *s;
5940 tree t;
5941 tree partial = NULL_TREE;
5942 bool is_or = (innercode == BIT_IOR_EXPR);
5943
5944 /* Check for boolean identities that don't require recursive examination
5945 of inner1/inner2:
5946 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
5947 inner1 OR (inner1 AND inner2) => inner1
5948 !inner1 OR (inner1 OR inner2) => true
5949 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
5950 */
5951 if (inner1 == true_test_var)
5952 return (is_or ? var : inner1);
5953 else if (inner2 == true_test_var)
5954 return (is_or ? var : inner2);
5955 else if (inner1 == false_test_var)
5956 return (is_or
5957 ? boolean_true_node
5958 : or_var_with_comparison (type, inner2, false, code2, op2a,
5959 op2b));
5960 else if (inner2 == false_test_var)
5961 return (is_or
5962 ? boolean_true_node
5963 : or_var_with_comparison (type, inner1, false, code2, op2a,
5964 op2b));
5965
5966 /* Next, redistribute/reassociate the OR across the inner tests.
5967 Compute the first partial result, (inner1 OR (op2a code op2b)) */
5968 if (TREE_CODE (inner1) == SSA_NAME
5969 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5970 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5971 && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
5972 gimple_assign_rhs1 (s),
5973 gimple_assign_rhs2 (s),
5974 code2, op2a, op2b)))
5975 {
5976 /* Handle the OR case, where we are reassociating:
5977 (inner1 OR inner2) OR (op2a code2 op2b)
5978 => (t OR inner2)
5979 If the partial result t is a constant, we win. Otherwise
5980 continue on to try reassociating with the other inner test. */
5981 if (is_or)
5982 {
5983 if (integer_onep (t))
5984 return boolean_true_node;
5985 else if (integer_zerop (t))
5986 return inner2;
5987 }
5988
5989 /* Handle the AND case, where we are redistributing:
5990 (inner1 AND inner2) OR (op2a code2 op2b)
5991 => (t AND (inner2 OR (op2a code op2b))) */
5992 else if (integer_zerop (t))
5993 return boolean_false_node;
5994
5995 /* Save partial result for later. */
5996 partial = t;
5997 }
5998
5999 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
6000 if (TREE_CODE (inner2) == SSA_NAME
6001 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
6002 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
6003 && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
6004 gimple_assign_rhs1 (s),
6005 gimple_assign_rhs2 (s),
6006 code2, op2a, op2b)))
6007 {
6008 /* Handle the OR case, where we are reassociating:
6009 (inner1 OR inner2) OR (op2a code2 op2b)
6010 => (inner1 OR t)
6011 => (t OR partial) */
6012 if (is_or)
6013 {
6014 if (integer_zerop (t))
6015 return inner1;
6016 else if (integer_onep (t))
6017 return boolean_true_node;
6018 /* If both are the same, we can apply the identity
6019 (x OR x) == x. */
6020 else if (partial && same_bool_result_p (t, partial))
6021 return t;
6022 }
6023
6024 /* Handle the AND case, where we are redistributing:
6025 (inner1 AND inner2) OR (op2a code2 op2b)
6026 => (t AND (inner1 OR (op2a code2 op2b)))
6027 => (t AND partial) */
6028 else
6029 {
6030 if (integer_zerop (t))
6031 return boolean_false_node;
6032 else if (partial)
6033 {
6034 /* We already got a simplification for the other
6035 operand to the redistributed AND expression. The
6036 interesting case is when at least one is true.
6037 Or, if both are the same, we can apply the identity
6038 (x AND x) == x. */
6039 if (integer_onep (partial))
6040 return t;
6041 else if (integer_onep (t))
6042 return partial;
6043 else if (same_bool_result_p (t, partial))
6044 return t;
6045 }
6046 }
6047 }
6048 }
6049 return NULL_TREE;
6050 }
6051
6052 /* Try to simplify the OR of two comparisons defined by
6053 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
6054 If this can be done without constructing an intermediate value,
6055 return the resulting tree; otherwise NULL_TREE is returned.
6056 This function is deliberately asymmetric as it recurses on SSA_DEFs
6057 in the first comparison but not the second. */
6058
6059 static tree
6060 or_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
6061 enum tree_code code2, tree op2a, tree op2b)
6062 {
6063 tree truth_type = truth_type_for (TREE_TYPE (op1a));
6064
6065 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
6066 if (operand_equal_p (op1a, op2a, 0)
6067 && operand_equal_p (op1b, op2b, 0))
6068 {
6069 /* Result will be either NULL_TREE, or a combined comparison. */
6070 tree t = combine_comparisons (UNKNOWN_LOCATION,
6071 TRUTH_ORIF_EXPR, code1, code2,
6072 truth_type, op1a, op1b);
6073 if (t)
6074 return t;
6075 }
6076
6077 /* Likewise the swapped case of the above. */
6078 if (operand_equal_p (op1a, op2b, 0)
6079 && operand_equal_p (op1b, op2a, 0))
6080 {
6081 /* Result will be either NULL_TREE, or a combined comparison. */
6082 tree t = combine_comparisons (UNKNOWN_LOCATION,
6083 TRUTH_ORIF_EXPR, code1,
6084 swap_tree_comparison (code2),
6085 truth_type, op1a, op1b);
6086 if (t)
6087 return t;
6088 }
6089
6090 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
6091 NAME's definition is a truth value. See if there are any simplifications
6092 that can be done against the NAME's definition. */
6093 if (TREE_CODE (op1a) == SSA_NAME
6094 && (code1 == NE_EXPR || code1 == EQ_EXPR)
6095 && (integer_zerop (op1b) || integer_onep (op1b)))
6096 {
6097 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
6098 || (code1 == NE_EXPR && integer_onep (op1b)));
6099 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
6100 switch (gimple_code (stmt))
6101 {
6102 case GIMPLE_ASSIGN:
6103 /* Try to simplify by copy-propagating the definition. */
6104 return or_var_with_comparison (type, op1a, invert, code2, op2a,
6105 op2b);
6106
6107 case GIMPLE_PHI:
6108 /* If every argument to the PHI produces the same result when
6109 ORed with the second comparison, we win.
6110 Do not do this unless the type is bool since we need a bool
6111 result here anyway. */
6112 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
6113 {
6114 tree result = NULL_TREE;
6115 unsigned i;
6116 for (i = 0; i < gimple_phi_num_args (stmt); i++)
6117 {
6118 tree arg = gimple_phi_arg_def (stmt, i);
6119
6120 /* If this PHI has itself as an argument, ignore it.
6121 If all the other args produce the same result,
6122 we're still OK. */
6123 if (arg == gimple_phi_result (stmt))
6124 continue;
6125 else if (TREE_CODE (arg) == INTEGER_CST)
6126 {
6127 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
6128 {
6129 if (!result)
6130 result = boolean_true_node;
6131 else if (!integer_onep (result))
6132 return NULL_TREE;
6133 }
6134 else if (!result)
6135 result = fold_build2 (code2, boolean_type_node,
6136 op2a, op2b);
6137 else if (!same_bool_comparison_p (result,
6138 code2, op2a, op2b))
6139 return NULL_TREE;
6140 }
6141 else if (TREE_CODE (arg) == SSA_NAME
6142 && !SSA_NAME_IS_DEFAULT_DEF (arg))
6143 {
6144 tree temp;
6145 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6146 /* In simple cases we can look through PHI nodes,
6147 but we have to be careful with loops.
6148 See PR49073. */
6149 if (! dom_info_available_p (CDI_DOMINATORS)
6150 || gimple_bb (def_stmt) == gimple_bb (stmt)
6151 || dominated_by_p (CDI_DOMINATORS,
6152 gimple_bb (def_stmt),
6153 gimple_bb (stmt)))
6154 return NULL_TREE;
6155 temp = or_var_with_comparison (type, arg, invert, code2,
6156 op2a, op2b);
6157 if (!temp)
6158 return NULL_TREE;
6159 else if (!result)
6160 result = temp;
6161 else if (!same_bool_result_p (result, temp))
6162 return NULL_TREE;
6163 }
6164 else
6165 return NULL_TREE;
6166 }
6167 return result;
6168 }
6169
6170 default:
6171 break;
6172 }
6173 }
6174 return NULL_TREE;
6175 }
6176
6177 /* Try to simplify the OR of two comparisons, specified by
6178 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
6179 If this can be simplified to a single expression (without requiring
6180 introducing more SSA variables to hold intermediate values),
6181 return the resulting tree. Otherwise return NULL_TREE.
6182 If the result expression is non-null, it has boolean type. */
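/* For instance, with integer operands x and y, the OR of (x < y) and
   (x > y) can be folded by combine_comparisons into the single
   comparison x != y.  */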
6183
6184 tree
6185 maybe_fold_or_comparisons (tree type,
6186 enum tree_code code1, tree op1a, tree op1b,
6187 enum tree_code code2, tree op2a, tree op2b)
6188 {
6189 if (tree t = or_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b))
6190 return t;
6191
6192 if (tree t = or_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b))
6193 return t;
6194
6195 if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_IOR_EXPR, code1,
6196 op1a, op1b, code2, op2a,
6197 op2b))
6198 return t;
6199
6200 return NULL_TREE;
6201 }
6202
6203 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6204
6205 Either NULL_TREE, a simplified but non-constant or a constant
6206 is returned.
6207
6208 ??? This should go into a gimple-fold-inline.h file to be eventually
6209 privatized with the single valueize function used in the various TUs
6210 to avoid the indirect function call overhead. */
6211
6212 tree
6213 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
6214 tree (*gvalueize) (tree))
6215 {
6216 gimple_match_op res_op;
6217 /* ??? The SSA propagators do not correctly deal with following SSA use-def
6218 edges if there are intermediate VARYING defs. For this reason
6219 do not follow SSA edges here even though SCCVN can technically
6220 just deal fine with that. */
6221 if (gimple_simplify (stmt, &res_op, NULL, gvalueize, valueize))
6222 {
6223 tree res = NULL_TREE;
6224 if (gimple_simplified_result_is_gimple_val (&res_op))
6225 res = res_op.ops[0];
6226 else if (mprts_hook)
6227 res = mprts_hook (&res_op);
6228 if (res)
6229 {
6230 if (dump_file && dump_flags & TDF_DETAILS)
6231 {
6232 fprintf (dump_file, "Match-and-simplified ");
6233 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
6234 fprintf (dump_file, " to ");
6235 print_generic_expr (dump_file, res);
6236 fprintf (dump_file, "\n");
6237 }
6238 return res;
6239 }
6240 }
6241
6242 location_t loc = gimple_location (stmt);
6243 switch (gimple_code (stmt))
6244 {
6245 case GIMPLE_ASSIGN:
6246 {
6247 enum tree_code subcode = gimple_assign_rhs_code (stmt);
6248
6249 switch (get_gimple_rhs_class (subcode))
6250 {
6251 case GIMPLE_SINGLE_RHS:
6252 {
6253 tree rhs = gimple_assign_rhs1 (stmt);
6254 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
6255
6256 if (TREE_CODE (rhs) == SSA_NAME)
6257 {
6258 /* If the RHS is an SSA_NAME, return its known constant value,
6259 if any. */
6260 return (*valueize) (rhs);
6261 }
6262 /* Handle propagating invariant addresses into address
6263 operations. */
6264 else if (TREE_CODE (rhs) == ADDR_EXPR
6265 && !is_gimple_min_invariant (rhs))
6266 {
6267 poly_int64 offset = 0;
6268 tree base;
6269 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
6270 &offset,
6271 valueize);
6272 if (base
6273 && (CONSTANT_CLASS_P (base)
6274 || decl_address_invariant_p (base)))
6275 return build_invariant_address (TREE_TYPE (rhs),
6276 base, offset);
6277 }
6278 else if (TREE_CODE (rhs) == CONSTRUCTOR
6279 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
6280 && known_eq (CONSTRUCTOR_NELTS (rhs),
6281 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
6282 {
6283 unsigned i, nelts;
6284 tree val;
6285
6286 nelts = CONSTRUCTOR_NELTS (rhs);
6287 tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
6288 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
6289 {
6290 val = (*valueize) (val);
6291 if (TREE_CODE (val) == INTEGER_CST
6292 || TREE_CODE (val) == REAL_CST
6293 || TREE_CODE (val) == FIXED_CST)
6294 vec.quick_push (val);
6295 else
6296 return NULL_TREE;
6297 }
6298
6299 return vec.build ();
6300 }
6301 if (subcode == OBJ_TYPE_REF)
6302 {
6303 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
6304 /* If callee is constant, we can fold away the wrapper. */
6305 if (is_gimple_min_invariant (val))
6306 return val;
6307 }
6308
6309 if (kind == tcc_reference)
6310 {
6311 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
6312 || TREE_CODE (rhs) == REALPART_EXPR
6313 || TREE_CODE (rhs) == IMAGPART_EXPR)
6314 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6315 {
6316 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6317 return fold_unary_loc (EXPR_LOCATION (rhs),
6318 TREE_CODE (rhs),
6319 TREE_TYPE (rhs), val);
6320 }
6321 else if (TREE_CODE (rhs) == BIT_FIELD_REF
6322 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6323 {
6324 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6325 return fold_ternary_loc (EXPR_LOCATION (rhs),
6326 TREE_CODE (rhs),
6327 TREE_TYPE (rhs), val,
6328 TREE_OPERAND (rhs, 1),
6329 TREE_OPERAND (rhs, 2));
6330 }
6331 else if (TREE_CODE (rhs) == MEM_REF
6332 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6333 {
6334 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6335 if (TREE_CODE (val) == ADDR_EXPR
6336 && is_gimple_min_invariant (val))
6337 {
6338 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
6339 unshare_expr (val),
6340 TREE_OPERAND (rhs, 1));
6341 if (tem)
6342 rhs = tem;
6343 }
6344 }
6345 return fold_const_aggregate_ref_1 (rhs, valueize);
6346 }
6347 else if (kind == tcc_declaration)
6348 return get_symbol_constant_value (rhs);
6349 return rhs;
6350 }
6351
6352 case GIMPLE_UNARY_RHS:
6353 return NULL_TREE;
6354
6355 case GIMPLE_BINARY_RHS:
6356 /* Translate &x + CST into an invariant form suitable for
6357 further propagation. */
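/* E.g. for q_1 = p_2 p+ 4 where p_2 valueizes to &a, we return the
   invariant address &MEM[&a + 4], which later passes can propagate
   further.  */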
6358 if (subcode == POINTER_PLUS_EXPR)
6359 {
6360 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6361 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6362 if (TREE_CODE (op0) == ADDR_EXPR
6363 && TREE_CODE (op1) == INTEGER_CST)
6364 {
6365 tree off = fold_convert (ptr_type_node, op1);
6366 return build_fold_addr_expr_loc
6367 (loc,
6368 fold_build2 (MEM_REF,
6369 TREE_TYPE (TREE_TYPE (op0)),
6370 unshare_expr (op0), off));
6371 }
6372 }
6373 /* Canonicalize bool != 0 and bool == 0 appearing after
6374 valueization. While gimple_simplify handles this
6375 it can get confused by the ~X == 1 -> X == 0 transform
6376 which we can't reduce to an SSA name or a constant
6377 (and we have no way to tell gimple_simplify to not
6378 consider those transforms in the first place). */
6379 else if (subcode == EQ_EXPR
6380 || subcode == NE_EXPR)
6381 {
6382 tree lhs = gimple_assign_lhs (stmt);
6383 tree op0 = gimple_assign_rhs1 (stmt);
6384 if (useless_type_conversion_p (TREE_TYPE (lhs),
6385 TREE_TYPE (op0)))
6386 {
6387 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6388 op0 = (*valueize) (op0);
6389 if (TREE_CODE (op0) == INTEGER_CST)
6390 std::swap (op0, op1);
6391 if (TREE_CODE (op1) == INTEGER_CST
6392 && ((subcode == NE_EXPR && integer_zerop (op1))
6393 || (subcode == EQ_EXPR && integer_onep (op1))))
6394 return op0;
6395 }
6396 }
6397 return NULL_TREE;
6398
6399 case GIMPLE_TERNARY_RHS:
6400 {
6401 /* Handle ternary operators that can appear in GIMPLE form. */
6402 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6403 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6404 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
6405 return fold_ternary_loc (loc, subcode,
6406 gimple_expr_type (stmt), op0, op1, op2);
6407 }
6408
6409 default:
6410 gcc_unreachable ();
6411 }
6412 }
6413
6414 case GIMPLE_CALL:
6415 {
6416 tree fn;
6417 gcall *call_stmt = as_a <gcall *> (stmt);
6418
6419 if (gimple_call_internal_p (stmt))
6420 {
6421 enum tree_code subcode = ERROR_MARK;
6422 switch (gimple_call_internal_fn (stmt))
6423 {
6424 case IFN_UBSAN_CHECK_ADD:
6425 subcode = PLUS_EXPR;
6426 break;
6427 case IFN_UBSAN_CHECK_SUB:
6428 subcode = MINUS_EXPR;
6429 break;
6430 case IFN_UBSAN_CHECK_MUL:
6431 subcode = MULT_EXPR;
6432 break;
6433 case IFN_BUILTIN_EXPECT:
6434 {
6435 tree arg0 = gimple_call_arg (stmt, 0);
6436 tree op0 = (*valueize) (arg0);
6437 if (TREE_CODE (op0) == INTEGER_CST)
6438 return op0;
6439 return NULL_TREE;
6440 }
6441 default:
6442 return NULL_TREE;
6443 }
6444 tree arg0 = gimple_call_arg (stmt, 0);
6445 tree arg1 = gimple_call_arg (stmt, 1);
6446 tree op0 = (*valueize) (arg0);
6447 tree op1 = (*valueize) (arg1);
6448
6449 if (TREE_CODE (op0) != INTEGER_CST
6450 || TREE_CODE (op1) != INTEGER_CST)
6451 {
6452 switch (subcode)
6453 {
6454 case MULT_EXPR:
6455 /* x * 0 = 0 * x = 0 without overflow. */
6456 if (integer_zerop (op0) || integer_zerop (op1))
6457 return build_zero_cst (TREE_TYPE (arg0));
6458 break;
6459 case MINUS_EXPR:
6460 /* y - y = 0 without overflow. */
6461 if (operand_equal_p (op0, op1, 0))
6462 return build_zero_cst (TREE_TYPE (arg0));
6463 break;
6464 default:
6465 break;
6466 }
6467 }
6468 tree res
6469 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
6470 if (res
6471 && TREE_CODE (res) == INTEGER_CST
6472 && !TREE_OVERFLOW (res))
6473 return res;
6474 return NULL_TREE;
6475 }
6476
6477 fn = (*valueize) (gimple_call_fn (stmt));
6478 if (TREE_CODE (fn) == ADDR_EXPR
6479 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
6480 && fndecl_built_in_p (TREE_OPERAND (fn, 0))
6481 && gimple_builtin_call_types_compatible_p (stmt,
6482 TREE_OPERAND (fn, 0)))
6483 {
6484 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
6485 tree retval;
6486 unsigned i;
6487 for (i = 0; i < gimple_call_num_args (stmt); ++i)
6488 args[i] = (*valueize) (gimple_call_arg (stmt, i));
6489 retval = fold_builtin_call_array (loc,
6490 gimple_call_return_type (call_stmt),
6491 fn, gimple_call_num_args (stmt), args);
6492 if (retval)
6493 {
6494 /* fold_call_expr wraps the result inside a NOP_EXPR. */
6495 STRIP_NOPS (retval);
6496 retval = fold_convert (gimple_call_return_type (call_stmt),
6497 retval);
6498 }
6499 return retval;
6500 }
6501 return NULL_TREE;
6502 }
6503
6504 default:
6505 return NULL_TREE;
6506 }
6507 }
6508
6509 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6510 Returns NULL_TREE if folding to a constant is not possible, otherwise
6511 returns a constant according to is_gimple_min_invariant. */
6512
6513 tree
6514 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
6515 {
6516 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
6517 if (res && is_gimple_min_invariant (res))
6518 return res;
6519 return NULL_TREE;
6520 }
6521
6522
6523 /* The following set of functions is supposed to fold references using
6524 their constant initializers. */
6525
6526 /* See if we can find the constructor defining the value of BASE.
6527 When we know the constructor with a constant offset (such as when
6528 BASE is array[40] and we know the constructor of array), then
6529 BIT_OFFSET is adjusted accordingly.
6530
6531 As a special case, return error_mark_node when the constructor
6532 is not explicitly available but is known to be zero,
6533 such as 'static const int a;'. */
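/* E.g. for MEM_REF[p_1, 8] where p_1 valueizes to &arr, this returns the
   CONSTRUCTOR initializing arr after adding 8 * BITS_PER_UNIT to
   *BIT_OFFSET.  */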
6534 static tree
6535 get_base_constructor (tree base, poly_int64_pod *bit_offset,
6536 tree (*valueize)(tree))
6537 {
6538 poly_int64 bit_offset2, size, max_size;
6539 bool reverse;
6540
6541 if (TREE_CODE (base) == MEM_REF)
6542 {
6543 poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
6544 if (!boff.to_shwi (bit_offset))
6545 return NULL_TREE;
6546
6547 if (valueize
6548 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
6549 base = valueize (TREE_OPERAND (base, 0));
6550 if (!base || TREE_CODE (base) != ADDR_EXPR)
6551 return NULL_TREE;
6552 base = TREE_OPERAND (base, 0);
6553 }
6554 else if (valueize
6555 && TREE_CODE (base) == SSA_NAME)
6556 base = valueize (base);
6557
6558 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
6559 DECL_INITIAL. If BASE is a nested reference into another
6560 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
6561 the inner reference. */
6562 switch (TREE_CODE (base))
6563 {
6564 case VAR_DECL:
6565 case CONST_DECL:
6566 {
6567 tree init = ctor_for_folding (base);
6568
6569 /* Our semantics are the exact opposite of ctor_for_folding's:
6570 NULL means unknown, while error_mark_node means 0. */
6571 if (init == error_mark_node)
6572 return NULL_TREE;
6573 if (!init)
6574 return error_mark_node;
6575 return init;
6576 }
6577
6578 case VIEW_CONVERT_EXPR:
6579 return get_base_constructor (TREE_OPERAND (base, 0),
6580 bit_offset, valueize);
6581
6582 case ARRAY_REF:
6583 case COMPONENT_REF:
6584 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
6585 &reverse);
6586 if (!known_size_p (max_size) || maybe_ne (size, max_size))
6587 return NULL_TREE;
6588 *bit_offset += bit_offset2;
6589 return get_base_constructor (base, bit_offset, valueize);
6590
6591 case CONSTRUCTOR:
6592 return base;
6593
6594 default:
6595 if (CONSTANT_CLASS_P (base))
6596 return base;
6597
6598 return NULL_TREE;
6599 }
6600 }
6601
6602 /* CTOR is CONSTRUCTOR of an array type. Fold a reference of SIZE bits
6603 to the memory at bit OFFSET. When non-null, TYPE is the expected
6604 type of the reference; otherwise the type of the referenced element
6605 is used instead. When SIZE is zero, attempt to fold a reference to
6606 the entire element which OFFSET refers to. Increment *SUBOFF by
6607 the bit offset of the accessed element. */
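/* E.g. reading 32 bits at bit offset 64 from the initializer of
   int a[4] = {10, 20, 30, 40} computes ACCESS_INDEX 2 with INNER_OFFSET 0
   and folds to 30 (assuming 32-bit int and 8-bit units).  */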
6608
6609 static tree
6610 fold_array_ctor_reference (tree type, tree ctor,
6611 unsigned HOST_WIDE_INT offset,
6612 unsigned HOST_WIDE_INT size,
6613 tree from_decl,
6614 unsigned HOST_WIDE_INT *suboff)
6615 {
6616 offset_int low_bound;
6617 offset_int elt_size;
6618 offset_int access_index;
6619 tree domain_type = NULL_TREE;
6620 HOST_WIDE_INT inner_offset;
6621
6622 /* Compute low bound and elt size. */
6623 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
6624 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
6625 if (domain_type && TYPE_MIN_VALUE (domain_type))
6626 {
6627 /* Static constructors for variably sized objects make no sense. */
6628 if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
6629 return NULL_TREE;
6630 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
6631 }
6632 else
6633 low_bound = 0;
6634 /* Static constructors for variably sized objects make no sense. */
6635 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
6636 return NULL_TREE;
6637 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
6638
6639 /* When TYPE is non-null, verify that it specifies a constant-sized
6640 access of a multiple of the array element size. Avoid division
6641 by zero below when ELT_SIZE is zero, such as with the result of
6642 an initializer for a zero-length array or an empty struct. */
6643 if (elt_size == 0
6644 || (type
6645 && (!TYPE_SIZE_UNIT (type)
6646 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST)))
6647 return NULL_TREE;
6648
6649 /* Compute the array index we look for. */
6650 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
6651 elt_size);
6652 access_index += low_bound;
6653
6654 /* And offset within the access. */
6655 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
6656
6657 if (size > elt_size.to_uhwi () * BITS_PER_UNIT)
6658 {
6659 /* native_encode_expr constraints. */
6660 if (size > MAX_BITSIZE_MODE_ANY_MODE
6661 || size % BITS_PER_UNIT != 0
6662 || inner_offset % BITS_PER_UNIT != 0)
6663 return NULL_TREE;
6664
6665 unsigned ctor_idx;
6666 tree val = get_array_ctor_element_at_index (ctor, access_index,
6667 &ctor_idx);
6668 if (!val && ctor_idx >= CONSTRUCTOR_NELTS (ctor))
6669 return build_zero_cst (type);
6670
6671 /* native-encode adjacent ctor elements. */
6672 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
6673 unsigned bufoff = 0;
6674 offset_int index = 0;
6675 offset_int max_index = access_index;
6676 constructor_elt *elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
6677 if (!val)
6678 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
6679 else if (!CONSTANT_CLASS_P (val))
6680 return NULL_TREE;
6681 if (!elt->index)
6682 ;
6683 else if (TREE_CODE (elt->index) == RANGE_EXPR)
6684 {
6685 index = wi::to_offset (TREE_OPERAND (elt->index, 0));
6686 max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
6687 }
6688 else
6689 index = max_index = wi::to_offset (elt->index);
6690 index = wi::umax (index, access_index);
6691 do
6692 {
6693 int len = native_encode_expr (val, buf + bufoff,
6694 elt_size.to_uhwi (),
6695 inner_offset / BITS_PER_UNIT);
6696 if (len != elt_size - inner_offset / BITS_PER_UNIT)
6697 return NULL_TREE;
6698 inner_offset = 0;
6699 bufoff += len;
6700
6701 access_index += 1;
6702 if (wi::cmpu (access_index, index) == 0)
6703 val = elt->value;
6704 else if (wi::cmpu (access_index, max_index) > 0)
6705 {
6706 ctor_idx++;
6707 if (ctor_idx >= CONSTRUCTOR_NELTS (ctor))
6708 {
6709 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
6710 ++max_index;
6711 }
6712 else
6713 {
6714 elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
6715 index = 0;
6716 max_index = access_index;
6717 if (!elt->index)
6718 ;
6719 else if (TREE_CODE (elt->index) == RANGE_EXPR)
6720 {
6721 index = wi::to_offset (TREE_OPERAND (elt->index, 0));
6722 max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
6723 }
6724 else
6725 index = max_index = wi::to_offset (elt->index);
6726 index = wi::umax (index, access_index);
6727 if (wi::cmpu (access_index, index) == 0)
6728 val = elt->value;
6729 else
6730 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
6731 }
6732 }
6733 }
6734 while (bufoff < size / BITS_PER_UNIT);
6735 *suboff += size;
6736 return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
6737 }
6738
6739 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
6740 {
6741 if (!size && TREE_CODE (val) != CONSTRUCTOR)
6742 {
6743 /* For the final reference to the entire accessed element
6744 (SIZE is zero), reset INNER_OFFSET, disregard TYPE (which
6745 may be null) in favor of the type of the element, and set
6746 SIZE to the size of the accessed element. */
6747 inner_offset = 0;
6748 type = TREE_TYPE (val);
6749 size = elt_size.to_uhwi () * BITS_PER_UNIT;
6750 }
6751
6752 *suboff += (access_index * elt_size * BITS_PER_UNIT).to_uhwi ();
6753 return fold_ctor_reference (type, val, inner_offset, size, from_decl,
6754 suboff);
6755 }
6756
6757 /* Memory not explicitly mentioned in constructor is 0 (or
6758 the reference is out of range). */
6759 return type ? build_zero_cst (type) : NULL_TREE;
6760 }
6761
6762 /* CTOR is CONSTRUCTOR of an aggregate or vector. Fold a reference
6763 of SIZE bits to the memory at bit OFFSET. When non-null, TYPE
6764 is the expected type of the reference; otherwise the type of
6765 the referenced member is used instead. When SIZE is zero,
6766 attempt to fold a reference to the entire member which OFFSET
6767 refers to. Increment *SUBOFF by the bit offset
6768 of the accessed member. */
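/* E.g. for struct { int i; int j; } s = { 1, 2 }, a 32-bit read at bit
   offset 32 overlaps only field j and folds to 2 (again assuming
   32-bit int).  */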
6769
6770 static tree
6771 fold_nonarray_ctor_reference (tree type, tree ctor,
6772 unsigned HOST_WIDE_INT offset,
6773 unsigned HOST_WIDE_INT size,
6774 tree from_decl,
6775 unsigned HOST_WIDE_INT *suboff)
6776 {
6777 unsigned HOST_WIDE_INT cnt;
6778 tree cfield, cval;
6779
6780 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
6781 cval)
6782 {
6783 tree byte_offset = DECL_FIELD_OFFSET (cfield);
6784 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
6785 tree field_size = DECL_SIZE (cfield);
6786
6787 if (!field_size)
6788 {
6789 /* Determine the size of the flexible array member from
6790 the size of the initializer provided for it. */
6791 field_size = TYPE_SIZE (TREE_TYPE (cval));
6792 }
6793
6794 /* Variable-sized objects in static constructors make no sense,
6795 but field_size can be NULL for flexible array members. */
6796 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
6797 && TREE_CODE (byte_offset) == INTEGER_CST
6798 && (field_size != NULL_TREE
6799 ? TREE_CODE (field_size) == INTEGER_CST
6800 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
6801
6802 /* Compute bit offset of the field. */
6803 offset_int bitoffset
6804 = (wi::to_offset (field_offset)
6805 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
6806 /* Compute bit offset where the field ends. */
6807 offset_int bitoffset_end;
6808 if (field_size != NULL_TREE)
6809 bitoffset_end = bitoffset + wi::to_offset (field_size);
6810 else
6811 bitoffset_end = 0;
6812
6813 /* Compute the bit offset of the end of the desired access.
6814 As a special case, if the size of the desired access is
6815 zero, assume the access is to the entire field (and let
6816 the caller make any necessary adjustments by storing
6817 the actual bounds of the field in FIELDBOUNDS). */
6818 offset_int access_end = offset_int (offset);
6819 if (size)
6820 access_end += size;
6821 else
6822 access_end = bitoffset_end;
6823
6824 /* Is there any overlap between the desired access at
6825 [OFFSET, OFFSET+SIZE) and the offset of the field within
6826 the object at [BITOFFSET, BITOFFSET_END)? */
6827 if (wi::cmps (access_end, bitoffset) > 0
6828 && (field_size == NULL_TREE
6829 || wi::lts_p (offset, bitoffset_end)))
6830 {
6831 *suboff += bitoffset.to_uhwi ();
6832
6833 if (!size && TREE_CODE (cval) != CONSTRUCTOR)
6834 {
6835 /* For the final reference to the entire accessed member
6836 (SIZE is zero), reset OFFSET, disregard TYPE (which may
6837 be null) in favor of the type of the member, and set
6838 SIZE to the size of the accessed member. */
6839 offset = bitoffset.to_uhwi ();
6840 type = TREE_TYPE (cval);
6841 size = (bitoffset_end - bitoffset).to_uhwi ();
6842 }
6843
6844 /* We do have overlap. Now see if the field is large enough
6845 to cover the access. Give up for accesses that extend
6846 beyond the end of the object or that span multiple fields. */
6847 if (wi::cmps (access_end, bitoffset_end) > 0)
6848 return NULL_TREE;
6849 if (offset < bitoffset)
6850 return NULL_TREE;
6851
6852 offset_int inner_offset = offset_int (offset) - bitoffset;
6853 return fold_ctor_reference (type, cval,
6854 inner_offset.to_uhwi (), size,
6855 from_decl, suboff);
6856 }
6857 }
6858
6859 if (!type)
6860 return NULL_TREE;
6861
6862 return build_zero_cst (type);
6863 }
6864
6865 /* CTOR is value initializing memory. Fold a reference of TYPE and
6866 bit size POLY_SIZE to the memory at bit POLY_OFFSET. When POLY_SIZE
6867 is zero, attempt to fold a reference to the entire subobject
6868 which POLY_OFFSET refers to. This is used when folding accesses to
6869 string members of aggregates. When non-null, set *SUBOFF to
6870 the bit offset of the accessed subobject. */
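/* This is the central dispatcher: exact-match and view-convert cases are
   handled here directly, byte-aligned reads from constants go through
   native_encode_expr/native_interpret_expr, and CONSTRUCTORs are
   delegated to the array and non-array helpers above.  */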
6871
6872 tree
6873 fold_ctor_reference (tree type, tree ctor, const poly_uint64 &poly_offset,
6874 const poly_uint64 &poly_size, tree from_decl,
6875 unsigned HOST_WIDE_INT *suboff /* = NULL */)
6876 {
6877 tree ret;
6878
6879 /* We found the field with exact match. */
6880 if (type
6881 && useless_type_conversion_p (type, TREE_TYPE (ctor))
6882 && known_eq (poly_offset, 0U))
6883 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
6884
6885 /* The remaining optimizations need a constant size and offset. */
6886 unsigned HOST_WIDE_INT size, offset;
6887 if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
6888 return NULL_TREE;
6889
6890 /* We are at the end of walk, see if we can view convert the
6891 result. */
6892 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
6893 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
6894 && !compare_tree_int (TYPE_SIZE (type), size)
6895 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
6896 {
6897 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
6898 if (ret)
6899 {
6900 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
6901 if (ret)
6902 STRIP_USELESS_TYPE_CONVERSION (ret);
6903 }
6904 return ret;
6905 }
6906 /* For constants and byte-aligned/sized reads try to go through
6907 native_encode/interpret. */
6908 if (CONSTANT_CLASS_P (ctor)
6909 && BITS_PER_UNIT == 8
6910 && offset % BITS_PER_UNIT == 0
6911 && size % BITS_PER_UNIT == 0
6912 && size <= MAX_BITSIZE_MODE_ANY_MODE)
6913 {
6914 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
6915 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
6916 offset / BITS_PER_UNIT);
6917 if (len > 0)
6918 return native_interpret_expr (type, buf, len);
6919 }
6920 if (TREE_CODE (ctor) == CONSTRUCTOR)
6921 {
6922 unsigned HOST_WIDE_INT dummy = 0;
6923 if (!suboff)
6924 suboff = &dummy;
6925
6926 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
6927 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
6928 return fold_array_ctor_reference (type, ctor, offset, size,
6929 from_decl, suboff);
6930
6931 return fold_nonarray_ctor_reference (type, ctor, offset, size,
6932 from_decl, suboff);
6933 }
6934
6935 return NULL_TREE;
6936 }
6937
6938 /* Return the tree representing the element referenced by T if T is an
6939 ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
6940 names using VALUEIZE. Return NULL_TREE otherwise. */
6941
6942 tree
6943 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
6944 {
6945 tree ctor, idx, base;
6946 poly_int64 offset, size, max_size;
6947 tree tem;
6948 bool reverse;
6949
6950 if (TREE_THIS_VOLATILE (t))
6951 return NULL_TREE;
6952
6953 if (DECL_P (t))
6954 return get_symbol_constant_value (t);
6955
6956 tem = fold_read_from_constant_string (t);
6957 if (tem)
6958 return tem;
6959
6960 switch (TREE_CODE (t))
6961 {
6962 case ARRAY_REF:
6963 case ARRAY_RANGE_REF:
6964 /* Constant indexes are handled well by get_base_constructor.
6965 Only special case variable offsets.
6966 FIXME: This code can't handle nested references with variable indexes
6967 (they will be handled only by iteration of ccp). Perhaps we can bring
6968 get_ref_base_and_extent here and make it use a valueize callback. */
6969 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
6970 && valueize
6971 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
6972 && poly_int_tree_p (idx))
6973 {
6974 tree low_bound, unit_size;
6975
6976 /* If the resulting bit-offset is constant, track it. */
6977 if ((low_bound = array_ref_low_bound (t),
6978 poly_int_tree_p (low_bound))
6979 && (unit_size = array_ref_element_size (t),
6980 tree_fits_uhwi_p (unit_size)))
6981 {
6982 poly_offset_int woffset
6983 = wi::sext (wi::to_poly_offset (idx)
6984 - wi::to_poly_offset (low_bound),
6985 TYPE_PRECISION (TREE_TYPE (idx)));
6986 woffset *= tree_to_uhwi (unit_size);
6987 woffset *= BITS_PER_UNIT;
6988 if (woffset.to_shwi (&offset))
6989 {
6990 base = TREE_OPERAND (t, 0);
6991 ctor = get_base_constructor (base, &offset, valueize);
6992 /* Empty constructor. Always fold to 0. */
6993 if (ctor == error_mark_node)
6994 return build_zero_cst (TREE_TYPE (t));
6995 /* Out of bound array access. Value is undefined,
6996 but don't fold. */
6997 if (maybe_lt (offset, 0))
6998 return NULL_TREE;
6999 /* We cannot determine ctor. */
7000 if (!ctor)
7001 return NULL_TREE;
7002 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
7003 tree_to_uhwi (unit_size)
7004 * BITS_PER_UNIT,
7005 base);
7006 }
7007 }
7008 }
7009 /* Fallthru. */
7010
7011 case COMPONENT_REF:
7012 case BIT_FIELD_REF:
7013 case TARGET_MEM_REF:
7014 case MEM_REF:
7015 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
7016 ctor = get_base_constructor (base, &offset, valueize);
7017
7018 /* Empty constructor. Always fold to 0. */
7019 if (ctor == error_mark_node)
7020 return build_zero_cst (TREE_TYPE (t));
7021 /* We do not know precise address. */
7022 if (!known_size_p (max_size) || maybe_ne (max_size, size))
7023 return NULL_TREE;
7024 /* We cannot determine ctor. */
7025 if (!ctor)
7026 return NULL_TREE;
7027
7028 /* Out of bound array access. Value is undefined, but don't fold. */
7029 if (maybe_lt (offset, 0))
7030 return NULL_TREE;
7031
7032 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
7033 base);
7034
7035 case REALPART_EXPR:
7036 case IMAGPART_EXPR:
7037 {
7038 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
7039 if (c && TREE_CODE (c) == COMPLEX_CST)
7040 return fold_build1_loc (EXPR_LOCATION (t),
7041 TREE_CODE (t), TREE_TYPE (t), c);
7042 break;
7043 }
7044
7045 default:
7046 break;
7047 }
7048
7049 return NULL_TREE;
7050 }
7051
7052 tree
7053 fold_const_aggregate_ref (tree t)
7054 {
7055 return fold_const_aggregate_ref_1 (t, NULL);
7056 }
7057
7058 /* Lookup virtual method with index TOKEN in a virtual table V
7059 at OFFSET.
7060 If CAN_REFER is non-NULL, set it to false when the method is not
7061 referable or when the virtual table is ill-formed (such as rewritten
7062 by a non-C++-produced symbol); otherwise just return NULL in that case. */
7063
7064 tree
7065 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
7066 tree v,
7067 unsigned HOST_WIDE_INT offset,
7068 bool *can_refer)
7069 {
7070 tree vtable = v, init, fn;
7071 unsigned HOST_WIDE_INT size;
7072 unsigned HOST_WIDE_INT elt_size, access_index;
7073 tree domain_type;
7074
7075 if (can_refer)
7076 *can_refer = true;
7077
7078 /* First of all, double-check that we have a virtual table. */
7079 if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
7080 {
7081 /* Pass down that we lost track of the target. */
7082 if (can_refer)
7083 *can_refer = false;
7084 return NULL_TREE;
7085 }
7086
7087 init = ctor_for_folding (v);
7088
7089 /* Virtual tables should always be born with constructors
7090 and we should always assume that they are available for
7091 folding. At the moment we do not stream them in all cases,
7092 but it should never happen that the ctor seems unreachable. */
7093 gcc_assert (init);
7094 if (init == error_mark_node)
7095 {
7096 /* Pass down that we lost track of the target. */
7097 if (can_refer)
7098 *can_refer = false;
7099 return NULL_TREE;
7100 }
7101 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
7102 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
7103 offset *= BITS_PER_UNIT;
7104 offset += token * size;
7105
7106 /* Look up the value in the constructor, which is assumed to be an array.
7107 This is equivalent to
7108 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
7109 offset, size, NULL);
7110 but runs in constant time. We expect that the frontend produced a
7111 simple array without indexed initializers. */
7112
7113 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
7114 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
7115 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
7116 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
7117
7118 access_index = offset / BITS_PER_UNIT / elt_size;
7119 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
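/* A hypothetical worked example (illustrative numbers only): on a target
with 64-bit function pointers, size is 64 and elt_size is 8, so a call
with OFFSET 16 and TOKEN 3 yields
offset = 16 * BITS_PER_UNIT + 3 * 64 = 320 bits and
access_index = 320 / 8 / 8 = 5,
i.e. the sixth slot of the vtable initializer. */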
7120
7121 /* The C++ FE can now produce indexed fields, so we check that the indexes
7122 match. */
7123 if (access_index < CONSTRUCTOR_NELTS (init))
7124 {
7125 fn = CONSTRUCTOR_ELT (init, access_index)->value;
7126 tree idx = CONSTRUCTOR_ELT (init, access_index)->index;
7127 gcc_checking_assert (!idx || tree_to_uhwi (idx) == access_index);
7128 STRIP_NOPS (fn);
7129 }
7130 else
7131 fn = NULL;
7132
7133 /* For a type-inconsistent program we may end up looking up a virtual method
7134 in a virtual table that does not contain TOKEN entries. We may overrun
7135 the virtual table and pick up a constant or an RTTI info pointer.
7136 In any case the call is undefined. */
7137 if (!fn
7138 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
7139 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
7140 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
7141 else
7142 {
7143 fn = TREE_OPERAND (fn, 0);
7144
7145 /* When the cgraph node is missing and the function is not public, we
7146 cannot devirtualize. This can happen in WHOPR when the actual method
7147 ends up in another partition, because we found the devirtualization
7148 possibility too late. */
7149 if (!can_refer_decl_in_current_unit_p (fn, vtable))
7150 {
7151 if (can_refer)
7152 {
7153 *can_refer = false;
7154 return fn;
7155 }
7156 return NULL_TREE;
7157 }
7158 }
7159
7160 /* Make sure we create a cgraph node for functions we'll reference.
7161 They can be non-existent if the reference comes from an entry
7162 of an external vtable, for example. */
7163 cgraph_node::get_create (fn);
7164
7165 return fn;
7166 }
7167
7168 /* Return a declaration of the function which an OBJ_TYPE_REF references. TOKEN
7169 is the integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
7170 KNOWN_BINFO carries the binfo describing the true type of
7171 OBJ_TYPE_REF_OBJECT(REF).
7172 If CAN_REFER is non-NULL, set it to false if the method
7173 is not referable or if the virtual table is ill-formed (such as one
7174 rewritten by a non-C++-produced symbol); otherwise return NULL_TREE in that case. */
7175
7176 tree
7177 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
7178 bool *can_refer)
7179 {
7180 unsigned HOST_WIDE_INT offset;
7181 tree v;
7182
7183 v = BINFO_VTABLE (known_binfo);
7184 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
7185 if (!v)
7186 return NULL_TREE;
7187
7188 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
7189 {
7190 if (can_refer)
7191 *can_refer = false;
7192 return NULL_TREE;
7193 }
7194 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
7195 }
7196
7197 /* Given a pointer value T, return a simplified version of an
7198 indirection through T, or NULL_TREE if no simplification is
7199 possible. Note that the resulting type may differ from the
7200 pointed-to type, as long as it is still compatible
7201 from the langhooks point of view. */
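/* For orientation, a few of the rewrites performed below, sketched on
hypothetical C-level inputs (the function itself operates on trees):
*&p => p
*(foo *)&fooarray => fooarray[0]
*(foo *)&complexfoo => __real__ complexfoo
*(p + CST) => MEM_REF <p, CST> */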
7202
7203 tree
7204 gimple_fold_indirect_ref (tree t)
7205 {
7206 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
7207 tree sub = t;
7208 tree subtype;
7209
7210 STRIP_NOPS (sub);
7211 subtype = TREE_TYPE (sub);
7212 if (!POINTER_TYPE_P (subtype)
7213 || TYPE_REF_CAN_ALIAS_ALL (ptype))
7214 return NULL_TREE;
7215
7216 if (TREE_CODE (sub) == ADDR_EXPR)
7217 {
7218 tree op = TREE_OPERAND (sub, 0);
7219 tree optype = TREE_TYPE (op);
7220 /* *&p => p */
7221 if (useless_type_conversion_p (type, optype))
7222 return op;
7223
7224 /* *(foo *)&fooarray => fooarray[0] */
7225 if (TREE_CODE (optype) == ARRAY_TYPE
7226 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
7227 && useless_type_conversion_p (type, TREE_TYPE (optype)))
7228 {
7229 tree type_domain = TYPE_DOMAIN (optype);
7230 tree min_val = size_zero_node;
7231 if (type_domain && TYPE_MIN_VALUE (type_domain))
7232 min_val = TYPE_MIN_VALUE (type_domain);
7233 if (TREE_CODE (min_val) == INTEGER_CST)
7234 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
7235 }
7236 /* *(foo *)&complexfoo => __real__ complexfoo */
7237 else if (TREE_CODE (optype) == COMPLEX_TYPE
7238 && useless_type_conversion_p (type, TREE_TYPE (optype)))
7239 return fold_build1 (REALPART_EXPR, type, op);
7240 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
7241 else if (TREE_CODE (optype) == VECTOR_TYPE
7242 && useless_type_conversion_p (type, TREE_TYPE (optype)))
7243 {
7244 tree part_width = TYPE_SIZE (type);
7245 tree index = bitsize_int (0);
7246 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
7247 }
7248 }
7249
7250 /* *(p + CST) -> ... */
7251 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
7252 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
7253 {
7254 tree addr = TREE_OPERAND (sub, 0);
7255 tree off = TREE_OPERAND (sub, 1);
7256 tree addrtype;
7257
7258 STRIP_NOPS (addr);
7259 addrtype = TREE_TYPE (addr);
7260
7261 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
7262 if (TREE_CODE (addr) == ADDR_EXPR
7263 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
7264 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
7265 && tree_fits_uhwi_p (off))
7266 {
7267 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
7268 tree part_width = TYPE_SIZE (type);
7269 unsigned HOST_WIDE_INT part_widthi
7270 = tree_to_shwi (part_width) / BITS_PER_UNIT;
7271 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
7272 tree index = bitsize_int (indexi);
7273 if (known_lt (offset / part_widthi,
7274 TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
7275 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
7276 part_width, index);
7277 }
7278
7279 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
7280 if (TREE_CODE (addr) == ADDR_EXPR
7281 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
7282 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
7283 {
7284 tree size = TYPE_SIZE_UNIT (type);
7285 if (tree_int_cst_equal (size, off))
7286 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
7287 }
7288
7289 /* *(p + CST) -> MEM_REF <p, CST>. */
7290 if (TREE_CODE (addr) != ADDR_EXPR
7291 || DECL_P (TREE_OPERAND (addr, 0)))
7292 return fold_build2 (MEM_REF, type,
7293 addr,
7294 wide_int_to_tree (ptype, wi::to_wide (off)));
7295 }
7296
7297 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
7298 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
7299 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
7300 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
7301 {
7302 tree type_domain;
7303 tree min_val = size_zero_node;
7304 tree osub = sub;
7305 sub = gimple_fold_indirect_ref (sub);
7306 if (! sub)
7307 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
7308 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
7309 if (type_domain && TYPE_MIN_VALUE (type_domain))
7310 min_val = TYPE_MIN_VALUE (type_domain);
7311 if (TREE_CODE (min_val) == INTEGER_CST)
7312 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
7313 }
7314
7315 return NULL_TREE;
7316 }
7317
7318 /* Return true if CODE is an operation that, when operating on signed
7319 integer types, involves undefined behavior on overflow and that
7320 can be expressed with unsigned arithmetic. */
7321
7322 bool
7323 arith_code_with_undefined_signed_overflow (tree_code code)
7324 {
7325 switch (code)
7326 {
7327 case ABS_EXPR:
7328 case PLUS_EXPR:
7329 case MINUS_EXPR:
7330 case MULT_EXPR:
7331 case NEGATE_EXPR:
7332 case POINTER_PLUS_EXPR:
7333 return true;
7334 default:
7335 return false;
7336 }
7337 }
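/* For example (illustration only): signed 'INT_MAX + 1' is undefined
behavior, whereas the same addition carried out in 'unsigned int' is
defined to wrap; rewrite_to_defined_overflow below relies on this. */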
7338
7339 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
7340 operation that can be transformed to unsigned arithmetic by converting
7341 its operands, carrying out the operation in the corresponding unsigned
7342 type, and converting the result back to the original type.
7343
7344 Returns a sequence of statements that replaces STMT and also contains
7345 a modified form of STMT itself. */
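/* An illustrative sketch of the rewrite (hypothetical SSA names): for a
signed int addition
a_1 = b_2 + c_3;
the emitted sequence looks like
tem_4 = (unsigned int) b_2;
tem_5 = (unsigned int) c_3;
tem_6 = tem_4 + tem_5;
a_1 = (int) tem_6;
where the unsigned arithmetic wraps and thus has no undefined overflow. */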
7346
7347 gimple_seq
7348 rewrite_to_defined_overflow (gimple *stmt)
7349 {
7350 if (dump_file && (dump_flags & TDF_DETAILS))
7351 {
7352 fprintf (dump_file, "rewriting stmt with undefined signed "
7353 "overflow ");
7354 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
7355 }
7356
7357 tree lhs = gimple_assign_lhs (stmt);
7358 tree type = unsigned_type_for (TREE_TYPE (lhs));
7359 gimple_seq stmts = NULL;
7360 if (gimple_assign_rhs_code (stmt) == ABS_EXPR)
7361 gimple_assign_set_rhs_code (stmt, ABSU_EXPR);
7362 else
7363 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
7364 {
7365 tree op = gimple_op (stmt, i);
7366 op = gimple_convert (&stmts, type, op);
7367 gimple_set_op (stmt, i, op);
7368 }
7369 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
7370 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
7371 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
7372 gimple_seq_add_stmt (&stmts, stmt);
7373 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
7374 gimple_seq_add_stmt (&stmts, cvt);
7375
7376 return stmts;
7377 }
7378
7379
7380 /* The valueization hook we use for the gimple_build API simplification.
7381 This makes us match fold_buildN behavior by only combining with
7382 statements in the sequence(s) we are currently building. */
7383
7384 static tree
7385 gimple_build_valueize (tree op)
7386 {
7387 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
7388 return op;
7389 return NULL_TREE;
7390 }
7391
7392 /* Build the expression CODE OP0 of type TYPE with location LOC,
7393 simplifying it first if possible. Returns the built
7394 expression value and appends statements possibly defining it
7395 to SEQ. */
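/* Hypothetical usage sketch (names invented for illustration):
gimple_seq seq = NULL;
tree neg = gimple_build (&seq, loc, NEGATE_EXPR, TREE_TYPE (val), val);
appends a negation statement to SEQ unless gimple_simplify folds it
away, e.g. when VAL is itself a constant. */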
7396
7397 tree
7398 gimple_build (gimple_seq *seq, location_t loc,
7399 enum tree_code code, tree type, tree op0)
7400 {
7401 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
7402 if (!res)
7403 {
7404 res = create_tmp_reg_or_ssa_name (type);
7405 gimple *stmt;
7406 if (code == REALPART_EXPR
7407 || code == IMAGPART_EXPR
7408 || code == VIEW_CONVERT_EXPR)
7409 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
7410 else
7411 stmt = gimple_build_assign (res, code, op0);
7412 gimple_set_location (stmt, loc);
7413 gimple_seq_add_stmt_without_update (seq, stmt);
7414 }
7415 return res;
7416 }
7417
7418 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
7419 simplifying it first if possible. Returns the built
7420 expression value and appends statements possibly defining it
7421 to SEQ. */
7422
7423 tree
7424 gimple_build (gimple_seq *seq, location_t loc,
7425 enum tree_code code, tree type, tree op0, tree op1)
7426 {
7427 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
7428 if (!res)
7429 {
7430 res = create_tmp_reg_or_ssa_name (type);
7431 gimple *stmt = gimple_build_assign (res, code, op0, op1);
7432 gimple_set_location (stmt, loc);
7433 gimple_seq_add_stmt_without_update (seq, stmt);
7434 }
7435 return res;
7436 }
7437
7438 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
7439 simplifying it first if possible. Returns the built
7440 expression value and appends statements possibly defining it
7441 to SEQ. */
7442
7443 tree
7444 gimple_build (gimple_seq *seq, location_t loc,
7445 enum tree_code code, tree type, tree op0, tree op1, tree op2)
7446 {
7447 tree res = gimple_simplify (code, type, op0, op1, op2,
7448 seq, gimple_build_valueize);
7449 if (!res)
7450 {
7451 res = create_tmp_reg_or_ssa_name (type);
7452 gimple *stmt;
7453 if (code == BIT_FIELD_REF)
7454 stmt = gimple_build_assign (res, code,
7455 build3 (code, type, op0, op1, op2));
7456 else
7457 stmt = gimple_build_assign (res, code, op0, op1, op2);
7458 gimple_set_location (stmt, loc);
7459 gimple_seq_add_stmt_without_update (seq, stmt);
7460 }
7461 return res;
7462 }
7463
7464 /* Build the call FN (ARG0) with a result of type TYPE
7465 (or no result if TYPE is void) with location LOC,
7466 simplifying it first if possible. Returns the built
7467 expression value (or NULL_TREE if TYPE is void) and appends
7468 statements possibly defining it to SEQ. */
7469
7470 tree
7471 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7472 tree type, tree arg0)
7473 {
7474 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
7475 if (!res)
7476 {
7477 gcall *stmt;
7478 if (internal_fn_p (fn))
7479 stmt = gimple_build_call_internal (as_internal_fn (fn), 1, arg0);
7480 else
7481 {
7482 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7483 stmt = gimple_build_call (decl, 1, arg0);
7484 }
7485 if (!VOID_TYPE_P (type))
7486 {
7487 res = create_tmp_reg_or_ssa_name (type);
7488 gimple_call_set_lhs (stmt, res);
7489 }
7490 gimple_set_location (stmt, loc);
7491 gimple_seq_add_stmt_without_update (seq, stmt);
7492 }
7493 return res;
7494 }
7495
7496 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
7497 (or no result if TYPE is void) with location LOC,
7498 simplifying it first if possible. Returns the built
7499 expression value (or NULL_TREE if TYPE is void) and appends
7500 statements possibly defining it to SEQ. */
7501
7502 tree
7503 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7504 tree type, tree arg0, tree arg1)
7505 {
7506 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
7507 if (!res)
7508 {
7509 gcall *stmt;
7510 if (internal_fn_p (fn))
7511 stmt = gimple_build_call_internal (as_internal_fn (fn), 2, arg0, arg1);
7512 else
7513 {
7514 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7515 stmt = gimple_build_call (decl, 2, arg0, arg1);
7516 }
7517 if (!VOID_TYPE_P (type))
7518 {
7519 res = create_tmp_reg_or_ssa_name (type);
7520 gimple_call_set_lhs (stmt, res);
7521 }
7522 gimple_set_location (stmt, loc);
7523 gimple_seq_add_stmt_without_update (seq, stmt);
7524 }
7525 return res;
7526 }
7527
7528 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
7529 (or no result if TYPE is void) with location LOC,
7530 simplifying it first if possible. Returns the built
7531 expression value (or NULL_TREE if TYPE is void) and appends
7532 statements possibly defining it to SEQ. */
7533
7534 tree
7535 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7536 tree type, tree arg0, tree arg1, tree arg2)
7537 {
7538 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
7539 seq, gimple_build_valueize);
7540 if (!res)
7541 {
7542 gcall *stmt;
7543 if (internal_fn_p (fn))
7544 stmt = gimple_build_call_internal (as_internal_fn (fn),
7545 3, arg0, arg1, arg2);
7546 else
7547 {
7548 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7549 stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
7550 }
7551 if (!VOID_TYPE_P (type))
7552 {
7553 res = create_tmp_reg_or_ssa_name (type);
7554 gimple_call_set_lhs (stmt, res);
7555 }
7556 gimple_set_location (stmt, loc);
7557 gimple_seq_add_stmt_without_update (seq, stmt);
7558 }
7559 return res;
7560 }
7561
7562 /* Build the conversion (TYPE) OP with a result of type TYPE
7563 with location LOC if such conversion is necessary in GIMPLE,
7564 simplifying it first.
7565 Returns the built expression value and appends
7566 statements possibly defining it to SEQ. */
7567
7568 tree
7569 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
7570 {
7571 if (useless_type_conversion_p (type, TREE_TYPE (op)))
7572 return op;
7573 return gimple_build (seq, loc, NOP_EXPR, type, op);
7574 }
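/* Illustrative usage (hypothetical): when OP already has type TYPE (or a
uselessly-convertible one) this is a no-op returning OP; otherwise it
appends a NOP_EXPR assignment to SEQ:
tree widened = gimple_convert (&seq, loc, long_integer_type_node, op); */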
7575
7576 /* Build the conversion (ptrofftype) OP with a result of a type
7577 compatible with ptrofftype with location LOC if such conversion
7578 is necessary in GIMPLE, simplifying it first.
7579 Returns the built expression value and appends
7580 statements possibly defining it to SEQ. */
7581
7582 tree
7583 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
7584 {
7585 if (ptrofftype_p (TREE_TYPE (op)))
7586 return op;
7587 return gimple_convert (seq, loc, sizetype, op);
7588 }
7589
7590 /* Build a vector of type TYPE in which each element has the value OP.
7591 Return a gimple value for the result, appending any new statements
7592 to SEQ. */
7593
7594 tree
7595 gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
7596 tree op)
7597 {
7598 if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
7599 && !CONSTANT_CLASS_P (op))
7600 return gimple_build (seq, loc, VEC_DUPLICATE_EXPR, type, op);
7601
7602 tree res, vec = build_vector_from_val (type, op);
7603 if (is_gimple_val (vec))
7604 return vec;
7605 if (gimple_in_ssa_p (cfun))
7606 res = make_ssa_name (type);
7607 else
7608 res = create_tmp_reg (type);
7609 gimple *stmt = gimple_build_assign (res, vec);
7610 gimple_set_location (stmt, loc);
7611 gimple_seq_add_stmt_without_update (seq, stmt);
7612 return res;
7613 }
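/* For instance (a sketch): splatting a constant scalar into a V4SI type
folds directly to the VECTOR_CST { X, X, X, X }; a non-constant scalar
instead becomes a CONSTRUCTOR (or a VEC_DUPLICATE_EXPR for variable-
length vector types) assigned to a fresh register appended to SEQ. */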
7614
7615 /* Build a vector from BUILDER, handling the case in which some elements
7616 are non-constant. Return a gimple value for the result, appending any
7617 new instructions to SEQ.
7618
7619 BUILDER must not have a stepped encoding on entry. This is because
7620 the function is not geared up to handle the arithmetic that would
7621 be needed in the variable case, and any code building a vector that
7622 is known to be constant should use BUILDER->build () directly. */
7623
7624 tree
7625 gimple_build_vector (gimple_seq *seq, location_t loc,
7626 tree_vector_builder *builder)
7627 {
7628 gcc_assert (builder->nelts_per_pattern () <= 2);
7629 unsigned int encoded_nelts = builder->encoded_nelts ();
7630 for (unsigned int i = 0; i < encoded_nelts; ++i)
7631 if (!TREE_CONSTANT ((*builder)[i]))
7632 {
7633 tree type = builder->type ();
7634 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
7635 vec<constructor_elt, va_gc> *v;
7636 vec_alloc (v, nelts);
7637 for (i = 0; i < nelts; ++i)
7638 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));
7639
7640 tree res;
7641 if (gimple_in_ssa_p (cfun))
7642 res = make_ssa_name (type);
7643 else
7644 res = create_tmp_reg (type);
7645 gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
7646 gimple_set_location (stmt, loc);
7647 gimple_seq_add_stmt_without_update (seq, stmt);
7648 return res;
7649 }
7650 return builder->build ();
7651 }
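/* A sketch of the non-constant path (hypothetical): if BUILDER holds
{ 1, x_2, 3, 4 } for a V4SI type, element x_2 is not TREE_CONSTANT, so
the whole vector is emitted as a CONSTRUCTOR assignment to a new SSA
name appended to SEQ rather than folded to a VECTOR_CST. */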
7652
7653 /* Return true if the result of assignment STMT is known to be non-negative.
7654 If the return value is based on the assumption that signed overflow is
7655 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7656 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7657
7658 static bool
7659 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7660 int depth)
7661 {
7662 enum tree_code code = gimple_assign_rhs_code (stmt);
7663 switch (get_gimple_rhs_class (code))
7664 {
7665 case GIMPLE_UNARY_RHS:
7666 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7667 gimple_expr_type (stmt),
7668 gimple_assign_rhs1 (stmt),
7669 strict_overflow_p, depth);
7670 case GIMPLE_BINARY_RHS:
7671 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7672 gimple_expr_type (stmt),
7673 gimple_assign_rhs1 (stmt),
7674 gimple_assign_rhs2 (stmt),
7675 strict_overflow_p, depth);
7676 case GIMPLE_TERNARY_RHS:
7677 return false;
7678 case GIMPLE_SINGLE_RHS:
7679 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
7680 strict_overflow_p, depth);
7681 case GIMPLE_INVALID_RHS:
7682 break;
7683 }
7684 gcc_unreachable ();
7685 }
7686
7687 /* Return true if the return value of call STMT is known to be non-negative.
7688 If the return value is based on the assumption that signed overflow is
7689 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7690 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7691
7692 static bool
7693 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7694 int depth)
7695 {
7696 tree arg0 = gimple_call_num_args (stmt) > 0 ?
7697 gimple_call_arg (stmt, 0) : NULL_TREE;
7698 tree arg1 = gimple_call_num_args (stmt) > 1 ?
7699 gimple_call_arg (stmt, 1) : NULL_TREE;
7700
7701 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
7702 gimple_call_combined_fn (stmt),
7703 arg0,
7704 arg1,
7705 strict_overflow_p, depth);
7706 }
7707
7708 /* Return true if the result of phi STMT is known to be non-negative.
7709 If the return value is based on the assumption that signed overflow is
7710 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7711 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7712
7713 static bool
7714 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7715 int depth)
7716 {
7717 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7718 {
7719 tree arg = gimple_phi_arg_def (stmt, i);
7720 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
7721 return false;
7722 }
7723 return true;
7724 }
7725
7726 /* Return true if STMT is known to compute a non-negative value.
7727 If the return value is based on the assumption that signed overflow is
7728 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7729 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7730
7731 bool
7732 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7733 int depth)
7734 {
7735 switch (gimple_code (stmt))
7736 {
7737 case GIMPLE_ASSIGN:
7738 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
7739 depth);
7740 case GIMPLE_CALL:
7741 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
7742 depth);
7743 case GIMPLE_PHI:
7744 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
7745 depth);
7746 default:
7747 return false;
7748 }
7749 }
7750
7751 /* Return true if the floating-point value computed by assignment STMT
7752 is known to have an integer value. We also allow +Inf, -Inf and NaN
7753 to be considered integer values. Return false for signaling NaN.
7754
7755 DEPTH is the current nesting depth of the query. */
7756
7757 static bool
7758 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
7759 {
7760 enum tree_code code = gimple_assign_rhs_code (stmt);
7761 switch (get_gimple_rhs_class (code))
7762 {
7763 case GIMPLE_UNARY_RHS:
7764 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
7765 gimple_assign_rhs1 (stmt), depth);
7766 case GIMPLE_BINARY_RHS:
7767 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
7768 gimple_assign_rhs1 (stmt),
7769 gimple_assign_rhs2 (stmt), depth);
7770 case GIMPLE_TERNARY_RHS:
7771 return false;
7772 case GIMPLE_SINGLE_RHS:
7773 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
7774 case GIMPLE_INVALID_RHS:
7775 break;
7776 }
7777 gcc_unreachable ();
7778 }
7779
7780 /* Return true if the floating-point value computed by call STMT is known
7781 to have an integer value. We also allow +Inf, -Inf and NaN to be
7782 considered integer values. Return false for signaling NaN.
7783
7784 DEPTH is the current nesting depth of the query. */
7785
7786 static bool
7787 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
7788 {
7789 tree arg0 = (gimple_call_num_args (stmt) > 0
7790 ? gimple_call_arg (stmt, 0)
7791 : NULL_TREE);
7792 tree arg1 = (gimple_call_num_args (stmt) > 1
7793 ? gimple_call_arg (stmt, 1)
7794 : NULL_TREE);
7795 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
7796 arg0, arg1, depth);
7797 }
7798
7799 /* Return true if the floating-point result of phi STMT is known to have
7800 an integer value. We also allow +Inf, -Inf and NaN to be considered
7801 integer values. Return false for signaling NaN.
7802
7803 DEPTH is the current nesting depth of the query. */
7804
7805 static bool
7806 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
7807 {
7808 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7809 {
7810 tree arg = gimple_phi_arg_def (stmt, i);
7811 if (!integer_valued_real_single_p (arg, depth + 1))
7812 return false;
7813 }
7814 return true;
7815 }
7816
7817 /* Return true if the floating-point value computed by STMT is known
7818 to have an integer value. We also allow +Inf, -Inf and NaN to be
7819 considered integer values. Return false for signaling NaN.
7820
7821 DEPTH is the current nesting depth of the query. */
7822
7823 bool
7824 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
7825 {
7826 switch (gimple_code (stmt))
7827 {
7828 case GIMPLE_ASSIGN:
7829 return gimple_assign_integer_valued_real_p (stmt, depth);
7830 case GIMPLE_CALL:
7831 return gimple_call_integer_valued_real_p (stmt, depth);
7832 case GIMPLE_PHI:
7833 return gimple_phi_integer_valued_real_p (stmt, depth);
7834 default:
7835 return false;
7836 }
7837 }