/* gcc/gimple-fold.c  */
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2019 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "gimple-pretty-print.h"
33 #include "gimple-ssa-warn-restrict.h"
34 #include "fold-const.h"
35 #include "stmt.h"
36 #include "expr.h"
37 #include "stor-layout.h"
38 #include "dumpfile.h"
39 #include "gimple-fold.h"
40 #include "gimplify.h"
41 #include "gimple-iterator.h"
42 #include "tree-into-ssa.h"
43 #include "tree-dfa.h"
44 #include "tree-object-size.h"
45 #include "tree-ssa.h"
46 #include "tree-ssa-propagate.h"
47 #include "ipa-utils.h"
48 #include "tree-ssa-address.h"
49 #include "langhooks.h"
50 #include "gimplify-me.h"
51 #include "dbgcnt.h"
52 #include "builtins.h"
53 #include "tree-eh.h"
54 #include "gimple-match.h"
55 #include "gomp-constants.h"
56 #include "optabs-query.h"
57 #include "omp-general.h"
58 #include "tree-cfg.h"
59 #include "fold-const-call.h"
60 #include "stringpool.h"
61 #include "attribs.h"
62 #include "asan.h"
63 #include "diagnostic-core.h"
64 #include "intl.h"
65 #include "calls.h"
66 #include "tree-vector-builder.h"
67 #include "tree-ssa-strlen.h"
68
/* Ways in which the string-length machinery below may compute the
   length of a string argument.  */

enum strlen_range_kind {
  /* Compute the exact constant string length.  */
  SRK_STRLEN,
  /* Compute the maximum constant string length.  */
  SRK_STRLENMAX,
  /* Compute a range of string lengths bounded by object sizes.  When
     the length of a string cannot be determined, consider as the upper
     bound the size of the enclosing object the string may be a member
     or element of.  Also determine the size of the largest character
     array the string may refer to.  */
  SRK_LENRANGE,
  /* Temporary until the rest of Martin's strlen range work is integrated.  */
  SRK_LENRANGE_2,
  /* Determine the integer value of the argument (not string length).  */
  SRK_INT_VALUE
};

/* Forward declaration of the worker defined later in this file.  */
static bool get_range_strlen (tree, tree[2], bitmap *, strlen_range_kind,
			      bool *, unsigned, tree *);
88
/* Return true when DECL can be referenced from the current unit.
   FROM_DECL (if non-null) specifies the constructor of the variable
   DECL was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
	In this case we have static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred by external vtables that
	we devirtualize only during final compilation stage.
	At this time we already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || !VAR_OR_FUNCTION_DECL_P (decl))
    return true;

  /* Static objects can be referred to only if they were not optimized
     out yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      /* A function body inlined away cannot be referenced anymore.  */
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var or var that has been optimized out.  */
  if (!from_decl
      || !VAR_P (from_decl)
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When function is public, we always can introduce new reference.
     Exception are the COMDAT functions where introducing a direct
     reference implies a need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have COMDAT.  We are going to check if we still have definition
     or if the definition is going to be output in other partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when corresponding vtable is output.
     This is however not possible - ABI specify that COMDATs are output in
     units where they are used and when the other unit was compiled with LTO
     it is possible that vtable was kept public while the function itself
     was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->global.inlined_to;
}
188
189 /* Create a temporary for TYPE for a statement STMT. If the current function
190 is in SSA form, a SSA name is created. Otherwise a temporary register
191 is made. */
192
193 tree
194 create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
195 {
196 if (gimple_in_ssa_p (cfun))
197 return make_ssa_name (type, stmt);
198 else
199 return create_tmp_reg (type);
200 }
201
/* CVAL is a value taken from DECL_INITIAL of a variable.  Try to transform
   it into an acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies the variable whose constructor contains
   CVAL.  Returns NULL_TREE if CVAL cannot be used from this unit.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  /* Canonicalize PTR p+ CST into &MEM[PTR + CST] so the whole value
     becomes a single ADDR_EXPR invariant.  */
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      /* Replace a compound literal by the underlying decl it names.  */
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      /* Taking the address of a decl that cannot be referenced from the
	 current unit would create an unresolvable reference.  */
      if (VAR_OR_FUNCTION_DECL_P (base)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_TYPE (base) == error_mark_node)
	return NULL_TREE;
      if (VAR_P (base))
	TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  /* A constant with TREE_OVERFLOW set is not a valid GIMPLE invariant;
     return a clean copy.  */
  if (TREE_OVERFLOW_P (cval))
    return drop_tree_overflow (cval);
  return orig_cval;
}
263
264 /* If SYM is a constant variable with known value, return the value.
265 NULL_TREE is returned otherwise. */
266
267 tree
268 get_symbol_constant_value (tree sym)
269 {
270 tree val = ctor_for_folding (sym);
271 if (val != error_mark_node)
272 {
273 if (val)
274 {
275 val = canonicalize_constructor_val (unshare_expr (val), sym);
276 if (val && is_gimple_min_invariant (val))
277 return val;
278 else
279 return NULL_TREE;
280 }
281 /* Variables declared 'const' without an initializer
282 have zero as the initializer if they may not be
283 overridden at link or run time. */
284 if (!val
285 && is_gimple_reg_type (TREE_TYPE (sym)))
286 return build_zero_cst (TREE_TYPE (sym));
287 }
288
289 return NULL_TREE;
290 }
291
292
293
294 /* Subroutine of fold_stmt. We perform several simplifications of the
295 memory reference tree EXPR and make sure to re-gimplify them properly
296 after propagation of constant addresses. IS_LHS is true if the
297 reference is supposed to be an lvalue. */
298
299 static tree
300 maybe_fold_reference (tree expr, bool is_lhs)
301 {
302 tree result;
303
304 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
305 || TREE_CODE (expr) == REALPART_EXPR
306 || TREE_CODE (expr) == IMAGPART_EXPR)
307 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
308 return fold_unary_loc (EXPR_LOCATION (expr),
309 TREE_CODE (expr),
310 TREE_TYPE (expr),
311 TREE_OPERAND (expr, 0));
312 else if (TREE_CODE (expr) == BIT_FIELD_REF
313 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
314 return fold_ternary_loc (EXPR_LOCATION (expr),
315 TREE_CODE (expr),
316 TREE_TYPE (expr),
317 TREE_OPERAND (expr, 0),
318 TREE_OPERAND (expr, 1),
319 TREE_OPERAND (expr, 2));
320
321 if (!is_lhs
322 && (result = fold_const_aggregate_ref (expr))
323 && is_gimple_min_invariant (result))
324 return result;
325
326 return NULL_TREE;
327 }
328
329
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	/* A clobber marks end-of-life of storage; nothing to fold.  */
	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		/* Only devirtualize when the target list is known complete
		   and contains at most one candidate.  */
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We can not use __builtin_unreachable here because it
			 can not have address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }
	  }

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    /* &MEM[p + 0] simplifies to just p converted to the type
	       of the RHS.  */
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));

	    if (result)
	      {
		/* Strip away useless type conversions.  Both the
		   NON_LVALUE_EXPR that may have been added by fold, and
		   "useless" type conversions that might now be apparent
		   due to propagation.  */
		STRIP_USELESS_TYPE_CONVERSION (result);

		if (result != rhs && valid_gimple_rhs_p (result))
		  return result;
	      }
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (! CONSTANT_CLASS_P (val))
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return get_symbol_constant_value (rhs);
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
				 TREE_TYPE (gimple_assign_lhs (stmt)),
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
468
469
/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* A statement needs a VDEF when it may store to memory: either a
	 non-register single assignment or a call not known to be free
	 of memory side effects.  */
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  /* The last store inherits the VDEF of the statement being
	     replaced; earlier stores get fresh virtual SSA names.  */
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
541
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), false);
	  return;
	}
    }
  else
    {
      /* Gimplify EXPR and append an assignment of the result to the
	 call's original LHS.  */
      tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  /* Splice the sequence in, re-threading virtual operands.  */
  gsi_replace_with_seq_vops (si_p, stmts);
}
597
598
599 /* Replace the call at *GSI with the gimple value VAL. */
600
601 void
602 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
603 {
604 gimple *stmt = gsi_stmt (*gsi);
605 tree lhs = gimple_call_lhs (stmt);
606 gimple *repl;
607 if (lhs)
608 {
609 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
610 val = fold_convert (TREE_TYPE (lhs), val);
611 repl = gimple_build_assign (lhs, val);
612 }
613 else
614 repl = gimple_build_nop ();
615 tree vdef = gimple_vdef (stmt);
616 if (vdef && TREE_CODE (vdef) == SSA_NAME)
617 {
618 unlink_stmt_vdef (stmt);
619 release_ssa_name (vdef);
620 }
621 gsi_replace (gsi, repl, false);
622 }
623
624 /* Replace the call at *GSI with the new call REPL and fold that
625 again. */
626
627 static void
628 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
629 {
630 gimple *stmt = gsi_stmt (*gsi);
631 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
632 gimple_set_location (repl, gimple_location (stmt));
633 if (gimple_vdef (stmt)
634 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
635 {
636 gimple_set_vdef (repl, gimple_vdef (stmt));
637 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
638 }
639 if (gimple_vuse (stmt))
640 gimple_set_vuse (repl, gimple_vuse (stmt));
641 gsi_replace (gsi, repl, false);
642 fold_stmt (gsi);
643 }
644
645 /* Return true if VAR is a VAR_DECL or a component thereof. */
646
647 static bool
648 var_decl_component_p (tree var)
649 {
650 tree inner = var;
651 while (handled_component_p (inner))
652 inner = TREE_OPERAND (inner, 0);
653 return (DECL_P (inner)
654 || (TREE_CODE (inner) == MEM_REF
655 && TREE_CODE (TREE_OPERAND (inner, 0)) == ADDR_EXPR));
656 }
657
/* Return TRUE if the SIZE argument, representing the size of an
   object, is in a range of values of which exactly zero is valid.  */

static bool
size_must_be_zero_p (tree size)
{
  /* A literal zero trivially qualifies.  */
  if (integer_zerop (size))
    return true;

  /* Recorded value ranges exist only for integral SSA names.  */
  if (TREE_CODE (size) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (size)))
    return false;

  tree type = TREE_TYPE (size);
  int prec = TYPE_PRECISION (type);

  /* Compute the value of SSIZE_MAX, the largest positive value that
     can be stored in ssize_t, the signed counterpart of size_t.  */
  wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
  /* Intersect SIZE's known range with the valid object-size range
     [0, SSIZE_MAX]; if only zero remains, any nonzero SIZE would be
     invalid.  */
  value_range valid_range (VR_RANGE,
			   build_int_cst (type, 0),
			   wide_int_to_tree (type, ssize_max));
  value_range vr;
  get_range_info (size, vr);
  vr.intersect (&valid_range);
  return vr.zero_p ();
}
684
685 /* Fold function call to builtin mem{{,p}cpy,move}. Try to detect and
686 diagnose (otherwise undefined) overlapping copies without preventing
687 folding. When folded, GCC guarantees that overlapping memcpy has
688 the same semantics as memmove. Call to the library memcpy need not
689 provide the same guarantee. Return false if no simplification can
690 be made. */
691
692 static bool
693 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
694 tree dest, tree src, int endp)
695 {
696 gimple *stmt = gsi_stmt (*gsi);
697 tree lhs = gimple_call_lhs (stmt);
698 tree len = gimple_call_arg (stmt, 2);
699 tree destvar, srcvar;
700 location_t loc = gimple_location (stmt);
701
702 bool nowarn = gimple_no_warning_p (stmt);
703
704 /* If the LEN parameter is a constant zero or in range where
705 the only valid value is zero, return DEST. */
706 if (size_must_be_zero_p (len))
707 {
708 gimple *repl;
709 if (gimple_call_lhs (stmt))
710 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
711 else
712 repl = gimple_build_nop ();
713 tree vdef = gimple_vdef (stmt);
714 if (vdef && TREE_CODE (vdef) == SSA_NAME)
715 {
716 unlink_stmt_vdef (stmt);
717 release_ssa_name (vdef);
718 }
719 gsi_replace (gsi, repl, false);
720 return true;
721 }
722
723 /* If SRC and DEST are the same (and not volatile), return
724 DEST{,+LEN,+LEN-1}. */
725 if (operand_equal_p (src, dest, 0))
726 {
727 /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
728 It's safe and may even be emitted by GCC itself (see bug
729 32667). */
730 unlink_stmt_vdef (stmt);
731 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
732 release_ssa_name (gimple_vdef (stmt));
733 if (!lhs)
734 {
735 gsi_replace (gsi, gimple_build_nop (), false);
736 return true;
737 }
738 goto done;
739 }
740 else
741 {
742 tree srctype, desttype;
743 unsigned int src_align, dest_align;
744 tree off0;
745 const char *tmp_str;
746 unsigned HOST_WIDE_INT tmp_len;
747
748 /* Build accesses at offset zero with a ref-all character type. */
749 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
750 ptr_mode, true), 0);
751
752 /* If we can perform the copy efficiently with first doing all loads
753 and then all stores inline it that way. Currently efficiently
754 means that we can load all the memory into a single integer
755 register which is what MOVE_MAX gives us. */
756 src_align = get_pointer_alignment (src);
757 dest_align = get_pointer_alignment (dest);
758 if (tree_fits_uhwi_p (len)
759 && compare_tree_int (len, MOVE_MAX) <= 0
760 /* ??? Don't transform copies from strings with known length this
761 confuses the tree-ssa-strlen.c. This doesn't handle
762 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
763 reason. */
764 && !c_strlen (src, 2)
765 && !((tmp_str = c_getstr (src, &tmp_len)) != NULL
766 && memchr (tmp_str, 0, tmp_len) == NULL))
767 {
768 unsigned ilen = tree_to_uhwi (len);
769 if (pow2p_hwi (ilen))
770 {
771 /* Detect invalid bounds and overlapping copies and issue
772 either -Warray-bounds or -Wrestrict. */
773 if (!nowarn
774 && check_bounds_or_overlap (as_a <gcall *>(stmt),
775 dest, src, len, len))
776 gimple_set_no_warning (stmt, true);
777
778 scalar_int_mode mode;
779 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
780 if (type
781 && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
782 && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
783 /* If the destination pointer is not aligned we must be able
784 to emit an unaligned store. */
785 && (dest_align >= GET_MODE_ALIGNMENT (mode)
786 || !targetm.slow_unaligned_access (mode, dest_align)
787 || (optab_handler (movmisalign_optab, mode)
788 != CODE_FOR_nothing)))
789 {
790 tree srctype = type;
791 tree desttype = type;
792 if (src_align < GET_MODE_ALIGNMENT (mode))
793 srctype = build_aligned_type (type, src_align);
794 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
795 tree tem = fold_const_aggregate_ref (srcmem);
796 if (tem)
797 srcmem = tem;
798 else if (src_align < GET_MODE_ALIGNMENT (mode)
799 && targetm.slow_unaligned_access (mode, src_align)
800 && (optab_handler (movmisalign_optab, mode)
801 == CODE_FOR_nothing))
802 srcmem = NULL_TREE;
803 if (srcmem)
804 {
805 gimple *new_stmt;
806 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
807 {
808 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
809 srcmem
810 = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
811 new_stmt);
812 gimple_assign_set_lhs (new_stmt, srcmem);
813 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
814 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
815 }
816 if (dest_align < GET_MODE_ALIGNMENT (mode))
817 desttype = build_aligned_type (type, dest_align);
818 new_stmt
819 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
820 dest, off0),
821 srcmem);
822 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
823 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
824 if (gimple_vdef (new_stmt)
825 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
826 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
827 if (!lhs)
828 {
829 gsi_replace (gsi, new_stmt, false);
830 return true;
831 }
832 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
833 goto done;
834 }
835 }
836 }
837 }
838
839 if (endp == 3)
840 {
841 /* Both DEST and SRC must be pointer types.
842 ??? This is what old code did. Is the testing for pointer types
843 really mandatory?
844
845 If either SRC is readonly or length is 1, we can use memcpy. */
846 if (!dest_align || !src_align)
847 return false;
848 if (readonly_data_expr (src)
849 || (tree_fits_uhwi_p (len)
850 && (MIN (src_align, dest_align) / BITS_PER_UNIT
851 >= tree_to_uhwi (len))))
852 {
853 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
854 if (!fn)
855 return false;
856 gimple_call_set_fndecl (stmt, fn);
857 gimple_call_set_arg (stmt, 0, dest);
858 gimple_call_set_arg (stmt, 1, src);
859 fold_stmt (gsi);
860 return true;
861 }
862
863 /* If *src and *dest can't overlap, optimize into memcpy as well. */
864 if (TREE_CODE (src) == ADDR_EXPR
865 && TREE_CODE (dest) == ADDR_EXPR)
866 {
867 tree src_base, dest_base, fn;
868 poly_int64 src_offset = 0, dest_offset = 0;
869 poly_uint64 maxsize;
870
871 srcvar = TREE_OPERAND (src, 0);
872 src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
873 if (src_base == NULL)
874 src_base = srcvar;
875 destvar = TREE_OPERAND (dest, 0);
876 dest_base = get_addr_base_and_unit_offset (destvar,
877 &dest_offset);
878 if (dest_base == NULL)
879 dest_base = destvar;
880 if (!poly_int_tree_p (len, &maxsize))
881 maxsize = -1;
882 if (SSA_VAR_P (src_base)
883 && SSA_VAR_P (dest_base))
884 {
885 if (operand_equal_p (src_base, dest_base, 0)
886 && ranges_maybe_overlap_p (src_offset, maxsize,
887 dest_offset, maxsize))
888 return false;
889 }
890 else if (TREE_CODE (src_base) == MEM_REF
891 && TREE_CODE (dest_base) == MEM_REF)
892 {
893 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
894 TREE_OPERAND (dest_base, 0), 0))
895 return false;
896 poly_offset_int full_src_offset
897 = mem_ref_offset (src_base) + src_offset;
898 poly_offset_int full_dest_offset
899 = mem_ref_offset (dest_base) + dest_offset;
900 if (ranges_maybe_overlap_p (full_src_offset, maxsize,
901 full_dest_offset, maxsize))
902 return false;
903 }
904 else
905 return false;
906
907 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
908 if (!fn)
909 return false;
910 gimple_call_set_fndecl (stmt, fn);
911 gimple_call_set_arg (stmt, 0, dest);
912 gimple_call_set_arg (stmt, 1, src);
913 fold_stmt (gsi);
914 return true;
915 }
916
917 /* If the destination and source do not alias optimize into
918 memcpy as well. */
919 if ((is_gimple_min_invariant (dest)
920 || TREE_CODE (dest) == SSA_NAME)
921 && (is_gimple_min_invariant (src)
922 || TREE_CODE (src) == SSA_NAME))
923 {
924 ao_ref destr, srcr;
925 ao_ref_init_from_ptr_and_size (&destr, dest, len);
926 ao_ref_init_from_ptr_and_size (&srcr, src, len);
927 if (!refs_may_alias_p_1 (&destr, &srcr, false))
928 {
929 tree fn;
930 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
931 if (!fn)
932 return false;
933 gimple_call_set_fndecl (stmt, fn);
934 gimple_call_set_arg (stmt, 0, dest);
935 gimple_call_set_arg (stmt, 1, src);
936 fold_stmt (gsi);
937 return true;
938 }
939 }
940
941 return false;
942 }
943
944 if (!tree_fits_shwi_p (len))
945 return false;
946 if (!POINTER_TYPE_P (TREE_TYPE (src))
947 || !POINTER_TYPE_P (TREE_TYPE (dest)))
948 return false;
949 /* In the following try to find a type that is most natural to be
950 used for the memcpy source and destination and that allows
951 the most optimization when memcpy is turned into a plain assignment
952 using that type. In theory we could always use a char[len] type
953 but that only gains us that the destination and source possibly
954 no longer will have their address taken. */
955 srctype = TREE_TYPE (TREE_TYPE (src));
956 if (TREE_CODE (srctype) == ARRAY_TYPE
957 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
958 srctype = TREE_TYPE (srctype);
959 desttype = TREE_TYPE (TREE_TYPE (dest));
960 if (TREE_CODE (desttype) == ARRAY_TYPE
961 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
962 desttype = TREE_TYPE (desttype);
963 if (TREE_ADDRESSABLE (srctype)
964 || TREE_ADDRESSABLE (desttype))
965 return false;
966
967 /* Make sure we are not copying using a floating-point mode or
968 a type whose size possibly does not match its precision. */
969 if (FLOAT_MODE_P (TYPE_MODE (desttype))
970 || TREE_CODE (desttype) == BOOLEAN_TYPE
971 || TREE_CODE (desttype) == ENUMERAL_TYPE)
972 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
973 if (FLOAT_MODE_P (TYPE_MODE (srctype))
974 || TREE_CODE (srctype) == BOOLEAN_TYPE
975 || TREE_CODE (srctype) == ENUMERAL_TYPE)
976 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
977 if (!srctype)
978 srctype = desttype;
979 if (!desttype)
980 desttype = srctype;
981 if (!srctype)
982 return false;
983
984 src_align = get_pointer_alignment (src);
985 dest_align = get_pointer_alignment (dest);
986 if (dest_align < TYPE_ALIGN (desttype)
987 || src_align < TYPE_ALIGN (srctype))
988 return false;
989
990 destvar = NULL_TREE;
991 if (TREE_CODE (dest) == ADDR_EXPR
992 && var_decl_component_p (TREE_OPERAND (dest, 0))
993 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
994 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
995
996 srcvar = NULL_TREE;
997 if (TREE_CODE (src) == ADDR_EXPR
998 && var_decl_component_p (TREE_OPERAND (src, 0))
999 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1000 {
1001 if (!destvar
1002 || src_align >= TYPE_ALIGN (desttype))
1003 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
1004 src, off0);
1005 else if (!STRICT_ALIGNMENT)
1006 {
1007 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1008 src_align);
1009 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1010 }
1011 }
1012
1013 if (srcvar == NULL_TREE && destvar == NULL_TREE)
1014 return false;
1015
1016 if (srcvar == NULL_TREE)
1017 {
1018 if (src_align >= TYPE_ALIGN (desttype))
1019 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1020 else
1021 {
1022 if (STRICT_ALIGNMENT)
1023 return false;
1024 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1025 src_align);
1026 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1027 }
1028 }
1029 else if (destvar == NULL_TREE)
1030 {
1031 if (dest_align >= TYPE_ALIGN (srctype))
1032 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1033 else
1034 {
1035 if (STRICT_ALIGNMENT)
1036 return false;
1037 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1038 dest_align);
1039 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1040 }
1041 }
1042
1043 /* Detect invalid bounds and overlapping copies and issue either
1044 -Warray-bounds or -Wrestrict. */
1045 if (!nowarn)
1046 check_bounds_or_overlap (as_a <gcall *>(stmt), dest, src, len, len);
1047
1048 gimple *new_stmt;
1049 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1050 {
1051 tree tem = fold_const_aggregate_ref (srcvar);
1052 if (tem)
1053 srcvar = tem;
1054 if (! is_gimple_min_invariant (srcvar))
1055 {
1056 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1057 srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
1058 new_stmt);
1059 gimple_assign_set_lhs (new_stmt, srcvar);
1060 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1061 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1062 }
1063 new_stmt = gimple_build_assign (destvar, srcvar);
1064 goto set_vop_and_replace;
1065 }
1066
1067 /* We get an aggregate copy. Use an unsigned char[] type to
1068 perform the copying to preserve padding and to avoid any issues
1069 with TREE_ADDRESSABLE types or float modes behavior on copying. */
1070 desttype = build_array_type_nelts (unsigned_char_type_node,
1071 tree_to_uhwi (len));
1072 srctype = desttype;
1073 if (src_align > TYPE_ALIGN (srctype))
1074 srctype = build_aligned_type (srctype, src_align);
1075 if (dest_align > TYPE_ALIGN (desttype))
1076 desttype = build_aligned_type (desttype, dest_align);
1077 new_stmt
1078 = gimple_build_assign (fold_build2 (MEM_REF, desttype, dest, off0),
1079 fold_build2 (MEM_REF, srctype, src, off0));
1080 set_vop_and_replace:
1081 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1082 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1083 if (gimple_vdef (new_stmt)
1084 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1085 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1086 if (!lhs)
1087 {
1088 gsi_replace (gsi, new_stmt, false);
1089 return true;
1090 }
1091 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1092 }
1093
1094 done:
1095 gimple_seq stmts = NULL;
1096 if (endp == 0 || endp == 3)
1097 len = NULL_TREE;
1098 else if (endp == 2)
1099 len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
1100 ssize_int (1));
1101 if (endp == 2 || endp == 1)
1102 {
1103 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1104 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1105 TREE_TYPE (dest), dest, len);
1106 }
1107
1108 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1109 gimple *repl = gimple_build_assign (lhs, dest);
1110 gsi_replace (gsi, repl, false);
1111 return true;
1112 }
1113
1114 /* Transform a call to built-in bcmp(a, b, len) at *GSI into one
1115 to built-in memcmp (a, b, len). */
1116
1117 static bool
1118 gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
1119 {
1120 tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);
1121
1122 if (!fn)
1123 return false;
1124
1125 /* Transform bcmp (a, b, len) into memcmp (a, b, len). */
1126
1127 gimple *stmt = gsi_stmt (*gsi);
1128 tree a = gimple_call_arg (stmt, 0);
1129 tree b = gimple_call_arg (stmt, 1);
1130 tree len = gimple_call_arg (stmt, 2);
1131
1132 gimple *repl = gimple_build_call (fn, 3, a, b, len);
1133 replace_call_with_call_and_fold (gsi, repl);
1134
1135 return true;
1136 }
1137
1138 /* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
1139 to built-in memmove (dest, src, len). */
1140
1141 static bool
1142 gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
1143 {
1144 tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);
1145
1146 if (!fn)
1147 return false;
1148
1149 /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
1150 it's quivalent to memmove (not memcpy). Transform bcopy (src, dest,
1151 len) into memmove (dest, src, len). */
1152
1153 gimple *stmt = gsi_stmt (*gsi);
1154 tree src = gimple_call_arg (stmt, 0);
1155 tree dest = gimple_call_arg (stmt, 1);
1156 tree len = gimple_call_arg (stmt, 2);
1157
1158 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1159 gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
1160 replace_call_with_call_and_fold (gsi, repl);
1161
1162 return true;
1163 }
1164
1165 /* Transform a call to built-in bzero (dest, len) at *GSI into one
1166 to built-in memset (dest, 0, len). */
1167
1168 static bool
1169 gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
1170 {
1171 tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
1172
1173 if (!fn)
1174 return false;
1175
1176 /* Transform bzero (dest, len) into memset (dest, 0, len). */
1177
1178 gimple *stmt = gsi_stmt (*gsi);
1179 tree dest = gimple_call_arg (stmt, 0);
1180 tree len = gimple_call_arg (stmt, 1);
1181
1182 gimple_seq seq = NULL;
1183 gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
1184 gimple_seq_add_stmt_without_update (&seq, repl);
1185 gsi_replace_with_seq_vops (gsi, seq);
1186 fold_stmt (gsi);
1187
1188 return true;
1189 }
1190
1191 /* Fold function call to builtin memset or bzero at *GSI setting the
1192 memory of size LEN to VAL. Return whether a simplification was made. */
1193
1194 static bool
1195 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1196 {
1197 gimple *stmt = gsi_stmt (*gsi);
1198 tree etype;
1199 unsigned HOST_WIDE_INT length, cval;
1200
1201 /* If the LEN parameter is zero, return DEST. */
1202 if (integer_zerop (len))
1203 {
1204 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1205 return true;
1206 }
1207
1208 if (! tree_fits_uhwi_p (len))
1209 return false;
1210
1211 if (TREE_CODE (c) != INTEGER_CST)
1212 return false;
1213
1214 tree dest = gimple_call_arg (stmt, 0);
1215 tree var = dest;
1216 if (TREE_CODE (var) != ADDR_EXPR)
1217 return false;
1218
1219 var = TREE_OPERAND (var, 0);
1220 if (TREE_THIS_VOLATILE (var))
1221 return false;
1222
1223 etype = TREE_TYPE (var);
1224 if (TREE_CODE (etype) == ARRAY_TYPE)
1225 etype = TREE_TYPE (etype);
1226
1227 if (!INTEGRAL_TYPE_P (etype)
1228 && !POINTER_TYPE_P (etype))
1229 return NULL_TREE;
1230
1231 if (! var_decl_component_p (var))
1232 return NULL_TREE;
1233
1234 length = tree_to_uhwi (len);
1235 if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
1236 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1237 return NULL_TREE;
1238
1239 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1240 return NULL_TREE;
1241
1242 if (integer_zerop (c))
1243 cval = 0;
1244 else
1245 {
1246 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1247 return NULL_TREE;
1248
1249 cval = TREE_INT_CST_LOW (c);
1250 cval &= 0xff;
1251 cval |= cval << 8;
1252 cval |= cval << 16;
1253 cval |= (cval << 31) << 1;
1254 }
1255
1256 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1257 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1258 gimple_set_vuse (store, gimple_vuse (stmt));
1259 tree vdef = gimple_vdef (stmt);
1260 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1261 {
1262 gimple_set_vdef (store, gimple_vdef (stmt));
1263 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1264 }
1265 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1266 if (gimple_call_lhs (stmt))
1267 {
1268 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1269 gsi_replace (gsi, asgn, false);
1270 }
1271 else
1272 {
1273 gimple_stmt_iterator gsi2 = *gsi;
1274 gsi_prev (gsi);
1275 gsi_remove (&gsi2, true);
1276 }
1277
1278 return true;
1279 }
1280
/* Helper of get_range_strlen for ARG that is not an SSA_NAME.  Stores
   the minimum and maximum length in LENGTH[0] and LENGTH[1]; the other
   parameters have the same meaning as in the static get_range_strlen
   overload below.  */

static bool
get_range_strlen_tree (tree arg, tree length[2], bitmap *visited,
		       strlen_range_kind rkind,
		       bool *flexp, unsigned eltsize, tree *nonstr)
{
  gcc_assert (TREE_CODE (arg) != SSA_NAME);

  /* The minimum and maximum length.  */
  tree *const minlen = length;
  tree *const maxlen = length + 1;

  /* The length computed by this invocation of the function.  */
  tree val = NULL_TREE;

  /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
    {
      tree op = TREE_OPERAND (arg, 0);
      if (integer_zerop (TREE_OPERAND (op, 1)))
	{
	  /* &(*p)[0]: strip the indirection and recurse on the
	     underlying SSA_NAME P.  */
	  tree aop0 = TREE_OPERAND (op, 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_range_strlen (TREE_OPERAND (aop0, 0), length,
				     visited, rkind, flexp,
				     eltsize, nonstr);
	}
      else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF
	       && (rkind == SRK_LENRANGE || rkind == SRK_LENRANGE_2))
	{
	  /* Fail if an array is the last member of a struct object
	     since it could be treated as a (fake) flexible array
	     member.  */
	  tree idx = TREE_OPERAND (op, 1);

	  arg = TREE_OPERAND (op, 0);
	  tree optype = TREE_TYPE (arg);
	  if (tree dom = TYPE_DOMAIN (optype))
	    if (tree bound = TYPE_MAX_VALUE (dom))
	      if (TREE_CODE (bound) == INTEGER_CST
		  && TREE_CODE (idx) == INTEGER_CST
		  && tree_int_cst_lt (bound, idx))
		return false;
	}
    }

  if (rkind == SRK_INT_VALUE)
    {
      /* We are computing the maximum value (not string length).  */
      val = arg;
      if (TREE_CODE (val) != INTEGER_CST
	  || tree_int_cst_sgn (val) < 0)
	return false;
    }
  else
    {
      c_strlen_data lendata = { };
      val = c_strlen (arg, 1, &lendata, eltsize);

      /* If we potentially had a non-terminated string, then
	 bubble that information up to the caller.  */
      if (!val && lendata.decl)
	{
	  *nonstr = lendata.decl;
	  *minlen = lendata.minlen;
	  *maxlen = lendata.minlen;
	  /* An unterminated array is only acceptable when computing a
	     range, never for an exact strlen.  */
	  return rkind == SRK_STRLEN ? false : true;
	}
    }

  /* The exact length is unknown; when a range was requested, fall back
     on bounds derived from the referenced object's type.  */
  if (!val && (rkind == SRK_LENRANGE || rkind == SRK_LENRANGE_2))
    {
      if (TREE_CODE (arg) == ADDR_EXPR)
	return get_range_strlen (TREE_OPERAND (arg, 0), length,
				 visited, rkind, flexp,
				 eltsize, nonstr);

      if (TREE_CODE (arg) == ARRAY_REF)
	{
	  tree optype = TREE_TYPE (TREE_OPERAND (arg, 0));

	  /* Determine the "innermost" array type.  */
	  while (TREE_CODE (optype) == ARRAY_TYPE
		 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
	    optype = TREE_TYPE (optype);

	  /* Avoid arrays of pointers.  */
	  tree eltype = TREE_TYPE (optype);
	  if (TREE_CODE (optype) != ARRAY_TYPE
	      || !INTEGRAL_TYPE_P (eltype))
	    return false;

	  /* Fail when the array bound is unknown or zero.  */
	  val = TYPE_SIZE_UNIT (optype);
	  if (!val || integer_zerop (val))
	    return false;

	  /* The longest string the array can hold is one shorter than
	     its size, leaving room for the terminating nul.  */
	  val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
			     integer_one_node);

	  /* Set the minimum size to zero since the string in
	     the array could have zero length.  */
	  *minlen = ssize_int (0);

	  if (TREE_CODE (TREE_OPERAND (arg, 0)) == COMPONENT_REF
	      && optype == TREE_TYPE (TREE_OPERAND (arg, 0))
	      && array_at_struct_end_p (TREE_OPERAND (arg, 0)))
	    *flexp = true;
	}
      else if (TREE_CODE (arg) == COMPONENT_REF
	       && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
		   == ARRAY_TYPE))
	{
	  /* Use the type of the member array to determine the upper
	     bound on the length of the array.  This may be overly
	     optimistic if the array itself isn't NUL-terminated and
	     the caller relies on the subsequent member to contain
	     the NUL but that would only be considered valid if
	     the array were the last member of a struct.
	     Set *FLEXP to true if the array whose bound is being
	     used is at the end of a struct.  */
	  if (array_at_struct_end_p (arg))
	    *flexp = true;

	  tree fld = TREE_OPERAND (arg, 1);

	  tree optype = TREE_TYPE (fld);

	  /* Determine the "innermost" array type.  */
	  while (TREE_CODE (optype) == ARRAY_TYPE
		 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
	    optype = TREE_TYPE (optype);

	  /* Fail when the array bound is unknown or zero.  */
	  val = TYPE_SIZE_UNIT (optype);
	  if (!val || integer_zerop (val))
	    return false;
	  val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
			     integer_one_node);

	  /* Set the minimum size to zero since the string in
	     the array could have zero length.  */
	  *minlen = ssize_int (0);
	}

      if (VAR_P (arg))
	{
	  tree type = TREE_TYPE (arg);
	  if (POINTER_TYPE_P (type))
	    type = TREE_TYPE (type);

	  if (TREE_CODE (type) == ARRAY_TYPE)
	    {
	      val = TYPE_SIZE_UNIT (type);
	      if (!val
		  || TREE_CODE (val) != INTEGER_CST
		  || integer_zerop (val))
		return false;
	      /* Again reserve one byte for the terminating nul.  */
	      val = wide_int_to_tree (TREE_TYPE (val),
				      wi::sub (wi::to_wide (val), 1));

	      /* Set the minimum size to zero since the string in
		 the array could have zero length.  */
	      *minlen = ssize_int (0);
	    }
	}
    }

  if (!val)
    return false;

  /* Adjust the lower bound on the string length as necessary.  */
  if (!*minlen
      || (rkind != SRK_STRLEN
	  && TREE_CODE (*minlen) == INTEGER_CST
	  && TREE_CODE (val) == INTEGER_CST
	  && tree_int_cst_lt (val, *minlen)))
    *minlen = val;

  if (*maxlen)
    {
      /* Adjust the more conservative bound if possible/necessary
	 and fail otherwise.  */
      if (rkind != SRK_STRLEN)
	{
	  if (TREE_CODE (*maxlen) != INTEGER_CST
	      || TREE_CODE (val) != INTEGER_CST)
	    return false;

	  if (tree_int_cst_lt (*maxlen, val))
	    *maxlen = val;
	  return true;
	}
      else if (simple_cst_equal (val, *maxlen) != 1)
	{
	  /* Fail if the length of this ARG is different from that
	     previously determined from another ARG.  */
	  return false;
	}
    }

  *maxlen = val;
  return true;
}
1488
/* Obtain the minimum and maximum string length or minimum and maximum
   value of ARG in LENGTH[0] and LENGTH[1], respectively.
   If ARG is an SSA name variable, follow its use-def chains.  When
   TYPE == 0, if LENGTH[1] is not equal to the length we determine or
   if we are unable to determine the length or value, return false.
   VISITED is a bitmap of visited variables.
   RKIND determines the kind of value or range to obtain (see
   strlen_range_kind).
   Set PDATA->DECL if ARG refers to an unterminated constant array.
   On input, set ELTSIZE to 1 for normal single byte character strings,
   and either 2 or 4 for wide character strings (the size of wchar_t).
   Return true if *PDATA was successfully populated and false otherwise.  */

static bool
get_range_strlen (tree arg, tree length[2], bitmap *visited,
		  strlen_range_kind rkind,
		  bool *flexp, unsigned eltsize, tree *nonstr)
{

  /* Non-SSA expressions are analyzed structurally by the helper.  */
  if (TREE_CODE (arg) != SSA_NAME)
    return get_range_strlen_tree (arg, length, visited, rkind, flexp,
				  eltsize, nonstr);

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  tree var = arg;
  gimple *def_stmt = SSA_NAME_DEF_STMT (var);

  /* The minimum and maximum length.  */
  tree *const maxlen = length + 1;

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
	 constant length or come from another SSA_NAME with a constant
	 length.  */
      if (gimple_assign_single_p (def_stmt)
	  || gimple_assign_unary_nop_p (def_stmt))
	{
	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  return get_range_strlen (rhs, length, visited, rkind, flexp,
				   eltsize, nonstr);
	}
      else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	{
	  /* Merge the ranges determined from both arms of the
	     conditional.  */
	  tree ops[2] = { gimple_assign_rhs2 (def_stmt),
			  gimple_assign_rhs3 (def_stmt) };

	  for (unsigned int i = 0; i < 2; i++)
	    if (!get_range_strlen (ops[i], length, visited, rkind,
				   flexp, eltsize, nonstr))
	      {
		if (rkind != SRK_LENRANGE_2)
		  return false;
		/* Set the upper bound to the maximum to prevent
		   it from being adjusted in the next iteration but
		   leave MINLEN and the more conservative MAXBOUND
		   determined so far alone (or leave them null if
		   they haven't been set yet).  That the MINLEN is
		   in fact zero can be determined from MAXLEN being
		   unbounded but the discovered minimum is used for
		   diagnostics.  */
		*maxlen = build_all_ones_cst (size_type_node);
	      }
	  return true;
	}
      return false;

    case GIMPLE_PHI:
      /* Unless RKIND == SRK_LENRANGE, all arguments of the PHI node
	 must have a constant length.  */
      for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
	{
	  tree arg = gimple_phi_arg (def_stmt, i)->def;

	  /* If this PHI has itself as an argument, we cannot
	     determine the string length of this argument.  However,
	     if we can find a constant string length for the other
	     PHI args then we can still be sure that this is a
	     constant string length.  So be optimistic and just
	     continue with the next argument.  */
	  if (arg == gimple_phi_result (def_stmt))
	    continue;

	  if (!get_range_strlen (arg, length, visited, rkind, flexp,
				 eltsize, nonstr))
	    {
	      if (rkind != SRK_LENRANGE_2)
		return false;
	      /* Set the upper bound to the maximum to prevent
		 it from being adjusted in the next iteration but
		 leave MINLEN and the more conservative MAXBOUND
		 determined so far alone (or leave them null if
		 they haven't been set yet).  That the MINLEN is
		 in fact zero can be determined from MAXLEN being
		 unbounded but the discovered minimum is used for
		 diagnostics.  */
	      *maxlen = build_all_ones_cst (size_type_node);
	    }
	}
      return true;

    default:
      return false;
    }
}
1605 /* Determine the minimum and maximum value or string length that ARG
1606 refers to and store each in the first two elements of MINMAXLEN.
1607 For expressions that point to strings of unknown lengths that are
1608 character arrays, use the upper bound of the array as the maximum
1609 length. For example, given an expression like 'x ? array : "xyz"'
1610 and array declared as 'char array[8]', MINMAXLEN[0] will be set
1611 to 0 and MINMAXLEN[1] to 7, the longest string that could be
1612 stored in array.
1613 Return true if the range of the string lengths has been obtained
1614 from the upper bound of an array at the end of a struct. Such
1615 an array may hold a string that's longer than its upper bound
1616 due to it being used as a poor-man's flexible array member.
1617
1618 STRICT is true if it will handle PHIs and COND_EXPRs conservatively
1619 and false if PHIs and COND_EXPRs are to be handled optimistically,
1620 if we can determine string length minimum and maximum; it will use
1621 the minimum from the ones where it can be determined.
1622 STRICT false should be only used for warning code.
1623 When non-null, clear *NONSTR if ARG refers to a constant array
1624 that is known not be nul-terminated. Otherwise set it to
1625 the declaration of the constant non-terminated array.
1626
1627 ELTSIZE is 1 for normal single byte character strings, and 2 or
1628 4 for wide characer strings. ELTSIZE is by default 1. */
1629
1630 bool
1631 get_range_strlen (tree arg, tree minmaxlen[2], unsigned eltsize,
1632 bool strict, tree *nonstr /* = NULL */)
1633 {
1634 bitmap visited = NULL;
1635
1636 minmaxlen[0] = NULL_TREE;
1637 minmaxlen[1] = NULL_TREE;
1638
1639 tree nonstrbuf;
1640 if (!nonstr)
1641 nonstr = &nonstrbuf;
1642 *nonstr = NULL_TREE;
1643
1644 bool flexarray = false;
1645 if (!get_range_strlen (arg, minmaxlen, &visited,
1646 strict ? SRK_LENRANGE : SRK_LENRANGE_2,
1647 &flexarray, eltsize, nonstr))
1648 {
1649 minmaxlen[0] = NULL_TREE;
1650 minmaxlen[1] = NULL_TREE;
1651 }
1652
1653 if (visited)
1654 BITMAP_FREE (visited);
1655
1656 return flexarray;
1657 }
1658
1659 /* Return the maximum string length for ARG, counting by TYPE
1660 (1, 2 or 4 for normal or wide chars). NONSTR indicates
1661 if the caller is prepared to handle unterminated strings.
1662
1663 If an unterminated string is discovered and our caller handles
1664 unterminated strings, then bubble up the offending DECL and
1665 return the maximum size. Otherwise return NULL. */
1666
1667 static tree
1668 get_maxval_strlen (tree arg, strlen_range_kind rkind, tree *nonstr = NULL)
1669 {
1670 /* A non-null NONSTR is meaningless when determining the maximum
1671 value of an integer ARG. */
1672 gcc_assert (rkind != SRK_INT_VALUE || nonstr == NULL);
1673 /* ARG must have an integral type when RKIND says so. */
1674 gcc_assert (rkind != SRK_INT_VALUE || INTEGRAL_TYPE_P (TREE_TYPE (arg)));
1675
1676 bitmap visited = NULL;
1677 tree len[2] = { NULL_TREE, NULL_TREE };
1678
1679 bool dummy;
1680 /* Set to non-null if ARG refers to an untermianted array. */
1681 tree mynonstr = NULL_TREE;
1682 if (!get_range_strlen (arg, len, &visited, rkind, &dummy, 1, &mynonstr))
1683 len[1] = NULL_TREE;
1684 if (visited)
1685 BITMAP_FREE (visited);
1686
1687 if (nonstr)
1688 {
1689 /* For callers prepared to handle unterminated arrays set
1690 *NONSTR to point to the declaration of the array and return
1691 the maximum length/size. */
1692 *nonstr = mynonstr;
1693 return len[1];
1694 }
1695
1696 /* Fail if the constant array isn't nul-terminated. */
1697 return mynonstr ? NULL_TREE : len[1];
1698 }
1699
1700
/* Fold function call to builtin strcpy with arguments DEST and SRC.
   If LEN is not NULL, it represents the length of the string to be
   copied.  Return true if the call was simplified, false otherwise.  */

static bool
gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
			    tree dest, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    {
      /* Issue -Wrestrict unless the pointers are null (those do
	 not point to objects and so do not indicate an overlap;
	 such calls could be the result of sanitization and jump
	 threading).  */
      if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
	{
	  tree func = gimple_call_fndecl (stmt);

	  warning_at (loc, OPT_Wrestrict,
		      "%qD source argument is the same as destination",
		      func);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  /* The transformation below replaces one call with two (strlen is
     folded into a constant here, but memcpy may not be); don't grow
     code when optimizing for size.  */
  if (optimize_function_for_size_p (cfun))
    return false;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  /* Set to non-null if ARG refers to an unterminated array.  */
  tree nonstr = NULL;
  tree len = get_maxval_strlen (src, SRK_STRLEN, &nonstr);

  if (nonstr)
    {
      /* Avoid folding calls with unterminated arrays.  */
      if (!gimple_no_warning_p (stmt))
	warn_string_no_nul (loc, "strcpy", src, nonstr);
      /* Suppress repeated diagnostics for the same statement.  */
      gimple_set_no_warning (stmt, true);
      return false;
    }

  if (!len)
    return false;

  /* Copy LEN + 1 bytes (including the terminating nul) with memcpy.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1764
/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
   If SLEN is not NULL, it represents the length of the source string.
   Return true if the call was simplified, false otherwise.  */

static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
			     tree dest, tree src, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Avoid warning if the destination refers to an array/pointer
	 decorated with attribute nonstring.  */
      if (!nonstring)
	{
	  tree fndecl = gimple_call_fndecl (stmt);

	  /* Warn about the lack of nul termination: the result is not
	     a (nul-terminated) string.  */
	  tree slen = get_maxval_strlen (src, SRK_STRLEN);
	  if (slen && !integer_zerop (slen))
	    warning_at (loc, OPT_Wstringop_truncation,
			"%G%qD destination unchanged after copying no bytes "
			"from a string of length %E",
			stmt, fndecl, slen);
	  else
	    warning_at (loc, OPT_Wstringop_truncation,
			"%G%qD destination unchanged after copying no bytes",
			stmt, fndecl);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, SRK_STRLEN);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  /* The size of the source string including the terminating nul.  */
  tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (ssize, len))
    return false;

  /* Diagnose truncation that leaves the copy unterminated.  */
  maybe_diag_stxncpy_trunc (*gsi, src, len);

  /* OK transform into builtin memcpy.  */
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}
1839
/* Fold function call to builtin strchr or strrchr.
   If both arguments are constant, evaluate and fold the result,
   otherwise simplify str(r)chr (str, 0) into str + strlen (str).
   In general strlen is significantly faster than strchr
   due to being a simpler operation.  */
static bool
gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree str = gimple_call_arg (stmt, 0);
  tree c = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  const char *p;
  char ch;

  /* Every transformation below replaces the call's result; without
     a LHS there is nothing to do.  */
  if (!gimple_call_lhs (stmt))
    return false;

  /* If both the string and the character are compile-time constants,
     do the search now and fold to STR + offset, or to null when the
     character does not occur.  */
  if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
    {
      const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);

      if (p1 == NULL)
	{
	  replace_call_with_value (gsi, integer_zero_node);
	  return true;
	}

      tree len = build_int_cst (size_type_node, p1 - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
					      POINTER_PLUS_EXPR, str, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* The strlen-based transformations below only apply when searching
     for the terminating nul itself.  */
  if (!integer_zerop (c))
    return false;

  /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size.  */
  if (is_strrchr && optimize_function_for_size_p (cfun))
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);

      if (strchr_fn)
	{
	  gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}

      return false;
    }

  tree len;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);

  if (!strlen_fn)
    return false;

  /* Create newstr = strlen (str).  */
  gimple_seq stmts = NULL;
  gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
  gimple_set_location (new_stmt, loc);
  len = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (new_stmt, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);

  /* Create (str p+ strlen (str)).  */
  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  POINTER_PLUS_EXPR, str, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);
  gsi_replace_with_seq_vops (gsi, stmts);
  /* gsi now points at the assignment to the lhs, get a
     stmt iterator to the strlen.
     ???  We can't use gsi_for_stmt as that doesn't work when the
     CFG isn't built yet.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
1923
/* Fold function call to builtin strstr.
   If both arguments are constant, evaluate and fold the result,
   additionally fold strstr (x, "") into x and strstr (x, "c")
   into strchr (x, 'c').  */
static bool
gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree haystack = gimple_call_arg (stmt, 0);
  tree needle = gimple_call_arg (stmt, 1);
  const char *p, *q;

  /* Every transformation below replaces the call's result; without
     a LHS there is nothing to do.  */
  if (!gimple_call_lhs (stmt))
    return false;

  /* All of the transformations require a constant needle.  */
  q = c_getstr (needle);
  if (q == NULL)
    return false;

  /* With a constant haystack too, perform the search at compile time
     and fold to HAYSTACK + offset or to null.  */
  if ((p = c_getstr (haystack)))
    {
      const char *r = strstr (p, q);

      if (r == NULL)
	{
	  replace_call_with_value (gsi, integer_zero_node);
	  return true;
	}

      tree len = build_int_cst (size_type_node, r - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt
	= gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
			       haystack, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* For strstr (x, "") return x.  */
  if (q[0] == '\0')
    {
      replace_call_with_value (gsi, haystack);
      return true;
    }

  /* Transform strstr (x, "c") into strchr (x, 'c').  */
  if (q[1] == '\0')
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (strchr_fn)
	{
	  tree c = build_int_cst (integer_type_node, q[0]);
	  gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
1985
/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   Returns true if the call was simplified: either into DST alone (when
   SRC is known to be ""), or into a strlen/memcpy pair that appends SRC
   (including its terminating nul) at the end of DST.  Returns false if
   no transformation was done.  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* The strlen+memcpy expansion below emits more code than the plain
     call, so only do it when optimizing this block for speed.  */
  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, SRK_STRLEN);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple *repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  newdst = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  /* Copy strlen (src) + 1 bytes so the terminating nul is copied too.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      /* strcat returns DST; materialize that when the result is used.  */
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
	 stmt iterator to the memcpy call.
	 ???  We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
2077
2078 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
2079 are the arguments to the call. */
2080
2081 static bool
2082 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
2083 {
2084 gimple *stmt = gsi_stmt (*gsi);
2085 tree dest = gimple_call_arg (stmt, 0);
2086 tree src = gimple_call_arg (stmt, 1);
2087 tree size = gimple_call_arg (stmt, 2);
2088 tree fn;
2089 const char *p;
2090
2091
2092 p = c_getstr (src);
2093 /* If the SRC parameter is "", return DEST. */
2094 if (p && *p == '\0')
2095 {
2096 replace_call_with_value (gsi, dest);
2097 return true;
2098 }
2099
2100 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
2101 return false;
2102
2103 /* If __builtin_strcat_chk is used, assume strcat is available. */
2104 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
2105 if (!fn)
2106 return false;
2107
2108 gimple *repl = gimple_build_call (fn, 2, dest, src);
2109 replace_call_with_call_and_fold (gsi, repl);
2110 return true;
2111 }
2112
/* Simplify a call to the strncat builtin.  Folds away calls that append
   nothing, and converts strncat into strcat when the constant bound is
   at least the constant source length (so the copy cannot be
   truncated).  Also diagnoses suspicious bounds — equal to the source
   length, or equal to/greater than the destination size — with
   -Wstringop-overflow.  Returns true if the call was replaced.  */

static bool
gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dst = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  const char *p = c_getstr (src);

  /* If the requested length is zero, or the src parameter string
     length is zero, return the dst parameter.  */
  if (integer_zerop (len) || (p && *p == '\0'))
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* Everything below requires both a constant bound and a constant
     source string.  */
  if (TREE_CODE (len) != INTEGER_CST || !p)
    return false;

  unsigned srclen = strlen (p);

  int cmpsrc = compare_tree_int (len, srclen);

  /* Return early if the requested len is less than the string length.
     Warnings will be issued elsewhere later.  */
  if (cmpsrc < 0)
    return false;

  unsigned HOST_WIDE_INT dstsize;

  /* Honor a previously set no-warning bit to avoid duplicate
     diagnostics.  */
  bool nowarn = gimple_no_warning_p (stmt);

  if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
    {
      int cmpdst = compare_tree_int (len, dstsize);

      if (cmpdst >= 0)
	{
	  tree fndecl = gimple_call_fndecl (stmt);

	  /* Strncat copies (at most) LEN bytes and always appends
	     the terminating NUL so the specified bound should never
	     be equal to (or greater than) the size of the destination.
	     If it is, the copy could overflow.  */
	  location_t loc = gimple_location (stmt);
	  nowarn = warning_at (loc, OPT_Wstringop_overflow_,
			       cmpdst == 0
			       ? G_("%G%qD specified bound %E equals "
				    "destination size")
			       : G_("%G%qD specified bound %E exceeds "
				    "destination size %wu"),
			       stmt, fndecl, len, dstsize);
	  if (nowarn)
	    gimple_set_no_warning (stmt, true);
	}
    }

  if (!nowarn && cmpsrc == 0)
    {
      tree fndecl = gimple_call_fndecl (stmt);
      location_t loc = gimple_location (stmt);

      /* To avoid possible overflow the specified bound should also
	 not be equal to the length of the source, even when the size
	 of the destination is unknown (it's not an uncommon mistake
	 to specify as the bound to strncpy the length of the source).  */
      if (warning_at (loc, OPT_Wstringop_overflow_,
		      "%G%qD specified bound %E equals source length",
		      stmt, fndecl, len))
	gimple_set_no_warning (stmt, true);
    }

  tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);

  /* If the replacement _DECL isn't initialized, don't do the
     transformation.  */
  if (!fn)
    return false;

  /* Otherwise, emit a call to strcat.  */
  gcall *repl = gimple_build_call (fn, 2, dst, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2201
2202 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2203 LEN, and SIZE. */
2204
2205 static bool
2206 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
2207 {
2208 gimple *stmt = gsi_stmt (*gsi);
2209 tree dest = gimple_call_arg (stmt, 0);
2210 tree src = gimple_call_arg (stmt, 1);
2211 tree len = gimple_call_arg (stmt, 2);
2212 tree size = gimple_call_arg (stmt, 3);
2213 tree fn;
2214 const char *p;
2215
2216 p = c_getstr (src);
2217 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2218 if ((p && *p == '\0')
2219 || integer_zerop (len))
2220 {
2221 replace_call_with_value (gsi, dest);
2222 return true;
2223 }
2224
2225 if (! tree_fits_uhwi_p (size))
2226 return false;
2227
2228 if (! integer_all_onesp (size))
2229 {
2230 tree src_len = c_strlen (src, 1);
2231 if (src_len
2232 && tree_fits_uhwi_p (src_len)
2233 && tree_fits_uhwi_p (len)
2234 && ! tree_int_cst_lt (len, src_len))
2235 {
2236 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2237 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
2238 if (!fn)
2239 return false;
2240
2241 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2242 replace_call_with_call_and_fold (gsi, repl);
2243 return true;
2244 }
2245 return false;
2246 }
2247
2248 /* If __builtin_strncat_chk is used, assume strncat is available. */
2249 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2250 if (!fn)
2251 return false;
2252
2253 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2254 replace_call_with_call_and_fold (gsi, repl);
2255 return true;
2256 }
2257
2258 /* Build and append gimple statements to STMTS that would load a first
2259 character of a memory location identified by STR. LOC is location
2260 of the statement. */
2261
2262 static tree
2263 gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2264 {
2265 tree var;
2266
2267 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2268 tree cst_uchar_ptr_node
2269 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2270 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2271
2272 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2273 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2274 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2275
2276 gimple_assign_set_lhs (stmt, var);
2277 gimple_seq_add_stmt_without_update (stmts, stmt);
2278
2279 return var;
2280 }
2281
/* Fold a call to the str{n}{case}cmp builtin pointed to by GSI
   iterator.  The builtin folded is identified by DECL_FUNCTION_CODE of
   the callee (strcmp, strncmp, strcasecmp, strncasecmp, and the _EQ
   variants).  Returns true if the call was replaced.  */

static bool
gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree callee = gimple_call_fndecl (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);

  tree type = integer_type_node;
  tree str1 = gimple_call_arg (stmt, 0);
  tree str2 = gimple_call_arg (stmt, 1);
  tree lhs = gimple_call_lhs (stmt);
  /* Bound for the strn* variants; -1 when unknown or not applicable.  */
  HOST_WIDE_INT length = -1;

  /* Handle strncmp and strncasecmp functions.  */
  if (gimple_call_num_args (stmt) == 3)
    {
      tree len = gimple_call_arg (stmt, 2);
      if (tree_fits_uhwi_p (len))
	length = tree_to_uhwi (len);
    }

  /* If the LEN parameter is zero, return zero.  */
  if (length == 0)
    {
      replace_call_with_value (gsi, integer_zero_node);
      return true;
    }

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (str1, str2, 0))
    {
      replace_call_with_value (gsi, integer_zero_node);
      return true;
    }

  const char *p1 = c_getstr (str1);
  const char *p2 = c_getstr (str2);

  /* For known strings, return an immediate value.  */
  if (p1 && p2)
    {
      int r = 0;
      bool known_result = false;

      switch (fcode)
	{
	case BUILT_IN_STRCMP:
	case BUILT_IN_STRCMP_EQ:
	  {
	    r = strcmp (p1, p2);
	    known_result = true;
	    break;
	  }
	case BUILT_IN_STRNCMP:
	case BUILT_IN_STRNCMP_EQ:
	  {
	    if (length == -1)
	      break;
	    r = strncmp (p1, p2, length);
	    known_result = true;
	    break;
	  }
	/* Only handleable situation is where the string are equal (result 0),
	   which is already handled by operand_equal_p case.  */
	case BUILT_IN_STRCASECMP:
	  break;
	case BUILT_IN_STRNCASECMP:
	  {
	    if (length == -1)
	      break;
	    /* A zero case-sensitive result implies a zero
	       case-insensitive result; a nonzero one says nothing.  */
	    r = strncmp (p1, p2, length);
	    if (r == 0)
	      known_result = true;
	    break;
	  }
	default:
	  gcc_unreachable ();
	}

      if (known_result)
	{
	  replace_call_with_value (gsi, build_cmp_result (type, r));
	  return true;
	}
    }

  /* True when the comparison is guaranteed to examine at least one
     character.  */
  bool nonzero_length = length >= 1
    || fcode == BUILT_IN_STRCMP
    || fcode == BUILT_IN_STRCMP_EQ
    || fcode == BUILT_IN_STRCASECMP;

  location_t loc = gimple_location (stmt);

  /* If the second arg is "", return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0' && nonzero_length)
    {
      gimple_seq stmts = NULL;
      tree var = gimple_load_first_char (loc, str1, &stmts);
      if (lhs)
	{
	  stmt = gimple_build_assign (lhs, NOP_EXPR, var);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If the first arg is "", return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0' && nonzero_length)
    {
      gimple_seq stmts = NULL;
      tree var = gimple_load_first_char (loc, str2, &stmts);

      if (lhs)
	{
	  /* Widen to int before negating.  */
	  tree c = create_tmp_reg_or_ssa_name (integer_type_node);
	  stmt = gimple_build_assign (c, NOP_EXPR, var);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);

	  stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg2 - *(const unsigned char*)arg1).  */
  if (fcode == BUILT_IN_STRNCMP && length == 1)
    {
      gimple_seq stmts = NULL;
      tree temp1 = gimple_load_first_char (loc, str1, &stmts);
      tree temp2 = gimple_load_first_char (loc, str2, &stmts);

      if (lhs)
	{
	  /* Widen both bytes to int and subtract.  */
	  tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
	  gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
	  gimple_seq_add_stmt_without_update (&stmts, convert1);

	  tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
	  gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
	  gimple_seq_add_stmt_without_update (&stmts, convert2);

	  stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If length is larger than the length of one constant string,
     replace strncmp with corresponding strcmp */
  if (fcode == BUILT_IN_STRNCMP
      && length > 0
      && ((p2 && (size_t) length > strlen (p2))
	  || (p1 && (size_t) length > strlen (p1))))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
      if (!fn)
	return false;
      gimple *repl = gimple_build_call (fn, 2, str1, str2);
      replace_call_with_call_and_fold (gsi, repl);
      return true;
    }

  return false;
}
2456
/* Fold a call to the memchr builtin pointed to by the GSI iterator.
   Replaces the call with a null pointer when the search must fail, or
   with ARG1 p+ offset when the match position is known at compile time.
   Returns true if the call was replaced.  */

static bool
gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree arg1 = gimple_call_arg (stmt, 0);
  tree arg2 = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
      return true;
    }

  /* Require a constant length and a searched-for value representable
     as a target character.  */
  char c;
  if (TREE_CODE (arg2) != INTEGER_CST
      || !tree_fits_uhwi_p (len)
      || !target_char_cst_p (arg2, &c))
    return false;

  unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
  unsigned HOST_WIDE_INT string_length;
  const char *p1 = c_getstr (arg1, &string_length);

  if (p1)
    {
      /* Search only the part of the buffer whose contents are known.  */
      const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
      if (r == NULL)
	{
	  /* No match within the known prefix: the result is null only
	     if the search could not continue past it.  */
	  if (length <= string_length)
	    {
	      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
	      return true;
	    }
	}
      else
	{
	  /* Found: result is ARG1 plus the match offset.  */
	  unsigned HOST_WIDE_INT offset = r - p1;
	  gimple_seq stmts = NULL;
	  if (lhs != NULL_TREE)
	    {
	      tree offset_cst = build_int_cst (TREE_TYPE (len), offset);
	      gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
						   arg1, offset_cst);
	      gimple_seq_add_stmt_without_update (&stmts, stmt);
	    }
	  else
	    gimple_seq_add_stmt_without_update (&stmts,
						gimple_build_nop ());

	  gsi_replace_with_seq_vops (gsi, stmts);
	  return true;
	}
    }

  return false;
}
2518
/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
   to the call.  UNLOCKED is true if this is actually a call to
   fputs_unlocked.  Calls writing "" are deleted, single characters go
   through fputc, and longer known-length strings through fwrite.
   Returns true if the call was replaced, false if no simplification
   was possible.  */

static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
			   tree arg0, tree arg1,
			   bool unlocked)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
			 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
			 : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
			  ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
			  : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, SRK_STRLEN);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely .  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
	const char *p = c_getstr (arg0);
	if (p != NULL)
	  {
	    if (!fn_fputc)
	      return false;

	    gimple *repl = gimple_build_call (fn_fputc, 2,
					      build_int_cst
					      (integer_type_node, p[0]), arg1);
	    replace_call_with_call_and_fold (gsi, repl);
	    return true;
	  }
      }
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
	/* If optimizing for size keep fputs.  */
	if (optimize_function_for_size_p (cfun))
	  return false;
	/* New argument list transforming fputs(string, stream) to
	   fwrite(string, 1, len, stream).  */
	if (!fn_fwrite)
	  return false;

	gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
					  size_one_node, len, arg1);
	replace_call_with_call_and_fold (gsi, repl);
	return true;
      }
    default:
      gcc_unreachable ();
    }
  return false;
}
2595
/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call.
   FCODE is the BUILT_IN_* code of the builtin.  The check is dropped
   (converting to the unchecked function) when SIZE is unlimited
   (all ones) or provably at least LEN.  Returns true if the call was
   replaced.  */

static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
				tree dest, tree src, tree len, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	{
	  replace_call_with_value (gsi, dest);
	  return true;
	}
      else
	{
	  gimple_seq stmts = NULL;
	  len = gimple_convert_to_ptrofftype (&stmts, loc, len);
	  tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
				    TREE_TYPE (dest), dest, len);
	  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	  replace_call_with_value (gsi, temp);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	      return false;
	    }
	}
      else
	maxlen = len;

      /* Bail out if the copy may be larger than the object.  */
      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2696
/* Fold a call to the __st[rp]cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call.
   FCODE is the BUILT_IN_* code of the builtin (BUILT_IN_STRCPY_CHK or
   BUILT_IN_STPCPY_CHK).  The check is dropped when the copied length
   is provably within SIZE; an ignored __stpcpy_chk may also be turned
   into __strcpy_chk, or a non-constant-but-known length into
   __memcpy_chk.  Returns true if the call was replaced.  */

static bool
gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
				tree dest,
				tree src, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree len, fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    {
      /* Issue -Wrestrict unless the pointers are null (those do
	 not point to objects and so do not indicate an overlap;
	 such calls could be the result of sanitization and jump
	 threading).  */
      if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
	{
	  tree func = gimple_call_fndecl (stmt);

	  warning_at (loc, OPT_Wrestrict,
		      "%qD source argument is the same as destination",
		      func);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (src, SRK_STRLENMAX);
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return false;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 3, dest, src, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return false;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return false;

	      /* The memcpy length is strlen (src) + 1 to include the
		 terminating nul.  */
	      gimple_seq stmts = NULL;
	      len = force_gimple_operand (len, &stmts, true, NULL_TREE);
	      len = gimple_convert (&stmts, loc, size_type_node, len);
	      len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
				  build_int_cst (size_type_node, 1));
	      gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	      gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	maxlen = len;

      /* Bail out if the source may not fit (strictly) inside SIZE.  */
      if (! tree_int_cst_lt (maxlen, size))
	return false;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2801
2802 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
2803 are the arguments to the call. If MAXLEN is not NULL, it is maximum
2804 length passed as third argument. IGNORE is true if return value can be
2805 ignored. FCODE is the BUILT_IN_* code of the builtin. */
2806
2807 static bool
2808 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
2809 tree dest, tree src,
2810 tree len, tree size,
2811 enum built_in_function fcode)
2812 {
2813 gimple *stmt = gsi_stmt (*gsi);
2814 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2815 tree fn;
2816
2817 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
2818 {
2819 /* If return value of __stpncpy_chk is ignored,
2820 optimize into __strncpy_chk. */
2821 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
2822 if (fn)
2823 {
2824 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2825 replace_call_with_call_and_fold (gsi, repl);
2826 return true;
2827 }
2828 }
2829
2830 if (! tree_fits_uhwi_p (size))
2831 return false;
2832
2833 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
2834 if (! integer_all_onesp (size))
2835 {
2836 if (! tree_fits_uhwi_p (len))
2837 {
2838 /* If LEN is not constant, try MAXLEN too.
2839 For MAXLEN only allow optimizing into non-_ocs function
2840 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2841 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2842 return false;
2843 }
2844 else
2845 maxlen = len;
2846
2847 if (tree_int_cst_lt (size, maxlen))
2848 return false;
2849 }
2850
2851 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2852 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
2853 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
2854 if (!fn)
2855 return false;
2856
2857 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2858 replace_call_with_call_and_fold (gsi, repl);
2859 return true;
2860 }
2861
/* Fold a call to the builtin stpcpy with arguments DEST and SRC.
   If the result is unused the call becomes strcpy; if the source
   length is a known constant the call becomes memcpy followed by an
   assignment of DEST + LEN to the lhs.  Returns true if the call was
   replaced, false if no simplification can be made.  */

static bool
gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  location_t loc = gimple_location (stmt);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree fn, lenp1;

  /* If the result is unused, replace stpcpy with strcpy.  */
  if (gimple_call_lhs (stmt) == NULL_TREE)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;
      gimple_call_set_fndecl (stmt, fn);
      fold_stmt (gsi);
      return true;
    }

  /* Set to non-null if ARG refers to an unterminated array.  */
  c_strlen_data data = { };
  tree len = c_strlen (src, 1, &data, 1);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    {
      data.decl = unterminated_array (src);
      if (!data.decl)
	return false;
    }

  if (data.decl)
    {
      /* Avoid folding calls with unterminated arrays.  */
      if (!gimple_no_warning_p (stmt))
	warn_string_no_nul (loc, "stpcpy", src, data.decl);
      gimple_set_no_warning (stmt, true);
      return false;
    }

  /* memcpy expands to more code than the call itself, so skip the
     transformation when optimizing for size.  */
  if (optimize_function_for_size_p (cfun)
      /* If length is zero it's small enough.  */
      && !integer_zerop (len))
    return false;

  /* If the source has a known length replace stpcpy with memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  /* Copy LEN + 1 bytes so the terminating nul comes along.  */
  gimple_seq stmts = NULL;
  tree tem = gimple_convert (&stmts, loc, size_type_node, len);
  lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
			tem, build_int_cst (size_type_node, 1));
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
  /* Transfer the virtual operands from the original call by hand since
     the memcpy is inserted, not substituted, into the IL.  */
  gimple_set_vuse (repl, gimple_vuse (stmt));
  gimple_set_vdef (repl, gimple_vdef (stmt));
  if (gimple_vdef (repl)
      && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
    SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
  gsi_insert_before (gsi, repl, GSI_SAME_STMT);
  /* Replace the result with dest + len.  */
  stmts = NULL;
  tem = gimple_convert (&stmts, loc, sizetype, len);
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
				      POINTER_PLUS_EXPR, dest, tem);
  gsi_replace (gsi, ret, false);
  /* Finally fold the memcpy call.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
2940
/* Fold a call to __{,v}snprintf_chk.  FCODE is either
   BUILT_IN_SNPRINTF_CHK or BUILT_IN_VSNPRINTF_CHK.  The _chk call is
   rewritten in place into plain {,v}snprintf when the object size
   check can be proved redundant and the format is safe (flag 0, or no
   % directives other than a lone "%s").  Returns true if the call was
   rewritten.  */

static bool
gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
				  enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (gimple_call_num_args (stmt) < 5)
    return false;

  dest = gimple_call_arg (stmt, 0);
  len = gimple_call_arg (stmt, 1);
  flag = gimple_call_arg (stmt, 2);
  size = gimple_call_arg (stmt, 3);
  fmt = gimple_call_arg (stmt, 4);

  if (! tree_fits_uhwi_p (size))
    return false;

  if (! integer_all_onesp (size))
    {
      tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      /* Bail out if the write may exceed the object size.  */
      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  if (!init_target_chars ())
    return false;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
			      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 5 argument by 3 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, len);
  gimple_call_set_arg (stmt, 2, fmt);
  /* Shift any remaining varargs down over the dropped flag and size
     slots, then shrink the statement.  */
  for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
3021
/* Fold __{,v}sprintf_chk at *GSI into a plain __{,v}sprintf call when the
   object-size check can be proven to succeed.  FCODE is either
   BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.  Returns true iff the
   call was rewritten in place, false if a normal (checked) call should
   remain.  */
3026
3027 static bool
3028 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
3029 enum built_in_function fcode)
3030 {
3031 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3032 tree dest, size, len, fn, fmt, flag;
3033 const char *fmt_str;
3034 unsigned nargs = gimple_call_num_args (stmt);
3035
3036 /* Verify the required arguments in the original call. */
3037 if (nargs < 4)
3038 return false;
/* The _chk variants take (dest, flag, size, fmt, ...).  */
3039 dest = gimple_call_arg (stmt, 0);
3040 flag = gimple_call_arg (stmt, 1);
3041 size = gimple_call_arg (stmt, 2);
3042 fmt = gimple_call_arg (stmt, 3);
3043
3044 if (! tree_fits_uhwi_p (size))
3045 return false;
3046
3047 len = NULL_TREE;
3048
3049 if (!init_target_chars ())
3050 return false;
3051
3052 /* Check whether the format is a literal string constant. */
3053 fmt_str = c_getstr (fmt);
3054 if (fmt_str != NULL)
3055 {
3056 /* If the format doesn't contain % args or %%, we know the size. */
3057 if (strchr (fmt_str, target_percent) == 0)
3058 {
3059 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
3060 len = build_int_cstu (size_type_node, strlen (fmt_str));
3061 }
3062 /* If the format is "%s" and first ... argument is a string literal,
3063 we know the size too. */
3064 else if (fcode == BUILT_IN_SPRINTF_CHK
3065 && strcmp (fmt_str, target_percent_s) == 0)
3066 {
3067 tree arg;
3068
3069 if (nargs == 5)
3070 {
3071 arg = gimple_call_arg (stmt, 4);
3072 if (POINTER_TYPE_P (TREE_TYPE (arg)))
3073 {
3074 len = c_strlen (arg, 1);
3075 if (! len || ! tree_fits_uhwi_p (len))
3076 len = NULL_TREE;
3077 }
3078 }
3079 }
3080 }
3081
/* Unless the object size is unknown (all-ones), require a known output
   length strictly smaller than SIZE.  */
3082 if (! integer_all_onesp (size))
3083 {
3084 if (! len || ! tree_int_cst_lt (len, size))
3085 return false;
3086 }
3087
3088 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
3089 or if format doesn't contain % chars or is "%s". */
3090 if (! integer_zerop (flag))
3091 {
3092 if (fmt_str == NULL)
3093 return false;
3094 if (strchr (fmt_str, target_percent) != NULL
3095 && strcmp (fmt_str, target_percent_s))
3096 return false;
3097 }
3098
3099 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
3100 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
3101 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
3102 if (!fn)
3103 return false;
3104
3105 /* Replace the called function and the first 4 arguments by 2 retaining
3106 trailing varargs. */
3107 gimple_call_set_fndecl (stmt, fn);
3108 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
3109 gimple_call_set_arg (stmt, 0, dest);
3110 gimple_call_set_arg (stmt, 1, fmt);
/* Shift the variadic arguments down over the dropped FLAG and SIZE, then
   shrink the statement by the two now-unused operand slots.  */
3111 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
3112 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
3113 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
3114 fold_stmt (gsi);
3115 return true;
3116 }
3117
3118 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
3119 ORIG may be null if this is a 2-argument call. We don't attempt to
3120 simplify calls with more than 3 arguments.
3121
3122 Return true if simplification was possible, otherwise false. */
3123
3124 bool
3125 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
3126 {
3127 gimple *stmt = gsi_stmt (*gsi);
3128 tree dest = gimple_call_arg (stmt, 0);
3129 tree fmt = gimple_call_arg (stmt, 1);
3130 tree orig = NULL_TREE;
3131 const char *fmt_str = NULL;
3132
3133 /* Verify the required arguments in the original call. We deal with two
3134 types of sprintf() calls: 'sprintf (str, fmt)' and
3135 'sprintf (dest, "%s", orig)'. */
3136 if (gimple_call_num_args (stmt) > 3)
3137 return false;
3138
3139 if (gimple_call_num_args (stmt) == 3)
3140 orig = gimple_call_arg (stmt, 2);
3141
3142 /* Check whether the format is a literal string constant. */
3143 fmt_str = c_getstr (fmt);
3144 if (fmt_str == NULL)
3145 return false;
3146
3147 if (!init_target_chars ())
3148 return false;
3149
3150 /* If the format doesn't contain % args or %%, use strcpy. */
3151 if (strchr (fmt_str, target_percent) == NULL)
3152 {
3153 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3154
3155 if (!fn)
3156 return false;
3157
3158 /* Don't optimize sprintf (buf, "abc", ptr++). */
3159 if (orig)
3160 return false;
3161
3162 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
3163 'format' is known to contain no % formats. */
3164 gimple_seq stmts = NULL;
3165 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3166
3167 /* Propagate the NO_WARNING bit to avoid issuing the same
3168 warning more than once. */
3169 if (gimple_no_warning_p (stmt))
3170 gimple_set_no_warning (repl, true);
3171
3172 gimple_seq_add_stmt_without_update (&stmts, repl);
3173 if (gimple_call_lhs (stmt))
3174 {
/* sprintf returns the number of characters written; with no % in the
   format that is simply strlen (fmt_str).  */
3175 repl = gimple_build_assign (gimple_call_lhs (stmt),
3176 build_int_cst (integer_type_node,
3177 strlen (fmt_str)));
3178 gimple_seq_add_stmt_without_update (&stmts, repl);
3179 gsi_replace_with_seq_vops (gsi, stmts);
3180 /* gsi now points at the assignment to the lhs, get a
3181 stmt iterator to the memcpy call.
3182 ??? We can't use gsi_for_stmt as that doesn't work when the
3183 CFG isn't built yet. */
3184 gimple_stmt_iterator gsi2 = *gsi;
3185 gsi_prev (&gsi2);
3186 fold_stmt (&gsi2);
3187 }
3188 else
3189 {
3190 gsi_replace_with_seq_vops (gsi, stmts);
3191 fold_stmt (gsi);
3192 }
3193 return true;
3194 }
3195
3196 /* If the format is "%s", use strcpy if the result isn't used. */
3197 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3198 {
3199 tree fn;
3200 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3201
3202 if (!fn)
3203 return false;
3204
3205 /* Don't crash on sprintf (str1, "%s"). */
3206 if (!orig)
3207 return false;
3208
/* When the call's result is used we must also be able to compute it,
   which requires a known length for ORIG.  */
3209 tree orig_len = NULL_TREE;
3210 if (gimple_call_lhs (stmt))
3211 {
3212 orig_len = get_maxval_strlen (orig, SRK_STRLEN);
3213 if (!orig_len)
3214 return false;
3215 }
3216
3217 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
3218 gimple_seq stmts = NULL;
3219 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3220
3221 /* Propagate the NO_WARNING bit to avoid issuing the same
3222 warning more than once. */
3223 if (gimple_no_warning_p (stmt))
3224 gimple_set_no_warning (repl, true);
3225
3226 gimple_seq_add_stmt_without_update (&stmts, repl);
3227 if (gimple_call_lhs (stmt))
3228 {
3229 if (!useless_type_conversion_p (integer_type_node,
3230 TREE_TYPE (orig_len)))
3231 orig_len = fold_convert (integer_type_node, orig_len);
3232 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
3233 gimple_seq_add_stmt_without_update (&stmts, repl);
3234 gsi_replace_with_seq_vops (gsi, stmts);
3235 /* gsi now points at the assignment to the lhs, get a
3236 stmt iterator to the memcpy call.
3237 ??? We can't use gsi_for_stmt as that doesn't work when the
3238 CFG isn't built yet. */
3239 gimple_stmt_iterator gsi2 = *gsi;
3240 gsi_prev (&gsi2);
3241 fold_stmt (&gsi2);
3242 }
3243 else
3244 {
3245 gsi_replace_with_seq_vops (gsi, stmts);
3246 fold_stmt (gsi);
3247 }
3248 return true;
3249 }
3250 return false;
3251 }
3252
3253 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
3254 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
3255 attempt to simplify calls with more than 4 arguments.
3256
3257 Return true if simplification was possible, otherwise false. */
3258
3259 bool
3260 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
3261 {
3262 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3263 tree dest = gimple_call_arg (stmt, 0);
3264 tree destsize = gimple_call_arg (stmt, 1);
3265 tree fmt = gimple_call_arg (stmt, 2);
3266 tree orig = NULL_TREE;
3267 const char *fmt_str = NULL;
3268
3269 if (gimple_call_num_args (stmt) > 4)
3270 return false;
3271
3272 if (gimple_call_num_args (stmt) == 4)
3273 orig = gimple_call_arg (stmt, 3);
3274
/* The destination size must be a compile-time constant so the
   no-truncation condition below can be checked.  */
3275 if (!tree_fits_uhwi_p (destsize))
3276 return false;
3277 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
3278
3279 /* Check whether the format is a literal string constant. */
3280 fmt_str = c_getstr (fmt);
3281 if (fmt_str == NULL)
3282 return false;
3283
3284 if (!init_target_chars ())
3285 return false;
3286
3287 /* If the format doesn't contain % args or %%, use strcpy. */
3288 if (strchr (fmt_str, target_percent) == NULL)
3289 {
3290 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3291 if (!fn)
3292 return false;
3293
3294 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
3295 if (orig)
3296 return false;
3297
3298 /* We could expand this as
3299 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
3300 or to
3301 memcpy (str, fmt_with_nul_at_cstm1, cst);
3302 but in the former case that might increase code size
3303 and in the latter case grow .rodata section too much.
3304 So punt for now. */
3305 size_t len = strlen (fmt_str);
3306 if (len >= destlen)
3307 return false;
3308
3309 gimple_seq stmts = NULL;
3310 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3311 gimple_seq_add_stmt_without_update (&stmts, repl);
3312 if (gimple_call_lhs (stmt))
3313 {
/* snprintf returns the number of characters that would have been
   written; without truncation that is the format's length.  */
3314 repl = gimple_build_assign (gimple_call_lhs (stmt),
3315 build_int_cst (integer_type_node, len));
3316 gimple_seq_add_stmt_without_update (&stmts, repl);
3317 gsi_replace_with_seq_vops (gsi, stmts);
3318 /* gsi now points at the assignment to the lhs, get a
3319 stmt iterator to the memcpy call.
3320 ??? We can't use gsi_for_stmt as that doesn't work when the
3321 CFG isn't built yet. */
3322 gimple_stmt_iterator gsi2 = *gsi;
3323 gsi_prev (&gsi2);
3324 fold_stmt (&gsi2);
3325 }
3326 else
3327 {
3328 gsi_replace_with_seq_vops (gsi, stmts);
3329 fold_stmt (gsi);
3330 }
3331 return true;
3332 }
3333
3334 /* If the format is "%s", use strcpy if the result isn't used. */
3335 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3336 {
3337 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3338 if (!fn)
3339 return false;
3340
3341 /* Don't crash on snprintf (str1, cst, "%s"). */
3342 if (!orig)
3343 return false;
3344
3345 tree orig_len = get_maxval_strlen (orig, SRK_STRLEN);
3346 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
3347 return false;
3348
3349 /* We could expand this as
3350 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
3351 or to
3352 memcpy (str1, str2_with_nul_at_cstm1, cst);
3353 but in the former case that might increase code size
3354 and in the latter case grow .rodata section too much.
3355 So punt for now. */
3356 if (compare_tree_int (orig_len, destlen) >= 0)
3357 return false;
3358
3359 /* Convert snprintf (str1, cst, "%s", str2) into
3360 strcpy (str1, str2) if strlen (str2) < cst. */
3361 gimple_seq stmts = NULL;
3362 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3363 gimple_seq_add_stmt_without_update (&stmts, repl);
3364 if (gimple_call_lhs (stmt))
3365 {
3366 if (!useless_type_conversion_p (integer_type_node,
3367 TREE_TYPE (orig_len)))
3368 orig_len = fold_convert (integer_type_node, orig_len);
3369 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
3370 gimple_seq_add_stmt_without_update (&stmts, repl);
3371 gsi_replace_with_seq_vops (gsi, stmts);
3372 /* gsi now points at the assignment to the lhs, get a
3373 stmt iterator to the memcpy call.
3374 ??? We can't use gsi_for_stmt as that doesn't work when the
3375 CFG isn't built yet. */
3376 gimple_stmt_iterator gsi2 = *gsi;
3377 gsi_prev (&gsi2);
3378 fold_stmt (&gsi2);
3379 }
3380 else
3381 {
3382 gsi_replace_with_seq_vops (gsi, stmts);
3383 fold_stmt (gsi);
3384 }
3385 return true;
3386 }
3387 return false;
3388 }
3389
/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
   FP, FMT, and ARG are the arguments to the call.  We don't fold calls with
   more than 3 arguments, and ARG may be null in the 2-argument case.

   Returns true if the call at *GSI was simplified in place (into fputs,
   fputc or nothing), false otherwise.  FCODE is the BUILT_IN_* code of the
   function to be simplified.  */
3397
3398 static bool
3399 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
3400 tree fp, tree fmt, tree arg,
3401 enum built_in_function fcode)
3402 {
3403 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3404 tree fn_fputc, fn_fputs;
3405 const char *fmt_str = NULL;
3406
3407 /* If the return value is used, don't do the transformation. */
3408 if (gimple_call_lhs (stmt) != NULL_TREE)
3409 return false;
3410
3411 /* Check whether the format is a literal string constant. */
3412 fmt_str = c_getstr (fmt);
3413 if (fmt_str == NULL)
3414 return false;
3415
3416 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
3417 {
3418 /* If we're using an unlocked function, assume the other
3419 unlocked functions exist explicitly. */
3420 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
3421 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
3422 }
3423 else
3424 {
3425 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
3426 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
3427 }
3428
3429 if (!init_target_chars ())
3430 return false;
3431
3432 /* If the format doesn't contain % args or %%, use strcpy. */
3433 if (strchr (fmt_str, target_percent) == NULL)
3434 {
3435 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
3436 && arg)
3437 return false;
3438
3439 /* If the format specifier was "", fprintf does nothing. */
3440 if (fmt_str[0] == '\0')
3441 {
3442 replace_call_with_value (gsi, NULL_TREE);
3443 return true;
3444 }
3445
3446 /* When "string" doesn't contain %, replace all cases of
3447 fprintf (fp, string) with fputs (string, fp). The fputs
3448 builtin will take care of special cases like length == 1. */
3449 if (fn_fputs)
3450 {
3451 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
3452 replace_call_with_call_and_fold (gsi, repl);
3453 return true;
3454 }
3455 }
3456
3457 /* The other optimizations can be done only on the non-va_list variants. */
3458 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
3459 return false;
3460
3461 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3462 else if (strcmp (fmt_str, target_percent_s) == 0)
3463 {
3464 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3465 return false;
3466 if (fn_fputs)
3467 {
3468 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
3469 replace_call_with_call_and_fold (gsi, repl);
3470 return true;
3471 }
3472 }
3473
3474 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3475 else if (strcmp (fmt_str, target_percent_c) == 0)
3476 {
3477 if (!arg
3478 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
3479 return false;
3480 if (fn_fputc)
3481 {
3482 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
3483 replace_call_with_call_and_fold (gsi, repl);
3484 return true;
3485 }
3486 }
3487
3488 return false;
3489 }
3490
/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
   FMT and ARG are the arguments to the call; we don't fold cases with
   more than 2 arguments, and ARG may be null if this is a 1-argument case.

   Returns true if the call at *GSI was simplified in place (into putchar,
   puts or nothing), false otherwise.  FCODE is the BUILT_IN_* code of the
   function to be simplified.  */
3498
3499 static bool
3500 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
3501 tree arg, enum built_in_function fcode)
3502 {
3503 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3504 tree fn_putchar, fn_puts, newarg;
3505 const char *fmt_str = NULL;
3506
3507 /* If the return value is used, don't do the transformation. */
3508 if (gimple_call_lhs (stmt) != NULL_TREE)
3509 return false;
3510
3511 /* Check whether the format is a literal string constant. */
3512 fmt_str = c_getstr (fmt);
3513 if (fmt_str == NULL)
3514 return false;
3515
3516 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
3517 {
3518 /* If we're using an unlocked function, assume the other
3519 unlocked functions exist explicitly. */
3520 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
3521 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
3522 }
3523 else
3524 {
3525 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
3526 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
3527 }
3528
3529 if (!init_target_chars ())
3530 return false;
3531
/* Handle both printf ("%s", arg) and a format with no % at all; in either
   case STR below is the string that would actually be printed.  */
3532 if (strcmp (fmt_str, target_percent_s) == 0
3533 || strchr (fmt_str, target_percent) == NULL)
3534 {
3535 const char *str;
3536
3537 if (strcmp (fmt_str, target_percent_s) == 0)
3538 {
3539 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3540 return false;
3541
3542 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3543 return false;
3544
3545 str = c_getstr (arg);
3546 if (str == NULL)
3547 return false;
3548 }
3549 else
3550 {
3551 /* The format specifier doesn't contain any '%' characters. */
3552 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
3553 && arg)
3554 return false;
3555 str = fmt_str;
3556 }
3557
3558 /* If the string was "", printf does nothing. */
3559 if (str[0] == '\0')
3560 {
3561 replace_call_with_value (gsi, NULL_TREE);
3562 return true;
3563 }
3564
3565 /* If the string has length of 1, call putchar. */
3566 if (str[1] == '\0')
3567 {
3568 /* Given printf("c"), (where c is any one character,)
3569 convert "c"[0] to an int and pass that to the replacement
3570 function. */
3571 newarg = build_int_cst (integer_type_node, str[0]);
3572 if (fn_putchar)
3573 {
3574 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
3575 replace_call_with_call_and_fold (gsi, repl);
3576 return true;
3577 }
3578 }
3579 else
3580 {
3581 /* If the string was "string\n", call puts("string"). */
3582 size_t len = strlen (str);
3583 if ((unsigned char)str[len - 1] == target_newline
3584 && (size_t) (int) len == len
3585 && (int) len > 0)
3586 {
3587 char *newstr;
3588
3589 /* Create a NUL-terminated string that's one char shorter
3590 than the original, stripping off the trailing '\n'. */
3591 newstr = xstrdup (str);
3592 newstr[len - 1] = '\0';
3593 newarg = build_string_literal (len, newstr);
3594 free (newstr);
3595 if (fn_puts)
3596 {
3597 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
3598 replace_call_with_call_and_fold (gsi, repl);
3599 return true;
3600 }
3601 }
3602 else
3603 /* We'd like to arrange to call fputs(string,stdout) here,
3604 but we need stdout and don't have a way to get it yet. */
3605 return false;
3606 }
3607 }
3608
3609 /* The other optimizations can be done only on the non-va_list variants. */
3610 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3611 return false;
3612
3613 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
3614 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
3615 {
3616 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3617 return false;
3618 if (fn_puts)
3619 {
3620 gcall *repl = gimple_build_call (fn_puts, 1, arg);
3621 replace_call_with_call_and_fold (gsi, repl);
3622 return true;
3623 }
3624 }
3625
3626 /* If the format specifier was "%c", call __builtin_putchar(arg). */
3627 else if (strcmp (fmt_str, target_percent_c) == 0)
3628 {
3629 if (!arg || ! useless_type_conversion_p (integer_type_node,
3630 TREE_TYPE (arg)))
3631 return false;
3632 if (fn_putchar)
3633 {
3634 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
3635 replace_call_with_call_and_fold (gsi, repl);
3636 return true;
3637 }
3638 }
3639
3640 return false;
3641 }
3642
3643
3644
/* Fold a call to __builtin_strlen at *GSI.  If the argument's length is a
   known constant, replace the call with that constant; otherwise, when the
   call has an integral SSA lhs, record the computed [min, max] length range
   on it.  Returns true iff the call was replaced.  */
3646
3647 static bool
3648 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
3649 {
3650 gimple *stmt = gsi_stmt (*gsi);
3651 tree arg = gimple_call_arg (stmt, 0);
3652
3653 wide_int minlen;
3654 wide_int maxlen;
3655
3656 /* Set to non-null if ARG refers to an unterminated array. */
3657 tree nonstr;
3658 tree lenrange[2];
3659 if (!get_range_strlen (arg, lenrange, 1, true, &nonstr)
3660 && !nonstr
3661 && lenrange[0] && TREE_CODE (lenrange[0]) == INTEGER_CST
3662 && lenrange[1] && TREE_CODE (lenrange[1]) == INTEGER_CST)
3663 {
3664 /* The range of lengths refers to either a single constant
3665 string or to the longest and shortest constant string
3666 referenced by the argument of the strlen() call, or to
3667 the strings that can possibly be stored in the arrays
3668 the argument refers to. */
3669 minlen = wi::to_wide (lenrange[0]);
3670 maxlen = wi::to_wide (lenrange[1]);
3671 }
3672 else
3673 {
/* No usable range: fall back to [0, max_object_size () - 2].  */
3674 unsigned prec = TYPE_PRECISION (sizetype);
3675
3676 minlen = wi::shwi (0, prec);
3677 maxlen = wi::to_wide (max_object_size (), prec) - 2;
3678 }
3679
/* A degenerate range means the length is known exactly; replace the call
   with the constant.  */
3680 if (minlen == maxlen)
3681 {
3682 lenrange[0] = force_gimple_operand_gsi (gsi, lenrange[0], true, NULL,
3683 true, GSI_SAME_STMT);
3684 replace_call_with_value (gsi, lenrange[0]);
3685 return true;
3686 }
3687
3688 if (tree lhs = gimple_call_lhs (stmt))
3689 if (TREE_CODE (lhs) == SSA_NAME
3690 && INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
3691 set_range_info (lhs, VR_RANGE, minlen, maxlen);
3692
3693 return false;
3694 }
3695
3696 /* Fold a call to __builtin_acc_on_device. */
3697
3698 static bool
3699 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
3700 {
3701 /* Defer folding until we know which compiler we're in. */
3702 if (symtab->state != EXPANSION)
3703 return false;
3704
/* Device codes recognized by the host compiler; when built as an
   accelerator compiler the #ifdef below substitutes its own codes.  */
3705 unsigned val_host = GOMP_DEVICE_HOST;
3706 unsigned val_dev = GOMP_DEVICE_NONE;
3707
3708 #ifdef ACCEL_COMPILER
3709 val_host = GOMP_DEVICE_NOT_HOST;
3710 val_dev = ACCEL_COMPILER_acc_device;
3711 #endif
3712
3713 location_t loc = gimple_location (gsi_stmt (*gsi));
3714
/* Emit (arg0 == val_host) | (arg0 == val_dev) and replace the call's
   result with that boolean.  */
3715 tree host_eq = make_ssa_name (boolean_type_node);
3716 gimple *host_ass = gimple_build_assign
3717 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
3718 gimple_set_location (host_ass, loc);
3719 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
3720
3721 tree dev_eq = make_ssa_name (boolean_type_node);
3722 gimple *dev_ass = gimple_build_assign
3723 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
3724 gimple_set_location (dev_ass, loc);
3725 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
3726
3727 tree result = make_ssa_name (boolean_type_node);
3728 gimple *result_ass = gimple_build_assign
3729 (result, BIT_IOR_EXPR, host_eq, dev_eq);
3730 gimple_set_location (result_ass, loc);
3731 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
3732
3733 replace_call_with_value (gsi, result);
3734
3735 return true;
3736 }
3737
3738 /* Fold realloc (0, n) -> malloc (n). */
3739
3740 static bool
3741 gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
3742 {
3743 gimple *stmt = gsi_stmt (*gsi);
3744 tree arg = gimple_call_arg (stmt, 0);
3745 tree size = gimple_call_arg (stmt, 1);
3746
3747 if (operand_equal_p (arg, null_pointer_node, 0))
3748 {
3749 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
3750 if (fn_malloc)
3751 {
3752 gcall *repl = gimple_build_call (fn_malloc, 1, size);
3753 replace_call_with_call_and_fold (gsi, repl);
3754 return true;
3755 }
3756 }
3757 return false;
3758 }
3759
3760 /* Fold the non-target builtin at *GSI and return whether any simplification
3761 was made. */
3762
3763 static bool
3764 gimple_fold_builtin (gimple_stmt_iterator *gsi)
3765 {
3766 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
3767 tree callee = gimple_call_fndecl (stmt);
3768
3769 /* Give up for always_inline inline builtins until they are
3770 inlined. */
3771 if (avoid_folding_inline_builtin (callee))
3772 return false;
3773
/* Dispatch to the dedicated folder for each builtin we know how to
   simplify; fall through to the generic folder otherwise.  */
3774 unsigned n = gimple_call_num_args (stmt);
3775 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
3776 switch (fcode)
3777 {
3778 case BUILT_IN_BCMP:
3779 return gimple_fold_builtin_bcmp (gsi);
3780 case BUILT_IN_BCOPY:
3781 return gimple_fold_builtin_bcopy (gsi);
3782 case BUILT_IN_BZERO:
3783 return gimple_fold_builtin_bzero (gsi);
3784
3785 case BUILT_IN_MEMSET:
3786 return gimple_fold_builtin_memset (gsi,
3787 gimple_call_arg (stmt, 1),
3788 gimple_call_arg (stmt, 2));
3789 case BUILT_IN_MEMCPY:
3790 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3791 gimple_call_arg (stmt, 1), 0);
3792 case BUILT_IN_MEMPCPY:
3793 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3794 gimple_call_arg (stmt, 1), 1);
3795 case BUILT_IN_MEMMOVE:
3796 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3797 gimple_call_arg (stmt, 1), 3);
3798 case BUILT_IN_SPRINTF_CHK:
3799 case BUILT_IN_VSPRINTF_CHK:
3800 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
3801 case BUILT_IN_STRCAT_CHK:
3802 return gimple_fold_builtin_strcat_chk (gsi);
3803 case BUILT_IN_STRNCAT_CHK:
3804 return gimple_fold_builtin_strncat_chk (gsi);
3805 case BUILT_IN_STRLEN:
3806 return gimple_fold_builtin_strlen (gsi);
3807 case BUILT_IN_STRCPY:
3808 return gimple_fold_builtin_strcpy (gsi,
3809 gimple_call_arg (stmt, 0),
3810 gimple_call_arg (stmt, 1));
3811 case BUILT_IN_STRNCPY:
3812 return gimple_fold_builtin_strncpy (gsi,
3813 gimple_call_arg (stmt, 0),
3814 gimple_call_arg (stmt, 1),
3815 gimple_call_arg (stmt, 2));
3816 case BUILT_IN_STRCAT:
3817 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
3818 gimple_call_arg (stmt, 1));
3819 case BUILT_IN_STRNCAT:
3820 return gimple_fold_builtin_strncat (gsi);
3821 case BUILT_IN_INDEX:
3822 case BUILT_IN_STRCHR:
3823 return gimple_fold_builtin_strchr (gsi, false);
3824 case BUILT_IN_RINDEX:
3825 case BUILT_IN_STRRCHR:
3826 return gimple_fold_builtin_strchr (gsi, true);
3827 case BUILT_IN_STRSTR:
3828 return gimple_fold_builtin_strstr (gsi);
3829 case BUILT_IN_STRCMP:
3830 case BUILT_IN_STRCMP_EQ:
3831 case BUILT_IN_STRCASECMP:
3832 case BUILT_IN_STRNCMP:
3833 case BUILT_IN_STRNCMP_EQ:
3834 case BUILT_IN_STRNCASECMP:
3835 return gimple_fold_builtin_string_compare (gsi);
3836 case BUILT_IN_MEMCHR:
3837 return gimple_fold_builtin_memchr (gsi);
3838 case BUILT_IN_FPUTS:
3839 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3840 gimple_call_arg (stmt, 1), false);
3841 case BUILT_IN_FPUTS_UNLOCKED:
3842 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3843 gimple_call_arg (stmt, 1), true);
3844 case BUILT_IN_MEMCPY_CHK:
3845 case BUILT_IN_MEMPCPY_CHK:
3846 case BUILT_IN_MEMMOVE_CHK:
3847 case BUILT_IN_MEMSET_CHK:
3848 return gimple_fold_builtin_memory_chk (gsi,
3849 gimple_call_arg (stmt, 0),
3850 gimple_call_arg (stmt, 1),
3851 gimple_call_arg (stmt, 2),
3852 gimple_call_arg (stmt, 3),
3853 fcode);
3854 case BUILT_IN_STPCPY:
3855 return gimple_fold_builtin_stpcpy (gsi);
3856 case BUILT_IN_STRCPY_CHK:
3857 case BUILT_IN_STPCPY_CHK:
3858 return gimple_fold_builtin_stxcpy_chk (gsi,
3859 gimple_call_arg (stmt, 0),
3860 gimple_call_arg (stmt, 1),
3861 gimple_call_arg (stmt, 2),
3862 fcode);
3863 case BUILT_IN_STRNCPY_CHK:
3864 case BUILT_IN_STPNCPY_CHK:
3865 return gimple_fold_builtin_stxncpy_chk (gsi,
3866 gimple_call_arg (stmt, 0),
3867 gimple_call_arg (stmt, 1),
3868 gimple_call_arg (stmt, 2),
3869 gimple_call_arg (stmt, 3),
3870 fcode);
3871 case BUILT_IN_SNPRINTF_CHK:
3872 case BUILT_IN_VSNPRINTF_CHK:
3873 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
3874
/* The printf family folders are only invoked for the argument counts
   they support; other arities break out to the generic folder.  */
3875 case BUILT_IN_FPRINTF:
3876 case BUILT_IN_FPRINTF_UNLOCKED:
3877 case BUILT_IN_VFPRINTF:
3878 if (n == 2 || n == 3)
3879 return gimple_fold_builtin_fprintf (gsi,
3880 gimple_call_arg (stmt, 0),
3881 gimple_call_arg (stmt, 1),
3882 n == 3
3883 ? gimple_call_arg (stmt, 2)
3884 : NULL_TREE,
3885 fcode);
3886 break;
3887 case BUILT_IN_FPRINTF_CHK:
3888 case BUILT_IN_VFPRINTF_CHK:
3889 if (n == 3 || n == 4)
3890 return gimple_fold_builtin_fprintf (gsi,
3891 gimple_call_arg (stmt, 0),
3892 gimple_call_arg (stmt, 2),
3893 n == 4
3894 ? gimple_call_arg (stmt, 3)
3895 : NULL_TREE,
3896 fcode);
3897 break;
3898 case BUILT_IN_PRINTF:
3899 case BUILT_IN_PRINTF_UNLOCKED:
3900 case BUILT_IN_VPRINTF:
3901 if (n == 1 || n == 2)
3902 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
3903 n == 2
3904 ? gimple_call_arg (stmt, 1)
3905 : NULL_TREE, fcode);
3906 break;
3907 case BUILT_IN_PRINTF_CHK:
3908 case BUILT_IN_VPRINTF_CHK:
3909 if (n == 2 || n == 3)
3910 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
3911 n == 3
3912 ? gimple_call_arg (stmt, 2)
3913 : NULL_TREE, fcode);
3914 break;
3915 case BUILT_IN_ACC_ON_DEVICE:
3916 return gimple_fold_builtin_acc_on_device (gsi,
3917 gimple_call_arg (stmt, 0));
3918 case BUILT_IN_REALLOC:
3919 return gimple_fold_builtin_realloc (gsi);
3920
3921 default:;
3922 }
3923
3924 /* Try the generic builtin folder. */
3925 bool ignore = (gimple_call_lhs (stmt) == NULL);
3926 tree result = fold_call_stmt (stmt, ignore);
3927 if (result)
3928 {
3929 if (ignore)
3930 STRIP_NOPS (result);
3931 else
3932 result = fold_convert (gimple_call_return_type (stmt), result);
3933 if (!update_call_from_tree (gsi, result))
3934 gimplify_and_update_call_from_tree (gsi, result);
3935 return true;
3936 }
3937
3938 return false;
3939 }
3940
3941 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
3942 function calls to constants, where possible. */
3943
3944 static tree
3945 fold_internal_goacc_dim (const gimple *call)
3946 {
3947 int axis = oacc_get_ifn_dim_arg (call);
3948 int size = oacc_get_fn_dim_size (current_function_decl, axis);
3949 tree result = NULL_TREE;
3950 tree type = TREE_TYPE (gimple_call_lhs (call));
3951
3952 switch (gimple_call_internal_fn (call))
3953 {
3954 case IFN_GOACC_DIM_POS:
3955 /* If the size is 1, we know the answer. */
3956 if (size == 1)
3957 result = build_int_cst (type, 0);
3958 break;
3959 case IFN_GOACC_DIM_SIZE:
3960 /* If the size is not dynamic, we know the answer. */
3961 if (size)
3962 result = build_int_cst (type, size);
3963 break;
3964 default:
3965 break;
3966 }
3967
3968 return result;
3969 }
3970
3971 /* Return true if stmt is __atomic_compare_exchange_N call which is suitable
3972 for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
3973 &var where var is only addressable because of such calls. */
3974
3975 bool
3976 optimize_atomic_compare_exchange_p (gimple *stmt)
3977 {
/* Require the full 6-argument builtin form, atomics inlining enabled,
   and a statement with virtual operands (a real memory access).  */
3978 if (gimple_call_num_args (stmt) != 6
3979 || !flag_inline_atomics
3980 || !optimize
3981 || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
3982 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
3983 || !gimple_vdef (stmt)
3984 || !gimple_vuse (stmt))
3985 return false;
3986
/* Only the fixed-size __atomic_compare_exchange_{1,2,4,8,16} builtins
   qualify.  */
3987 tree fndecl = gimple_call_fndecl (stmt);
3988 switch (DECL_FUNCTION_CODE (fndecl))
3989 {
3990 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
3991 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
3992 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
3993 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
3994 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
3995 break;
3996 default:
3997 return false;
3998 }
3999
/* The "expected" argument must be the address of a local variable.  */
4000 tree expected = gimple_call_arg (stmt, 1);
4001 if (TREE_CODE (expected) != ADDR_EXPR
4002 || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
4003 return false;
4004
4005 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
4006 if (!is_gimple_reg_type (etype)
4007 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
4008 || TREE_THIS_VOLATILE (etype)
4009 || VECTOR_TYPE_P (etype)
4010 || TREE_CODE (etype) == COMPLEX_TYPE
4011 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
4012 might not preserve all the bits. See PR71716. */
4013 || SCALAR_FLOAT_TYPE_P (etype)
4014 || maybe_ne (TYPE_PRECISION (etype),
4015 GET_MODE_BITSIZE (TYPE_MODE (etype))))
4016 return false;
4017
/* The weak flag must be a literal 0 or 1.  */
4018 tree weak = gimple_call_arg (stmt, 3);
4019 if (!integer_zerop (weak) && !integer_onep (weak))
4020 return false;
4021
4022 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
4023 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
4024 machine_mode mode = TYPE_MODE (itype);
4025
/* The target must provide a compare-and-swap pattern for this mode.  */
4026 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
4027 == CODE_FOR_nothing
4028 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
4029 return false;
4030
4031 if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
4032 return false;
4033
4034 return true;
4035 }
4036
/* Fold
     r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
   into
     _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
     i = IMAGPART_EXPR <t>;
     r = (_Bool) i;
     e = REALPART_EXPR <t>;
   The internal function returns a complex value: the real part is the
   value loaded from *p, the imaginary part is the success flag.  After
   this transformation E is no longer address-taken by this call.  */

void
fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree fndecl = gimple_call_fndecl (stmt);
  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  /* ITYPE is the integral type the CAS operates on (third parameter
     of the builtin); CTYPE is the complex type the IFN returns.  */
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  tree ctype = build_complex_type (itype);
  /* Strip the ADDR_EXPR: arg 1 is known to be &expected (checked by
     optimize_atomic_compare_exchange_p).  */
  tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
  bool throws = false;
  edge e = NULL;
  /* Load the current value of EXPECTED into an SSA name so the IFN can
     take it by value instead of by address.  */
  gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
				   expected);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);
  /* Remember where to leave *GSI on return: at the load just added.  */
  gimple_stmt_iterator gsiret = gsi_for_stmt (g);
  if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
    {
      /* Reinterpret the expected bits in the CAS integral type.  */
      g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, itype,
				       gimple_assign_lhs (g)));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }
  /* Encode weak flag and byte size into one constant: w * 256 + N.  */
  int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
	     + int_size_in_bytes (itype);
  g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
				  gimple_call_arg (stmt, 0),
				  gimple_assign_lhs (g),
				  gimple_call_arg (stmt, 2),
				  build_int_cst (integer_type_node, flag),
				  gimple_call_arg (stmt, 4),
				  gimple_call_arg (stmt, 5));
  tree lhs = make_ssa_name (ctype);
  gimple_call_set_lhs (g, lhs);
  /* Transfer the virtual operands from the old call to the new one.  */
  gimple_set_vdef (g, gimple_vdef (stmt));
  gimple_set_vuse (g, gimple_vuse (stmt));
  SSA_NAME_DEF_STMT (gimple_vdef (g)) = g;
  tree oldlhs = gimple_call_lhs (stmt);
  if (stmt_can_throw_internal (cfun, stmt))
    {
      /* The call ends its BB with an EH edge; follow-up statements must
	 go on the fallthru edge instead of after the call.  */
      throws = true;
      e = find_fallthru_edge (gsi_bb (*gsi)->succs);
    }
  gimple_call_set_nothrow (as_a <gcall *> (g),
			   gimple_call_nothrow_p (as_a <gcall *> (stmt)));
  gimple_call_set_lhs (stmt, NULL_TREE);
  gsi_replace (gsi, g, true);
  if (oldlhs)
    {
      /* r = (_Bool) IMAGPART_EXPR <t>;  -- the success flag.  */
      g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
			       build1 (IMAGPART_EXPR, itype, lhs));
      if (throws)
	{
	  gsi_insert_on_edge_immediate (e, g);
	  *gsi = gsi_for_stmt (g);
	}
      else
	gsi_insert_after (gsi, g, GSI_NEW_STMT);
      g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  /* e = REALPART_EXPR <t>;  -- the value actually seen in memory.  */
  g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
			   build1 (REALPART_EXPR, itype, lhs));
  if (throws && oldlhs == NULL_TREE)
    {
      /* If the lhs path above already moved us onto the fallthru edge,
	 we are past the EH split; only handle it here otherwise.  */
      gsi_insert_on_edge_immediate (e, g);
      *gsi = gsi_for_stmt (g);
    }
  else
    gsi_insert_after (gsi, g, GSI_NEW_STMT);
  if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
    {
      /* Convert the loaded bits back to EXPECTED's type.  */
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
			       VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
				       gimple_assign_lhs (g)));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
  gsi_insert_after (gsi, g, GSI_NEW_STMT);
  /* Leave the iterator at the initial load of EXPECTED so the caller
     re-folds everything we emitted.  */
  *gsi = gsiret;
}
4126
4127 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
4128 doesn't fit into TYPE. The test for overflow should be regardless of
4129 -fwrapv, and even for unsigned types. */
4130
4131 bool
4132 arith_overflowed_p (enum tree_code code, const_tree type,
4133 const_tree arg0, const_tree arg1)
4134 {
4135 widest2_int warg0 = widest2_int_cst (arg0);
4136 widest2_int warg1 = widest2_int_cst (arg1);
4137 widest2_int wres;
4138 switch (code)
4139 {
4140 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
4141 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
4142 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
4143 default: gcc_unreachable ();
4144 }
4145 signop sign = TYPE_SIGN (type);
4146 if (sign == UNSIGNED && wi::neg_p (wres))
4147 return true;
4148 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
4149 }
4150
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value. Return true if any changes were made.
   It is assumed that the operands have been previously folded.
   When INPLACE is true, only transformations that keep the statement
   in place (no new statements, no replacement) are performed.  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
	tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	if (tmp)
	  {
	    gimple_call_set_arg (stmt, i, tmp);
	    changed = true;
	  }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  /* The OBJ_TYPE_REF already wraps a known function address
	     (e.g. after propagation); strip the wrapper.  Warn in the
	     dump file if the target contradicts the type hierarchy.  */
	  if (dump_file && virtual_method_call_p (callee)
	      && !possible_polymorphic_call_target_p
		    (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
						 (OBJ_TYPE_REF_EXPR (callee)))))
	    {
	      fprintf (dump_file,
		       "Type inheritance inconsistent devirtualization of ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, callee, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }

	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  changed = true;
	}
      else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
	{
	  /* Try IPA devirtualization: if the complete set of possible
	     targets is known and has at most one member, fold the
	     indirect call to it (or to __builtin_unreachable when the
	     set is empty).  */
	  bool final;
	  vec <cgraph_node *>targets
	    = possible_polymorphic_call_targets (callee, stmt, &final);
	  if (final && targets.length () <= 1 && dbg_cnt (devirt))
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (dump_enabled_p ())
		{
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
				   "folding virtual function call to %s\n",
				   targets.length () == 1
				   ? targets[0]->name ()
				   : "__builtin_unreachable");
		}
	      if (targets.length () == 1)
		{
		  tree fndecl = targets[0]->decl;
		  gimple_call_set_fndecl (stmt, fndecl);
		  changed = true;
		  /* If changing the call to __cxa_pure_virtual
		     or similar noreturn function, adjust gimple_call_fntype
		     too.  */
		  if (gimple_call_noreturn_p (stmt)
		      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
		      && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
		      && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
			  == void_type_node))
		    gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
		  /* If the call becomes noreturn, remove the lhs.  */
		  if (lhs
		      && gimple_call_noreturn_p (stmt)
		      && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
			  || should_remove_lhs_p (lhs)))
		    {
		      if (TREE_CODE (lhs) == SSA_NAME)
			{
			  /* Keep the SSA name defined: give it an
			     undefined default definition.  */
			  tree var = create_tmp_var (TREE_TYPE (lhs));
			  tree def = get_or_create_ssa_default_def (cfun, var);
			  gimple *new_stmt = gimple_build_assign (lhs, def);
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      gimple_call_set_lhs (stmt, NULL_TREE);
		    }
		  maybe_remove_unused_call_args (cfun, stmt);
		}
	      else
		{
		  /* No possible target at all: the call is unreachable.  */
		  tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
		  gimple *new_stmt = gimple_build_call (fndecl, 0);
		  gimple_set_location (new_stmt, gimple_location (stmt));
		  /* If the call had a SSA name as lhs morph that into
		     an uninitialized value.  */
		  if (lhs && TREE_CODE (lhs) == SSA_NAME)
		    {
		      tree var = create_tmp_var (TREE_TYPE (lhs));
		      SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
		      SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
		      set_ssa_default_def (cfun, var, lhs);
		    }
		  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
		  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
		  gsi_replace (gsi, new_stmt, false);
		  return true;
		}
	    }
	}
    }

  /* Check for indirect calls that became direct calls, and then
     no longer require a static chain.  */
  if (gimple_call_chain (stmt))
    {
      tree fn = gimple_call_fndecl (stmt);
      if (fn && !DECL_STATIC_CHAIN (fn))
	{
	  gimple_call_set_chain (stmt, NULL);
	  changed = true;
	}
      else
	{
	  /* Otherwise fold *& in the chain operand itself.  */
	  tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
	  if (tmp)
	    {
	      gimple_call_set_chain (stmt, tmp);
	      changed = true;
	    }
	}
    }

  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      if (gimple_fold_builtin (gsi))
	changed = true;
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
    {
      /* Target-specific builtins are folded by the backend hook.  */
      changed |= targetm.gimple_fold_builtin (gsi);
    }
  else if (gimple_call_internal_p (stmt))
    {
      /* Internal functions: either fold to a value directly, or map
	 the IFN to a tree_code (SUBCODE) and fold the arithmetic
	 below.  CPLX_RESULT marks the *_OVERFLOW IFNs whose result is
	 a complex {value, overflow-flag} pair.  */
      enum tree_code subcode = ERROR_MARK;
      tree result = NULL_TREE;
      bool cplx_result = false;
      tree overflow = NULL_TREE;
      switch (gimple_call_internal_fn (stmt))
	{
	case IFN_BUILTIN_EXPECT:
	  result = fold_builtin_expect (gimple_location (stmt),
					gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1),
					gimple_call_arg (stmt, 2),
					NULL_TREE);
	  break;
	case IFN_UBSAN_OBJECT_SIZE:
	  {
	    /* Drop the check when the access is provably in bounds or
	       the object size is unknown (all-ones).  */
	    tree offset = gimple_call_arg (stmt, 1);
	    tree objsize = gimple_call_arg (stmt, 2);
	    if (integer_all_onesp (objsize)
		|| (TREE_CODE (offset) == INTEGER_CST
		    && TREE_CODE (objsize) == INTEGER_CST
		    && tree_int_cst_le (offset, objsize)))
	      {
		replace_call_with_value (gsi, NULL_TREE);
		return true;
	      }
	  }
	  break;
	case IFN_UBSAN_PTR:
	  /* A zero offset can never overflow the pointer.  */
	  if (integer_zerop (gimple_call_arg (stmt, 1)))
	    {
	      replace_call_with_value (gsi, NULL_TREE);
	      return true;
	    }
	  break;
	case IFN_UBSAN_BOUNDS:
	  {
	    /* Drop the bounds check when both index and bound are
	       constants and the index is within the bound.  */
	    tree index = gimple_call_arg (stmt, 1);
	    tree bound = gimple_call_arg (stmt, 2);
	    if (TREE_CODE (index) == INTEGER_CST
		&& TREE_CODE (bound) == INTEGER_CST)
	      {
		index = fold_convert (TREE_TYPE (bound), index);
		if (TREE_CODE (index) == INTEGER_CST
		    && tree_int_cst_le (index, bound))
		  {
		    replace_call_with_value (gsi, NULL_TREE);
		    return true;
		  }
	      }
	  }
	  break;
	case IFN_GOACC_DIM_SIZE:
	case IFN_GOACC_DIM_POS:
	  result = fold_internal_goacc_dim (stmt);
	  break;
	case IFN_UBSAN_CHECK_ADD:
	  subcode = PLUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_SUB:
	  subcode = MINUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_MUL:
	  subcode = MULT_EXPR;
	  break;
	case IFN_ADD_OVERFLOW:
	  subcode = PLUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_SUB_OVERFLOW:
	  subcode = MINUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_MUL_OVERFLOW:
	  subcode = MULT_EXPR;
	  cplx_result = true;
	  break;
	default:
	  break;
	}
      if (subcode != ERROR_MARK)
	{
	  tree arg0 = gimple_call_arg (stmt, 0);
	  tree arg1 = gimple_call_arg (stmt, 1);
	  tree type = TREE_TYPE (arg0);
	  if (cplx_result)
	    {
	      /* For *_OVERFLOW the element type of the complex lhs is
		 authoritative; without a lhs there is nothing to fold.  */
	      tree lhs = gimple_call_lhs (stmt);
	      if (lhs == NULL_TREE)
		type = NULL_TREE;
	      else
		type = TREE_TYPE (TREE_TYPE (lhs));
	    }
	  if (type == NULL_TREE)
	    ;
	  /* x = y + 0; x = y - 0; x = y * 0; */
	  else if (integer_zerop (arg1))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg0;
	  /* x = 0 + y; x = 0 * y; */
	  else if (subcode != MINUS_EXPR && integer_zerop (arg0))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg1;
	  /* x = y - y; */
	  else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
	    result = integer_zero_node;
	  /* x = y * 1; x = 1 * y; */
	  else if (subcode == MULT_EXPR && integer_onep (arg1))
	    result = arg0;
	  else if (subcode == MULT_EXPR && integer_onep (arg0))
	    result = arg1;
	  else if (TREE_CODE (arg0) == INTEGER_CST
		   && TREE_CODE (arg1) == INTEGER_CST)
	    {
	      if (cplx_result)
		result = int_const_binop (subcode, fold_convert (type, arg0),
					  fold_convert (type, arg1));
	      else
		result = int_const_binop (subcode, arg0, arg1);
	      if (result && arith_overflowed_p (subcode, type, arg0, arg1))
		{
		  /* Overflow: the *_OVERFLOW IFNs record it in the flag,
		     the UBSAN checks must stay to report it at runtime.  */
		  if (cplx_result)
		    overflow = build_one_cst (type);
		  else
		    result = NULL_TREE;
		}
	    }
	  if (result)
	    {
	      if (result == integer_zero_node)
		result = build_zero_cst (type);
	      else if (cplx_result && TREE_TYPE (result) != type)
		{
		  if (TREE_CODE (result) == INTEGER_CST)
		    {
		      /* Re-check whether the constant fits the lhs
			 element type; record overflow if not.  */
		      if (arith_overflowed_p (PLUS_EXPR, type, result,
					      integer_zero_node))
			overflow = build_one_cst (type);
		    }
		  else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
			    && TYPE_UNSIGNED (type))
			   || (TYPE_PRECISION (type)
			       < (TYPE_PRECISION (TREE_TYPE (result))
				  + (TYPE_UNSIGNED (TREE_TYPE (result))
				     && !TYPE_UNSIGNED (type)))))
		    /* The conversion could change the value; give up.  */
		    result = NULL_TREE;
		  if (result)
		    result = fold_convert (type, result);
		}
	    }
	}

      if (result)
	{
	  if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
	    result = drop_tree_overflow (result);
	  if (cplx_result)
	    {
	      /* Package {result, overflow} into the complex value the
		 *_OVERFLOW IFN is defined to return.  */
	      if (overflow == NULL_TREE)
		overflow = build_zero_cst (TREE_TYPE (result));
	      tree ctype = build_complex_type (TREE_TYPE (result));
	      if (TREE_CODE (result) == INTEGER_CST
		  && TREE_CODE (overflow) == INTEGER_CST)
		result = build_complex (ctype, result, overflow);
	      else
		result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
				     ctype, result, overflow);
	    }
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
    }

  return changed;
}
4478
4479
4480 /* Return true whether NAME has a use on STMT. */
4481
4482 static bool
4483 has_use_on_stmt (tree name, gimple *stmt)
4484 {
4485 imm_use_iterator iter;
4486 use_operand_p use_p;
4487 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
4488 if (USE_STMT (use_p) == stmt)
4489 return true;
4490 return false;
4491 }
4492
/* Worker for fold_stmt_1 dispatch to pattern based folding with
   gimple_simplify.

   Replaces *GSI with the simplification result in RCODE and OPS
   and the associated statements in *SEQ.  Does the replacement
   according to INPLACE and returns true if the operation succeeded.  */

static bool
replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
				  gimple_match_op *res_op,
				  gimple_seq *seq, bool inplace)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree *ops = res_op->ops;
  unsigned int num_ops = res_op->num_ops;

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  See also maybe_push_res_to_seq.
     As an exception allow such uses if there was a use of the
     same SSA name on the old stmt.  */
  for (unsigned int i = 0; i < num_ops; ++i)
    if (TREE_CODE (ops[i]) == SSA_NAME
	&& SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])
	&& !has_use_on_stmt (ops[i], stmt))
      return false;

  /* ops[0] may itself be a comparison tree (e.g. for COND_EXPR);
     apply the same abnormal-SSA check to its two operands.  */
  if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
    for (unsigned int i = 0; i < 2; ++i)
      if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))
	  && !has_use_on_stmt (TREE_OPERAND (ops[0], i), stmt))
	return false;

  /* Don't insert new statements when INPLACE is true, even if we could
     reuse STMT for the final statement.  */
  if (inplace && !gimple_seq_empty_p (*seq))
    return false;

  if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      /* A GIMPLE_COND can directly hold a comparison, be tested against
	 zero (SSA_NAME result), or be folded to constant true/false.  */
      gcc_assert (res_op->code.is_tree_code ());
      if (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison
	  /* GIMPLE_CONDs condition may not throw.  */
	  && (!flag_exceptions
	      || !cfun->can_throw_non_call_exceptions
	      || !operation_could_trap_p (res_op->code,
					  FLOAT_TYPE_P (TREE_TYPE (ops[0])),
					  false, NULL_TREE)))
	gimple_cond_set_condition (cond_stmt, res_op->code, ops[0], ops[1]);
      else if (res_op->code == SSA_NAME)
	gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
				   build_zero_cst (TREE_TYPE (ops[0])));
      else if (res_op->code == INTEGER_CST)
	{
	  if (integer_zerop (ops[0]))
	    gimple_cond_make_false (cond_stmt);
	  else
	    gimple_cond_make_true (cond_stmt);
	}
      else if (!inplace)
	{
	  /* Otherwise materialize the result as a new SSA name and
	     compare it against zero.  */
	  tree res = maybe_push_res_to_seq (res_op, seq);
	  if (!res)
	    return false;
	  gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
				     build_zero_cst (TREE_TYPE (res)));
	}
      else
	return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi),
			     0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (is_gimple_assign (stmt)
	   && res_op->code.is_tree_code ())
    {
      /* For assignments only rewrite in place when the new RHS does
	 not need more operand slots than the statement has.  */
      if (!inplace
	  || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (res_op->code))
	{
	  maybe_build_generic_op (res_op);
	  gimple_assign_set_rhs_with_ops (gsi, res_op->code,
					  res_op->op_or_null (0),
					  res_op->op_or_null (1),
					  res_op->op_or_null (2));
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      if (!gimple_seq_empty_p (*seq))
		print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	      print_gimple_stmt (dump_file, gsi_stmt (*gsi),
				 0, TDF_SLIM);
	    }
	  gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
	  return true;
	}
    }
  else if (res_op->code.is_fn_code ()
	   && gimple_call_combined_fn (stmt) == res_op->code)
    {
      /* Same function, simplified arguments: update them in place.  */
      gcc_assert (num_ops == gimple_call_num_args (stmt));
      for (unsigned int i = 0; i < num_ops; ++i)
	gimple_call_set_arg (stmt, i, ops[i]);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (!inplace)
    {
      /* Fallback: build a fresh statement sequence computing the result
	 into the old lhs and replace the whole statement.  */
      if (gimple_has_lhs (stmt))
	{
	  tree lhs = gimple_get_lhs (stmt);
	  if (!maybe_push_res_to_seq (res_op, seq, lhs))
	    return false;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	    }
	  gsi_replace_with_seq_vops (gsi, *seq);
	  return true;
	}
      else
	gcc_unreachable ();
    }

  return false;
}
4633
/* Canonicalize MEM_REFs invariant address operand after propagation.
   T points at the tree to canonicalize (possibly rewritten in place);
   returns true if *T was changed.  */

static bool
maybe_canonicalize_mem_ref_addr (tree *t)
{
  bool res = false;

  /* Look through an outer ADDR_EXPR and canonicalize its operand.  */
  if (TREE_CODE (*t) == ADDR_EXPR)
    t = &TREE_OPERAND (*t, 0);

  /* The C and C++ frontends use an ARRAY_REF for indexing with their
     generic vector extension.  The actual vector referenced is
     view-converted to an array type for this purpose.  If the index
     is constant the canonical representation in the middle-end is a
     BIT_FIELD_REF so re-write the former to the latter here.  */
  if (TREE_CODE (*t) == ARRAY_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
      && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
      && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
    {
      tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
      if (VECTOR_TYPE_P (vtype))
	{
	  tree low = array_ref_low_bound (*t);
	  if (TREE_CODE (low) == INTEGER_CST)
	    {
	      if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
		{
		  /* Compute the bit offset of the accessed element and
		     check the access stays inside the vector.  */
		  widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
					    wi::to_widest (low));
		  idx = wi::mul (idx, wi::to_widest
					 (TYPE_SIZE (TREE_TYPE (*t))));
		  widest_int ext
		    = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
		  if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
		    {
		      *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
				       TREE_TYPE (*t),
				       TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
				       TYPE_SIZE (TREE_TYPE (*t)),
				       wide_int_to_tree (bitsizetype, idx));
		      res = true;
		    }
		}
	    }
	}
    }

  /* Step down to the innermost base of a component chain.  */
  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
     of invariant addresses into a SSA name MEM_REF address.  */
  if (TREE_CODE (*t) == MEM_REF
      || TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (*t, 0);
      if (TREE_CODE (addr) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
	      || handled_component_p (TREE_OPERAND (addr, 0))))
	{
	  /* Fold the component/MEM_REF offset into the MEM_REF's
	     constant offset operand and use the plain base address.  */
	  tree base;
	  poly_int64 coffset;
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						&coffset);
	  if (!base)
	    gcc_unreachable ();

	  TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
	  TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
						  TREE_OPERAND (*t, 1),
						  size_int (coffset));
	  res = true;
	}
      gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
			   || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
    }

  /* Canonicalize back MEM_REFs to plain reference trees if the object
     accessed is a decl that has the same access semantics as the MEM_REF.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && MR_DEPENDENCE_CLIQUE (*t) == 0)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
      if (/* Same volatile qualification.  */
	  TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
	  /* Same TBAA behavior with -fstrict-aliasing.  */
	  && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
	  && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
	      == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
	  /* Same alignment.  */
	  && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
	  /* We have to look out here to not drop a required conversion
	     from the rhs to the lhs if *t appears on the lhs or vice-versa
	     if it appears on the rhs.  Thus require strict type
	     compatibility.  */
	  && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
	{
	  *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
	  res = true;
	}
    }

  /* Canonicalize TARGET_MEM_REF in particular with respect to
     the indexes becoming constant.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  res = true;
	}
    }

  return res;
}
4754
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  VALUEIZE maps SSA names to values (or
   returns NULL_TREE to refuse).  Returns true if the statement changed.
   Works in three phases: canonicalize memory references / operand
   order, dispatch to match.pd-based gimple_simplify, then fold the
   statement's main computation per statement kind.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
{
  bool changed = false;
  gimple *stmt = gsi_stmt (*gsi);
  bool nowarning = gimple_no_warning_p (stmt);
  unsigned i;
  fold_defer_overflow_warnings ();

  /* First do required canonicalization of [TARGET_]MEM_REF addresses
     after propagation.
     ??? This shouldn't be done in generic folding but in the
     propagation helpers which also know whether an address was
     propagated.
     Also canonicalize operand order.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
	{
	  tree *rhs = gimple_assign_rhs1_ptr (stmt);
	  if ((REFERENCE_CLASS_P (*rhs)
	       || TREE_CODE (*rhs) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (rhs))
	    changed = true;
	  tree *lhs = gimple_assign_lhs_ptr (stmt);
	  if (REFERENCE_CLASS_P (*lhs)
	      && maybe_canonicalize_mem_ref_addr (lhs))
	    changed = true;
	}
      else
	{
	  /* Canonicalize operand order.  */
	  enum tree_code code = gimple_assign_rhs_code (stmt);
	  if (TREE_CODE_CLASS (code) == tcc_comparison
	      || commutative_tree_code (code)
	      || commutative_ternary_tree_code (code))
	    {
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      tree rhs2 = gimple_assign_rhs2 (stmt);
	      if (tree_swap_operands_p (rhs1, rhs2))
		{
		  gimple_assign_set_rhs1 (stmt, rhs2);
		  gimple_assign_set_rhs2 (stmt, rhs1);
		  /* Swapping comparison operands requires swapping the
		     comparison code too.  */
		  if (TREE_CODE_CLASS (code) == tcc_comparison)
		    gimple_assign_set_rhs_code (stmt,
						swap_tree_comparison (code));
		  changed = true;
		}
	    }
	}
      break;
    case GIMPLE_CALL:
      {
	/* Canonicalize memory references in arguments and the lhs.  */
	for (i = 0; i < gimple_call_num_args (stmt); ++i)
	  {
	    tree *arg = gimple_call_arg_ptr (stmt, i);
	    if (REFERENCE_CLASS_P (*arg)
		&& maybe_canonicalize_mem_ref_addr (arg))
	      changed = true;
	  }
	tree *lhs = gimple_call_lhs_ptr (stmt);
	if (*lhs
	    && REFERENCE_CLASS_P (*lhs)
	    && maybe_canonicalize_mem_ref_addr (lhs))
	  changed = true;
	break;
      }
    case GIMPLE_ASM:
      {
	/* Canonicalize memory references in asm outputs and inputs.  */
	gasm *asm_stmt = as_a <gasm *> (stmt);
	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if (REFERENCE_CLASS_P (op)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if ((REFERENCE_CLASS_P (op)
		 || TREE_CODE (op) == ADDR_EXPR)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
      }
      break;
    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree *val = gimple_debug_bind_get_value_ptr (stmt);
	  if (*val
	      && (REFERENCE_CLASS_P (*val)
		  || TREE_CODE (*val) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (val))
	    changed = true;
	}
      break;
    case GIMPLE_COND:
      {
	/* Canonicalize operand order.  */
	tree lhs = gimple_cond_lhs (stmt);
	tree rhs = gimple_cond_rhs (stmt);
	if (tree_swap_operands_p (lhs, rhs))
	  {
	    gcond *gc = as_a <gcond *> (stmt);
	    gimple_cond_set_lhs (gc, rhs);
	    gimple_cond_set_rhs (gc, lhs);
	    gimple_cond_set_code (gc,
				  swap_tree_comparison (gimple_cond_code (gc)));
	    changed = true;
	  }
      }
      /* Falls through to the (empty) default.  */
    default:;
    }

  /* Dispatch to pattern-based folding.  */
  if (!inplace
      || is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_COND)
    {
      gimple_seq seq = NULL;
      gimple_match_op res_op;
      if (gimple_simplify (stmt, &res_op, inplace ? NULL : &seq,
			   valueize, valueize))
	{
	  if (replace_stmt_with_simplification (gsi, &res_op, &seq, inplace))
	    changed = true;
	  else
	    gimple_seq_discard (seq);
	}
    }

  /* Re-fetch: the simplification may have replaced the statement.  */
  stmt = gsi_stmt (*gsi);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	/* Try to canonicalize for boolean-typed X the comparisons
	   X == 0, X == 1, X != 0, and X != 1.  */
	if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	    || gimple_assign_rhs_code (stmt) == NE_EXPR)
	  {
	    tree lhs = gimple_assign_lhs (stmt);
	    tree op1 = gimple_assign_rhs1 (stmt);
	    tree op2 = gimple_assign_rhs2 (stmt);
	    tree type = TREE_TYPE (op1);

	    /* Check whether the comparison operands are of the same boolean
	       type as the result type is.
	       Check that second operand is an integer-constant with value
	       one or zero.  */
	    if (TREE_CODE (op2) == INTEGER_CST
		&& (integer_zerop (op2) || integer_onep (op2))
		&& useless_type_conversion_p (TREE_TYPE (lhs), type))
	      {
		enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
		bool is_logical_not = false;

		/* X == 0 and X != 1 is a logical-not of X
		   X == 1 and X != 0 is X.  */
		if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		    || (cmp_code == NE_EXPR && integer_onep (op2)))
		  is_logical_not = true;

		if (is_logical_not == false)
		  gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
		/* Only for one-bit precision typed X the transformation
		   !X -> ~X is valid.  */
		else if (TYPE_PRECISION (type) == 1)
		  gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
		/* Otherwise we use !X -> X ^ 1.  */
		else
		  gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
						  build_int_cst (type, 1));
		changed = true;
		break;
	      }
	  }

	unsigned old_num_ops = gimple_num_ops (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	/* In-place folding may not grow the number of operand slots.  */
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	size_t noutputs;
	const char **oconstraints;
	const char *constraint;
	bool allows_mem, allows_reg;

	noutputs = gimple_asm_noutputs (asm_stmt);
	oconstraints = XALLOCAVEC (const char *, noutputs);

	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    oconstraints[i]
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, true)) != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    constraint
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    /* Inputs may only be folded as lvalues when the constraint
	       allows memory but not registers.  */
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, !allows_reg && allows_mem))
		   != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      /* Fold the reference the address is taken of and rebuild
		 the ADDR_EXPR with the original pointer type.  */
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref, false);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    case GIMPLE_RETURN:
      {
	greturn *ret_stmt = as_a<greturn *> (stmt);
	tree ret = gimple_return_retval(ret_stmt);

	/* Propagate the valueized return value when legal.  */
	if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
	  {
	    tree val = valueize (ret);
	    if (val && val != ret
		&& may_propagate_copy (ret, val))
	      {
		gimple_return_set_retval (ret_stmt, val);
		changed = true;
	      }
	  }
      }
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
  return changed;
}
5078
/* Valueization callback that ends up not following SSA edges.  Returning
   NULL_TREE tells the folder it must not look through any SSA definition
   when valueizing an operand.  */

tree
no_follow_ssa_edges (tree)
{
  return NULL_TREE;
}
5086
5087 /* Valueization callback that ends up following single-use SSA edges only. */
5088
5089 tree
5090 follow_single_use_edges (tree val)
5091 {
5092 if (TREE_CODE (val) == SSA_NAME
5093 && !has_single_use (val))
5094 return NULL_TREE;
5095 return val;
5096 }
5097
/* Valueization callback that follows all SSA edges.  Returning the value
   unchanged tells the folder it may look through every SSA definition.  */

tree
follow_all_ssa_edges (tree val)
{
  return val;
}
5105
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.
   This variant does not follow SSA edges when valueizing operands.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
}
5118
/* Like the fold_stmt overload above, but valueize operands via the
   VALUEIZE callback, which may follow SSA edges.  */

bool
fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
{
  return fold_stmt_1 (gsi, false, valueize);
}
5124
5125 /* Perform the minimal folding on statement *GSI. Only operations like
5126 *&x created by constant propagation are handled. The statement cannot
5127 be replaced with a new one. Return true if the statement was
5128 changed, false otherwise.
5129 The statement *GSI should be in valid gimple form but may
5130 be in unfolded state as resulting from for example constant propagation
5131 which can produce *&x = 0. */
5132
5133 bool
5134 fold_stmt_inplace (gimple_stmt_iterator *gsi)
5135 {
5136 gimple *stmt = gsi_stmt (*gsi);
5137 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
5138 gcc_assert (gsi_stmt (*gsi) == stmt);
5139 return changed;
5140 }
5141
5142 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
5143 if EXPR is null or we don't know how.
5144 If non-null, the result always has boolean type. */
5145
5146 static tree
5147 canonicalize_bool (tree expr, bool invert)
5148 {
5149 if (!expr)
5150 return NULL_TREE;
5151 else if (invert)
5152 {
5153 if (integer_nonzerop (expr))
5154 return boolean_false_node;
5155 else if (integer_zerop (expr))
5156 return boolean_true_node;
5157 else if (TREE_CODE (expr) == SSA_NAME)
5158 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
5159 build_int_cst (TREE_TYPE (expr), 0));
5160 else if (COMPARISON_CLASS_P (expr))
5161 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
5162 boolean_type_node,
5163 TREE_OPERAND (expr, 0),
5164 TREE_OPERAND (expr, 1));
5165 else
5166 return NULL_TREE;
5167 }
5168 else
5169 {
5170 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5171 return expr;
5172 if (integer_nonzerop (expr))
5173 return boolean_true_node;
5174 else if (integer_zerop (expr))
5175 return boolean_false_node;
5176 else if (TREE_CODE (expr) == SSA_NAME)
5177 return fold_build2 (NE_EXPR, boolean_type_node, expr,
5178 build_int_cst (TREE_TYPE (expr), 0));
5179 else if (COMPARISON_CLASS_P (expr))
5180 return fold_build2 (TREE_CODE (expr),
5181 boolean_type_node,
5182 TREE_OPERAND (expr, 0),
5183 TREE_OPERAND (expr, 1));
5184 else
5185 return NULL_TREE;
5186 }
5187 }
5188
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  Returns true only if the equivalence is proven; false
   means "don't know".  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple *s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      /* A boolean NAME is equivalent to (NAME != 0) and to (NAME == 1).  */
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  /* (name != 0) and (name == 1) are equivalent to name's
	     defining comparison...  */
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  /* ...while (name == 0) and (name != 1) are equivalent to its
	     inversion.  */
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}
5246
5247 /* Check to see if two boolean expressions OP1 and OP2 are logically
5248 equivalent. */
5249
5250 static bool
5251 same_bool_result_p (const_tree op1, const_tree op2)
5252 {
5253 /* Simple cases first. */
5254 if (operand_equal_p (op1, op2, 0))
5255 return true;
5256
5257 /* Check the cases where at least one of the operands is a comparison.
5258 These are a bit smarter than operand_equal_p in that they apply some
5259 identifies on SSA_NAMEs. */
5260 if (COMPARISON_CLASS_P (op2)
5261 && same_bool_comparison_p (op1, TREE_CODE (op2),
5262 TREE_OPERAND (op2, 0),
5263 TREE_OPERAND (op2, 1)))
5264 return true;
5265 if (COMPARISON_CLASS_P (op1)
5266 && same_bool_comparison_p (op2, TREE_CODE (op1),
5267 TREE_OPERAND (op1, 0),
5268 TREE_OPERAND (op1, 1)))
5269 return true;
5270
5271 /* Default case. */
5272 return false;
5273 }
5274
5275 /* Forward declarations for some mutually recursive functions. */
5276
5277 static tree
5278 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5279 enum tree_code code2, tree op2a, tree op2b);
5280 static tree
5281 and_var_with_comparison (tree var, bool invert,
5282 enum tree_code code2, tree op2a, tree op2b);
5283 static tree
5284 and_var_with_comparison_1 (gimple *stmt,
5285 enum tree_code code2, tree op2a, tree op2b);
5286 static tree
5287 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5288 enum tree_code code2, tree op2a, tree op2b);
5289 static tree
5290 or_var_with_comparison (tree var, bool invert,
5291 enum tree_code code2, tree op2a, tree op2b);
5292 static tree
5293 or_var_with_comparison_1 (gimple *stmt,
5294 enum tree_code code2, tree op2a, tree op2b);
5295
5296 /* Helper function for and_comparisons_1: try to simplify the AND of the
5297 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5298 If INVERT is true, invert the value of the VAR before doing the AND.
5299 Return NULL_EXPR if we can't simplify this to a single expression. */
5300
5301 static tree
5302 and_var_with_comparison (tree var, bool invert,
5303 enum tree_code code2, tree op2a, tree op2b)
5304 {
5305 tree t;
5306 gimple *stmt = SSA_NAME_DEF_STMT (var);
5307
5308 /* We can only deal with variables whose definitions are assignments. */
5309 if (!is_gimple_assign (stmt))
5310 return NULL_TREE;
5311
5312 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5313 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
5314 Then we only have to consider the simpler non-inverted cases. */
5315 if (invert)
5316 t = or_var_with_comparison_1 (stmt,
5317 invert_tree_comparison (code2, false),
5318 op2a, op2b);
5319 else
5320 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
5321 return canonicalize_bool (t, invert);
5322 }
5323
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.
     TRUE_TEST_VAR/FALSE_TEST_VAR remember whether the second comparison
     tests OP2A for truth or falsehood; they are reused further down.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2,
				  op2a,
				  op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      /* Partial result from reassociating with INNER1, if any.  */
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
5486
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  CMP below orders the two constants.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
	 return that or FALSE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	      else
		return boolean_false_node;
	    }
	}
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	      else
		return boolean_false_node;
	    }
	}

      /* Same business with inequality tests.  Here VAL tells whether the
	 != test is implied by the other comparison, making it redundant.  */
      else if (code1 == NE_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp != 0); break;
	    case NE_EXPR: val = (cmp == 0); break;
	    case LT_EXPR: val = (cmp >= 0); break;
	    case GT_EXPR: val = (cmp <= 0); break;
	    case LE_EXPR: val = (cmp > 0); break;
	    case GE_EXPR: val = (cmp < 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == NE_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp <= 0); break;
	    case GT_EXPR: val = (cmp >= 0); break;
	    case LE_EXPR: val = (cmp < 0); break;
	    case GE_EXPR: val = (cmp > 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Likewise choose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Check for singleton ranges: (x >= c AND x <= c) => x == c.  */
      else if (cmp == 0
	       && ((code1 == LE_EXPR && code2 == GE_EXPR)
		   || (code1 == GE_EXPR && code2 == LE_EXPR)))
	return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges.  */
      else if (cmp <= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_false_node;
      else if (cmp >= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* INVERT is true for (NAME == 0) and (NAME != 1), i.e. when the
	 first comparison tests NAME for falsehood.  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);
		  
		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (arg, invert, code2,
						      op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
5740
5741 /* Try to simplify the AND of two comparisons, specified by
5742 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
5743 If this can be simplified to a single expression (without requiring
5744 introducing more SSA variables to hold intermediate values),
5745 return the resulting tree. Otherwise return NULL_TREE.
5746 If the result expression is non-null, it has boolean type. */
5747
5748 tree
5749 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
5750 enum tree_code code2, tree op2a, tree op2b)
5751 {
5752 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5753 if (t)
5754 return t;
5755 else
5756 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5757 }
5758
5759 /* Helper function for or_comparisons_1: try to simplify the OR of the
5760 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5761 If INVERT is true, invert the value of VAR before doing the OR.
5762 Return NULL_EXPR if we can't simplify this to a single expression. */
5763
5764 static tree
5765 or_var_with_comparison (tree var, bool invert,
5766 enum tree_code code2, tree op2a, tree op2b)
5767 {
5768 tree t;
5769 gimple *stmt = SSA_NAME_DEF_STMT (var);
5770
5771 /* We can only deal with variables whose definitions are assignments. */
5772 if (!is_gimple_assign (stmt))
5773 return NULL_TREE;
5774
5775 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5776 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
5777 Then we only have to consider the simpler non-inverted cases. */
5778 if (invert)
5779 t = and_var_with_comparison_1 (stmt,
5780 invert_tree_comparison (code2, false),
5781 op2a, op2b);
5782 else
5783 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
5784 return canonicalize_bool (t, invert);
5785 }
5786
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (gimple *stmt,
			  enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true.
     TRUE_TEST_VAR/FALSE_TEST_VAR remember whether the second comparison
     tests OP2A for truth or falsehood; they are reused further down.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_true_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (innercode,
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 code2,
				 op2a,
				 op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      /* Partial result from reassociating with INNER1, if any.  */
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
	 inner1 OR (inner1 AND inner2) => inner1
	 !inner1 OR (inner1 OR inner2) => true
	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the OR across the inner tests.
	 Compute the first partial result, (inner1 OR (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (t OR inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_or)
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (integer_zerop (t))
		return inner2;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner2 OR (op2a code op2b)))  */
	  else if (integer_zerop (t))
	    return boolean_false_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (inner1 OR t)  */
	  if (is_or)
	    {
	      if (integer_zerop (t))
		return inner1;
	      else if (integer_onep (t))
		return boolean_true_node;
	      /* If both are the same, we can apply the identity
		 (x OR x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner1 OR (op2a code2 op2b)))
	     => (t AND partial)  */
	  else
	    {
	      if (integer_zerop (t))
		return boolean_false_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed AND expression.  The
		     interesting case is when at least one is true.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_onep (partial))
		    return t;
		  else if (integer_onep (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
5950
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      /* CMP is negative, zero or positive for op1b <, == or > op2b.  */
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a != op1b), we should either be able to
	 return that or TRUE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	    }
	}
      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	    }
	}

      /* See if an equality test is redundant with the other comparison.  */
      else if (code1 == EQ_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == EQ_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Likewise choose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Check for singleton ranges: (x < C) | (x > C) becomes x != C.  */
      else if (cmp == 0
	       && ((code1 == LT_EXPR && code2 == GT_EXPR)
		   || (code1 == GT_EXPR && code2 == LT_EXPR)))
	return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all.  */
      else if (cmp >= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_true_node;
      else if (cmp <= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_true_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* INVERT is true iff the first comparison tests NAME's negation,
	 i.e. (NAME == 0) or (NAME != 1).  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return or_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ORed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
			{
			  /* This PHI arg forces the OR to true; all args
			     must agree on boolean_true_node.  */
			  if (!result)
			    result = boolean_true_node;
			  else if (!integer_onep (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = or_var_with_comparison (arg, invert, code2,
						     op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
6204
6205 /* Try to simplify the OR of two comparisons, specified by
6206 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
6207 If this can be simplified to a single expression (without requiring
6208 introducing more SSA variables to hold intermediate values),
6209 return the resulting tree. Otherwise return NULL_TREE.
6210 If the result expression is non-null, it has boolean type. */
6211
6212 tree
6213 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
6214 enum tree_code code2, tree op2a, tree op2b)
6215 {
6216 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
6217 if (t)
6218 return t;
6219 else
6220 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
6221 }
6222
6223
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.

   Either NULL_TREE, a simplified but non-constant or a constant
   is returned.

   ??? This should go into a gimple-fold-inline.h file to be eventually
   privatized with the single valueize function used in the various TUs
   to avoid the indirect function call overhead.  */

tree
gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
				tree (*gvalueize) (tree))
{
  gimple_match_op res_op;
  /* ??? The SSA propagators do not correctly deal with following SSA use-def
     edges if there are intermediate VARYING defs.  For this reason
     do not follow SSA edges here even though SCCVN can technically
     just deal fine with that.  */
  if (gimple_simplify (stmt, &res_op, NULL, gvalueize, valueize))
    {
      tree res = NULL_TREE;
      if (gimple_simplified_result_is_gimple_val (&res_op))
	res = res_op.ops[0];
      else if (mprts_hook)
	res = mprts_hook (&res_op);
      if (res)
	{
	  if (dump_file && dump_flags & TDF_DETAILS)
	    {
	      fprintf (dump_file, "Match-and-simplified ");
	      print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, res);
	      fprintf (dump_file, "\n");
	    }
	  return res;
	}
    }

  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code subcode = gimple_assign_rhs_code (stmt);

	switch (get_gimple_rhs_class (subcode))
	  {
	  case GIMPLE_SINGLE_RHS:
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);
	      enum tree_code_class kind = TREE_CODE_CLASS (subcode);

	      if (TREE_CODE (rhs) == SSA_NAME)
		{
		  /* If the RHS is an SSA_NAME, return its known constant value,
		     if any.  */
		  return (*valueize) (rhs);
		}
	      /* Handle propagating invariant addresses into address
		 operations.  */
	      else if (TREE_CODE (rhs) == ADDR_EXPR
		       && !is_gimple_min_invariant (rhs))
		{
		  poly_int64 offset = 0;
		  tree base;
		  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
							  &offset,
							  valueize);
		  if (base
		      && (CONSTANT_CLASS_P (base)
			  || decl_address_invariant_p (base)))
		    return build_invariant_address (TREE_TYPE (rhs),
						    base, offset);
		}
	      /* A vector CONSTRUCTOR with one value per lane folds to a
		 VECTOR_CST when every valueized element is a constant.  */
	      else if (TREE_CODE (rhs) == CONSTRUCTOR
		       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		       && known_eq (CONSTRUCTOR_NELTS (rhs),
				    TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
		{
		  unsigned i, nelts;
		  tree val;

		  nelts = CONSTRUCTOR_NELTS (rhs);
		  tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
		    {
		      val = (*valueize) (val);
		      if (TREE_CODE (val) == INTEGER_CST
			  || TREE_CODE (val) == REAL_CST
			  || TREE_CODE (val) == FIXED_CST)
			vec.quick_push (val);
		      else
			return NULL_TREE;
		    }

		  return vec.build ();
		}
	      if (subcode == OBJ_TYPE_REF)
		{
		  tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
		  /* If callee is constant, we can fold away the wrapper.  */
		  if (is_gimple_min_invariant (val))
		    return val;
		}

	      if (kind == tcc_reference)
		{
		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
		       || TREE_CODE (rhs) == REALPART_EXPR
		       || TREE_CODE (rhs) == IMAGPART_EXPR)
		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_unary_loc (EXPR_LOCATION (rhs),
					     TREE_CODE (rhs),
					     TREE_TYPE (rhs), val);
		    }
		  else if (TREE_CODE (rhs) == BIT_FIELD_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_ternary_loc (EXPR_LOCATION (rhs),
					       TREE_CODE (rhs),
					       TREE_TYPE (rhs), val,
					       TREE_OPERAND (rhs, 1),
					       TREE_OPERAND (rhs, 2));
		    }
		  else if (TREE_CODE (rhs) == MEM_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      /* Substitute a constant address into the MEM_REF and
			 fall through to the aggregate folder below.  */
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      if (TREE_CODE (val) == ADDR_EXPR
			  && is_gimple_min_invariant (val))
			{
			  tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
						  unshare_expr (val),
						  TREE_OPERAND (rhs, 1));
			  if (tem)
			    rhs = tem;
			}
		    }
		  return fold_const_aggregate_ref_1 (rhs, valueize);
		}
	      else if (kind == tcc_declaration)
		return get_symbol_constant_value (rhs);
	      return rhs;
	    }

	  case GIMPLE_UNARY_RHS:
	    return NULL_TREE;

	  case GIMPLE_BINARY_RHS:
	    /* Translate &x + CST into an invariant form suitable for
	       further propagation.  */
	    if (subcode == POINTER_PLUS_EXPR)
	      {
		tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
		tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
		if (TREE_CODE (op0) == ADDR_EXPR
		    && TREE_CODE (op1) == INTEGER_CST)
		  {
		    tree off = fold_convert (ptr_type_node, op1);
		    return build_fold_addr_expr_loc
			(loc,
			 fold_build2 (MEM_REF,
				      TREE_TYPE (TREE_TYPE (op0)),
				      unshare_expr (op0), off));
		  }
	      }
	    /* Canonicalize bool != 0 and bool == 0 appearing after
	       valueization.  While gimple_simplify handles this
	       it can get confused by the ~X == 1 -> X == 0 transform
	       which we can't reduce to a SSA name or a constant
	       (and we have no way to tell gimple_simplify to not
	       consider those transforms in the first place).  */
	    else if (subcode == EQ_EXPR
		     || subcode == NE_EXPR)
	      {
		tree lhs = gimple_assign_lhs (stmt);
		tree op0 = gimple_assign_rhs1 (stmt);
		if (useless_type_conversion_p (TREE_TYPE (lhs),
					       TREE_TYPE (op0)))
		  {
		    tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
		    op0 = (*valueize) (op0);
		    /* Put an eventual constant into op1.  */
		    if (TREE_CODE (op0) == INTEGER_CST)
		      std::swap (op0, op1);
		    if (TREE_CODE (op1) == INTEGER_CST
			&& ((subcode == NE_EXPR && integer_zerop (op1))
			    || (subcode == EQ_EXPR && integer_onep (op1))))
		      return op0;
		  }
	      }
	    return NULL_TREE;

	  case GIMPLE_TERNARY_RHS:
	    {
	      /* Handle ternary operators that can appear in GIMPLE form.  */
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
	      tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
	      tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
	      return fold_ternary_loc (loc, subcode,
				       gimple_expr_type (stmt), op0, op1, op2);
	    }

	  default:
	    gcc_unreachable ();
	  }
      }

    case GIMPLE_CALL:
      {
	tree fn;
	gcall *call_stmt = as_a <gcall *> (stmt);

	if (gimple_call_internal_p (stmt))
	  {
	    enum tree_code subcode = ERROR_MARK;
	    switch (gimple_call_internal_fn (stmt))
	      {
	      case IFN_UBSAN_CHECK_ADD:
		subcode = PLUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_SUB:
		subcode = MINUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_MUL:
		subcode = MULT_EXPR;
		break;
	      case IFN_BUILTIN_EXPECT:
		  {
		    /* __builtin_expect folds to its first argument when
		       that argument valueizes to a constant.  */
		    tree arg0 = gimple_call_arg (stmt, 0);
		    tree op0 = (*valueize) (arg0);
		    if (TREE_CODE (op0) == INTEGER_CST)
		      return op0;
		    return NULL_TREE;
		  }
	      default:
		return NULL_TREE;
	      }
	    tree arg0 = gimple_call_arg (stmt, 0);
	    tree arg1 = gimple_call_arg (stmt, 1);
	    tree op0 = (*valueize) (arg0);
	    tree op1 = (*valueize) (arg1);

	    if (TREE_CODE (op0) != INTEGER_CST
		|| TREE_CODE (op1) != INTEGER_CST)
	      {
		switch (subcode)
		  {
		  case MULT_EXPR:
		    /* x * 0 = 0 * x = 0 without overflow.  */
		    if (integer_zerop (op0) || integer_zerop (op1))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  case MINUS_EXPR:
		    /* y - y = 0 without overflow.  */
		    if (operand_equal_p (op0, op1, 0))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  default:
		    break;
		  }
	      }
	    /* Only fold the UBSAN check when the result provably does not
	       overflow; otherwise the check must stay.  */
	    tree res
	      = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
	    if (res
		&& TREE_CODE (res) == INTEGER_CST
		&& !TREE_OVERFLOW (res))
	      return res;
	    return NULL_TREE;
	  }

	fn = (*valueize) (gimple_call_fn (stmt));
	if (TREE_CODE (fn) == ADDR_EXPR
	    && fndecl_built_in_p (TREE_OPERAND (fn, 0))
	    && gimple_builtin_call_types_compatible_p (stmt,
						       TREE_OPERAND (fn, 0)))
	  {
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree retval;
	    unsigned i;
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      args[i] = (*valueize) (gimple_call_arg (stmt, i));
	    retval = fold_builtin_call_array (loc,
					      gimple_call_return_type (call_stmt),
					      fn, gimple_call_num_args (stmt), args);
	    if (retval)
	      {
		/* fold_call_expr wraps the result inside a NOP_EXPR.  */
		STRIP_NOPS (retval);
		retval = fold_convert (gimple_call_return_type (call_stmt),
				       retval);
	      }
	    return retval;
	  }
	return NULL_TREE;
      }

    default:
      return NULL_TREE;
    }
}
6528
6529 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6530 Returns NULL_TREE if folding to a constant is not possible, otherwise
6531 returns a constant according to is_gimple_min_invariant. */
6532
6533 tree
6534 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
6535 {
6536 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
6537 if (res && is_gimple_min_invariant (res))
6538 return res;
6539 return NULL_TREE;
6540 }
6541
6542
/* The following set of functions are supposed to fold references using
   their constant initializers.  */

/* See if we can find constructor defining value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */
static tree
get_base_constructor (tree base, poly_int64_pod *bit_offset,
		      tree (*valueize)(tree))
{
  poly_int64 bit_offset2, size, max_size;
  bool reverse;

  if (TREE_CODE (base) == MEM_REF)
    {
      /* Fold the MEM_REF's constant offset into BIT_OFFSET and strip the
	 address operation so we can look at the underlying decl.  */
      poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
      if (!boff.to_shwi (bit_offset))
	return NULL_TREE;

      if (valueize
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }
  else if (valueize
	   && TREE_CODE (base) == SSA_NAME)
    base = valueize (base);

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
	tree init = ctor_for_folding (base);

	/* Our semantic is exact opposite of ctor_for_folding;
	   NULL means unknown, while error_mark_node is 0.  */
	if (init == error_mark_node)
	  return NULL_TREE;
	if (!init)
	  return error_mark_node;
	return init;
      }

    case VIEW_CONVERT_EXPR:
      return get_base_constructor (TREE_OPERAND (base, 0),
				   bit_offset, valueize);

    case ARRAY_REF:
    case COMPONENT_REF:
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
				      &reverse);
      /* Punt when the access size is unknown or variable.  */
      if (!known_size_p (max_size) || maybe_ne (size, max_size))
	return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case CONSTRUCTOR:
      return base;

    default:
      if (CONSTANT_CLASS_P (base))
	return base;

      return NULL_TREE;
    }
}
6621
/* CTOR is CONSTRUCTOR of an array type.  Fold a reference of SIZE bits
   to the memory at bit OFFSET.  When non-null, TYPE is the expected
   type of the reference; otherwise the type of the referenced element
   is used instead.  When SIZE is zero, attempt to fold a reference to
   the entire element which OFFSET refers to.  Increment *SUBOFF by
   the bit offset of the accessed element.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size,
			   tree from_decl,
			   unsigned HOST_WIDE_INT *suboff)
{
  offset_int low_bound;
  offset_int elt_size;
  offset_int access_index;
  tree domain_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects makes no sense.  */
      if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
	return NULL_TREE;
      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = 0;
  /* Static constructors for variably sized objects makes no sense.  */
  if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
    return NULL_TREE;
  elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* When TYPE is non-null, verify that it specifies a constant-sized
     access not larger than size of array element.  */
  if (type
      && (!TYPE_SIZE_UNIT (type)
	  || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
	  || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type))
	  || elt_size == 0))
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
				 elt_size);
  access_index += low_bound;

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  /* See if the array field is large enough to span whole access.  We do not
     care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
    return NULL_TREE;
  if (tree val = get_array_ctor_element_at_index (ctor, access_index))
    {
      if (!size && TREE_CODE (val) != CONSTRUCTOR)
	{
	  /* For the final reference to the entire accessed element
	     (SIZE is zero), reset INNER_OFFSET, disregard TYPE (which
	     may be null) in favor of the type of the element, and set
	     SIZE to the size of the accessed element.  */
	  inner_offset = 0;
	  type = TREE_TYPE (val);
	  size = elt_size.to_uhwi () * BITS_PER_UNIT;
	}

      /* Record the element's bit offset and recurse into its value.  */
      *suboff += (access_index * elt_size * BITS_PER_UNIT).to_uhwi ();
      return fold_ctor_reference (type, val, inner_offset, size, from_decl,
				  suboff);
    }

  /* Memory not explicitly mentioned in constructor is 0 (or
     the reference is out of range).  */
  return type ? build_zero_cst (type) : NULL_TREE;
}
6702
/* CTOR is CONSTRUCTOR of an aggregate or vector.  Fold a reference
   of SIZE bits to the memory at bit OFFSET.  When non-null, TYPE
   is the expected type of the reference; otherwise the type of
   the referenced member is used instead.  When SIZE is zero,
   attempt to fold a reference to the entire member which OFFSET
   refers to.  Increment *SUBOFF by the bit offset of the accessed
   member.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl,
			      unsigned HOST_WIDE_INT *suboff)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);

      if (!field_size)
	{
	  /* Determine the size of the flexible array member from
	     the size of the initializer provided for it.  */
	  field_size = TYPE_SIZE (TREE_TYPE (cval));
	}

      /* Variable sized objects in static constructors makes no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      offset_int bitoffset
	= (wi::to_offset (field_offset)
	   + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
      /* Compute bit offset where the field ends.  */
      offset_int bitoffset_end;
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + wi::to_offset (field_size);
      else
	bitoffset_end = 0;

      /* Compute the bit offset of the end of the desired access.
	 As a special case, if the size of the desired access is
	 zero, assume the access is to the entire field (and let
	 the caller make any necessary adjustments by storing
	 the actual bounds of the field in FIELDBOUNDS).  */
      offset_int access_end = offset_int (offset);
      if (size)
	access_end += size;
      else
	access_end = bitoffset_end;

      /* Is there any overlap between the desired access at
	 [OFFSET, OFFSET+SIZE) and the offset of the field within
	 the object at [BITOFFSET, BITOFFSET_END)?  */
      if (wi::cmps (access_end, bitoffset) > 0
	  && (field_size == NULL_TREE
	      || wi::lts_p (offset, bitoffset_end)))
	{
	  *suboff += bitoffset.to_uhwi ();

	  if (!size && TREE_CODE (cval) != CONSTRUCTOR)
	    {
	      /* For the final reference to the entire accessed member
		 (SIZE is zero), reset OFFSET, disregard TYPE (which may
		 be null) in favor of the type of the member, and set
		 SIZE to the size of the accessed member.  */
	      offset = bitoffset.to_uhwi ();
	      type = TREE_TYPE (cval);
	      size = (bitoffset_end - bitoffset).to_uhwi ();
	    }

	  /* We do have overlap.  Now see if the field is large enough
	     to cover the access.  Give up for accesses that extend
	     beyond the end of the object or that span multiple fields.  */
	  if (wi::cmps (access_end, bitoffset_end) > 0)
	    return NULL_TREE;
	  if (offset < bitoffset)
	    return NULL_TREE;

	  /* Recurse into the field's value at the offset within it.  */
	  offset_int inner_offset = offset_int (offset) - bitoffset;
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl, suboff);
	}
    }
  /* Memory not explicitly mentioned in constructor is 0.  */
  return type ? build_zero_cst (type) : NULL_TREE;
}
6802
/* CTOR is value initializing memory.  Fold a reference of TYPE and
   bit size POLY_SIZE to the memory at bit POLY_OFFSET.  When SIZE
   is zero, attempt to fold a reference to the entire subobject
   which OFFSET refers to.  This is used when folding accesses to
   string members of aggregates.  When non-null, set *SUBOFF to
   the bit offset of the accessed subobject.  */

tree
fold_ctor_reference (tree type, tree ctor, const poly_uint64 &poly_offset,
		     const poly_uint64 &poly_size, tree from_decl,
		     unsigned HOST_WIDE_INT *suboff /* = NULL */)
{
  tree ret;

  /* We found the field with exact match.  */
  if (type
      && useless_type_conversion_p (type, TREE_TYPE (ctor))
      && known_eq (poly_offset, 0U))
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* The remaining optimizations need a constant size and offset.  */
  unsigned HOST_WIDE_INT size, offset;
  if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
    return NULL_TREE;

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && !compare_tree_int (TYPE_SIZE (type), size)
      && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      if (ret)
	{
	  ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
	  if (ret)
	    STRIP_USELESS_TYPE_CONVERSION (ret);
	}
      return ret;
    }
  /* For constants and byte-aligned/sized reads try to go through
     native_encode/interpret.  */
  if (CONSTANT_CLASS_P (ctor)
      && BITS_PER_UNIT == 8
      && offset % BITS_PER_UNIT == 0
      && size % BITS_PER_UNIT == 0
      && size <= MAX_BITSIZE_MODE_ANY_MODE)
    {
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
				    offset / BITS_PER_UNIT);
      if (len > 0)
	return native_interpret_expr (type, buf, len);
    }
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {
      /* Callers that don't care about the subobject offset still need
	 a valid *SUBOFF for the recursion.  */
      unsigned HOST_WIDE_INT dummy = 0;
      if (!suboff)
	suboff = &dummy;

      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
	  || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
	return fold_array_ctor_reference (type, ctor, offset, size,
					  from_decl, suboff);

      return fold_nonarray_ctor_reference (type, ctor, offset, size,
					   from_decl, suboff);
    }

  return NULL_TREE;
}
6875
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  poly_int64 offset, size, max_size;
  tree tem;
  bool reverse;

  /* Never fold volatile accesses away.  */
  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (DECL_P (t))
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && poly_int_tree_p (idx))
	{
	  tree low_bound, unit_size;

	  /* If the resulting bit-offset is constant, track it.  */
	  if ((low_bound = array_ref_low_bound (t),
	       poly_int_tree_p (low_bound))
	      && (unit_size = array_ref_element_size (t),
		  tree_fits_uhwi_p (unit_size)))
	    {
	      /* Index relative to the array's low bound, sign-extended
		 in the index type's precision.  */
	      poly_offset_int woffset
		= wi::sext (wi::to_poly_offset (idx)
			    - wi::to_poly_offset (low_bound),
			    TYPE_PRECISION (TREE_TYPE (idx)));

	      if (woffset.to_shwi (&offset))
		{
		  /* TODO: This code seems wrong, multiply then check
		     to see if it fits.  */
		  offset *= tree_to_uhwi (unit_size);
		  offset *= BITS_PER_UNIT;

		  base = TREE_OPERAND (t, 0);
		  ctor = get_base_constructor (base, &offset, valueize);
		  /* Empty constructor.  Always fold to 0.  */
		  if (ctor == error_mark_node)
		    return build_zero_cst (TREE_TYPE (t));
		  /* Out of bound array access.  Value is undefined,
		     but don't fold.  */
		  if (maybe_lt (offset, 0))
		    return NULL_TREE;
		  /* We can not determine ctor.  */
		  if (!ctor)
		    return NULL_TREE;
		  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					      tree_to_uhwi (unit_size)
					      * BITS_PER_UNIT,
					      base);
		}
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (!known_size_p (max_size) || maybe_ne (max_size, size))
	return NULL_TREE;
      /* We can not determine ctor.  */
      if (!ctor)
	return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (maybe_lt (offset, 0))
	return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
				  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	/* Fold the complex operand first, then extract the requested part.  */
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
6993
/* If T is a constant load from an aggregate, return the folded constant
   value, or NULL_TREE if no folding is possible.  Convenience wrapper
   around fold_const_aggregate_ref_1 with no SSA valueization hook.  */

tree
fold_const_aggregate_ref (tree t)
{
  return fold_const_aggregate_ref_1 (t, NULL);
}
6999
/* Lookup virtual method with index TOKEN in a virtual table V
   at OFFSET.
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by non-C++ produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
				   tree v,
				   unsigned HOST_WIDE_INT offset,
				   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  /* Be optimistic by default; the failure paths below reset this.  */
  if (can_refer)
    *can_refer = true;

  /* First of all double check we have virtual table.  */
  if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that ctor seems unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  /* Compute the bit offset of the entry TOKEN from the start of the
     vtable; SIZE is the size in bits of one vtable slot.  */
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
			       offset, size, NULL);
     but in a constant time.  We expect that frontend produced a simple
     array without indexed initializers.  */

  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));

  /* Bit offset converted to a constructor-element index.  */
  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* The C++ FE can now produce indexed fields, and we check if the indexes
     match.  */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      tree idx = CONSTRUCTOR_ELT (init, access_index)->index;
      gcc_checking_assert (!idx || tree_to_uhwi (idx) == access_index);
      STRIP_NOPS (fn);
    }
  else
    fn = NULL;

  /* For type inconsistent program we may end up looking up virtual method
     in virtual table that does not contain TOKEN entries.  We may overrun
     the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When cgraph node is missing and function is not public, we cannot
	 devirtualize.  This can happen in WHOPR when the actual method
	 ends up in other partition, because we found devirtualization
	 possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
	{
	  if (can_refer)
	    {
	      *can_refer = false;
	      return fn;
	    }
	  return NULL_TREE;
	}
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_node::get_create (fn);

  return fn;
}
7109
/* Return a declaration of a function which an OBJ_TYPE_REF references.  TOKEN
   is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by non-C++ produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
				  bool *can_refer)
{
  unsigned HOST_WIDE_INT offset;
  tree v;

  v = BINFO_VTABLE (known_binfo);
  /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;

  /* Decompose the vtable pointer value into the vtable VAR_DECL and a
     byte offset into it; fail if that is not possible.  */
  if (!vtable_pointer_value_to_vtable (v, &v, &offset))
    {
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
}
7138
/* Given a pointer value T, return a simplified version of an
   indirection through T, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view.  */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  /* Only handle plain pointers; ref-all pointers may alias anything and
     must not be folded to a typed reference.  */
  if (!POINTER_TYPE_P (subtype)
      || TYPE_REF_CAN_ALIAS_ALL (ptype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
        return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
       {
         tree type_domain = TYPE_DOMAIN (optype);
         tree min_val = size_zero_node;
         if (type_domain && TYPE_MIN_VALUE (type_domain))
           min_val = TYPE_MIN_VALUE (type_domain);
	 /* Only fold when the lower bound is a constant index.  */
	 if (TREE_CODE (min_val) == INTEGER_CST)
	   return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
       }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && useless_type_conversion_p (type, TREE_TYPE (optype)))
        return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
               && useless_type_conversion_p (type, TREE_TYPE (optype)))
        {
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
          return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
        }
    }

  /* *(p + CST) -> ... */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
          unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
          tree part_width = TYPE_SIZE (type);
          unsigned HOST_WIDE_INT part_widthi
            = tree_to_shwi (part_width) / BITS_PER_UNIT;
          unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	  tree index = bitsize_int (indexi);
	  /* Only fold if the access is within the vector bounds.  */
	  if (known_lt (offset / part_widthi,
			TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
	    return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
				part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
        {
          tree size = TYPE_SIZE_UNIT (type);
	  /* Offset of exactly one element selects the imaginary part.  */
          if (tree_int_cst_equal (size, off))
            return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
        }

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    wide_int_to_tree (ptype, wi::to_wide (off)));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      /* Recurse to simplify the inner indirection first.  */
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
7259
7260 /* Return true if CODE is an operation that when operating on signed
7261 integer types involves undefined behavior on overflow and the
7262 operation can be expressed with unsigned arithmetic. */
7263
7264 bool
7265 arith_code_with_undefined_signed_overflow (tree_code code)
7266 {
7267 switch (code)
7268 {
7269 case PLUS_EXPR:
7270 case MINUS_EXPR:
7271 case MULT_EXPR:
7272 case NEGATE_EXPR:
7273 case POINTER_PLUS_EXPR:
7274 return true;
7275 default:
7276 return false;
7277 }
7278 }
7279
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple *stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  /* Convert every operand to the corresponding unsigned type; the
     conversion statements accumulate in STMTS ahead of STMT itself.  */
  for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      op = gimple_convert (&stmts, type, op);
      gimple_set_op (stmt, i, op);
    }
  /* Give STMT a fresh unsigned result; the original LHS is re-created
     from it by the final conversion below.  */
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  /* Pointer arithmetic becomes plain unsigned addition.  */
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  /* Convert the unsigned result back to the original LHS type.  */
  gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
7316
7317
7318 /* The valueization hook we use for the gimple_build API simplification.
7319 This makes us match fold_buildN behavior by only combining with
7320 statements in the sequence(s) we are currently building. */
7321
7322 static tree
7323 gimple_build_valueize (tree op)
7324 {
7325 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
7326 return op;
7327 return NULL_TREE;
7328 }
7329
7330 /* Build the expression CODE OP0 of type TYPE with location LOC,
7331 simplifying it first if possible. Returns the built
7332 expression value and appends statements possibly defining it
7333 to SEQ. */
7334
7335 tree
7336 gimple_build (gimple_seq *seq, location_t loc,
7337 enum tree_code code, tree type, tree op0)
7338 {
7339 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
7340 if (!res)
7341 {
7342 res = create_tmp_reg_or_ssa_name (type);
7343 gimple *stmt;
7344 if (code == REALPART_EXPR
7345 || code == IMAGPART_EXPR
7346 || code == VIEW_CONVERT_EXPR)
7347 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
7348 else
7349 stmt = gimple_build_assign (res, code, op0);
7350 gimple_set_location (stmt, loc);
7351 gimple_seq_add_stmt_without_update (seq, stmt);
7352 }
7353 return res;
7354 }
7355
7356 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
7357 simplifying it first if possible. Returns the built
7358 expression value and appends statements possibly defining it
7359 to SEQ. */
7360
7361 tree
7362 gimple_build (gimple_seq *seq, location_t loc,
7363 enum tree_code code, tree type, tree op0, tree op1)
7364 {
7365 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
7366 if (!res)
7367 {
7368 res = create_tmp_reg_or_ssa_name (type);
7369 gimple *stmt = gimple_build_assign (res, code, op0, op1);
7370 gimple_set_location (stmt, loc);
7371 gimple_seq_add_stmt_without_update (seq, stmt);
7372 }
7373 return res;
7374 }
7375
7376 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
7377 simplifying it first if possible. Returns the built
7378 expression value and appends statements possibly defining it
7379 to SEQ. */
7380
7381 tree
7382 gimple_build (gimple_seq *seq, location_t loc,
7383 enum tree_code code, tree type, tree op0, tree op1, tree op2)
7384 {
7385 tree res = gimple_simplify (code, type, op0, op1, op2,
7386 seq, gimple_build_valueize);
7387 if (!res)
7388 {
7389 res = create_tmp_reg_or_ssa_name (type);
7390 gimple *stmt;
7391 if (code == BIT_FIELD_REF)
7392 stmt = gimple_build_assign (res, code,
7393 build3 (code, type, op0, op1, op2));
7394 else
7395 stmt = gimple_build_assign (res, code, op0, op1, op2);
7396 gimple_set_location (stmt, loc);
7397 gimple_seq_add_stmt_without_update (seq, stmt);
7398 }
7399 return res;
7400 }
7401
7402 /* Build the call FN (ARG0) with a result of type TYPE
7403 (or no result if TYPE is void) with location LOC,
7404 simplifying it first if possible. Returns the built
7405 expression value (or NULL_TREE if TYPE is void) and appends
7406 statements possibly defining it to SEQ. */
7407
7408 tree
7409 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7410 tree type, tree arg0)
7411 {
7412 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
7413 if (!res)
7414 {
7415 gcall *stmt;
7416 if (internal_fn_p (fn))
7417 stmt = gimple_build_call_internal (as_internal_fn (fn), 1, arg0);
7418 else
7419 {
7420 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7421 stmt = gimple_build_call (decl, 1, arg0);
7422 }
7423 if (!VOID_TYPE_P (type))
7424 {
7425 res = create_tmp_reg_or_ssa_name (type);
7426 gimple_call_set_lhs (stmt, res);
7427 }
7428 gimple_set_location (stmt, loc);
7429 gimple_seq_add_stmt_without_update (seq, stmt);
7430 }
7431 return res;
7432 }
7433
7434 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
7435 (or no result if TYPE is void) with location LOC,
7436 simplifying it first if possible. Returns the built
7437 expression value (or NULL_TREE if TYPE is void) and appends
7438 statements possibly defining it to SEQ. */
7439
7440 tree
7441 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7442 tree type, tree arg0, tree arg1)
7443 {
7444 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
7445 if (!res)
7446 {
7447 gcall *stmt;
7448 if (internal_fn_p (fn))
7449 stmt = gimple_build_call_internal (as_internal_fn (fn), 2, arg0, arg1);
7450 else
7451 {
7452 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7453 stmt = gimple_build_call (decl, 2, arg0, arg1);
7454 }
7455 if (!VOID_TYPE_P (type))
7456 {
7457 res = create_tmp_reg_or_ssa_name (type);
7458 gimple_call_set_lhs (stmt, res);
7459 }
7460 gimple_set_location (stmt, loc);
7461 gimple_seq_add_stmt_without_update (seq, stmt);
7462 }
7463 return res;
7464 }
7465
7466 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
7467 (or no result if TYPE is void) with location LOC,
7468 simplifying it first if possible. Returns the built
7469 expression value (or NULL_TREE if TYPE is void) and appends
7470 statements possibly defining it to SEQ. */
7471
7472 tree
7473 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7474 tree type, tree arg0, tree arg1, tree arg2)
7475 {
7476 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
7477 seq, gimple_build_valueize);
7478 if (!res)
7479 {
7480 gcall *stmt;
7481 if (internal_fn_p (fn))
7482 stmt = gimple_build_call_internal (as_internal_fn (fn),
7483 3, arg0, arg1, arg2);
7484 else
7485 {
7486 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7487 stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
7488 }
7489 if (!VOID_TYPE_P (type))
7490 {
7491 res = create_tmp_reg_or_ssa_name (type);
7492 gimple_call_set_lhs (stmt, res);
7493 }
7494 gimple_set_location (stmt, loc);
7495 gimple_seq_add_stmt_without_update (seq, stmt);
7496 }
7497 return res;
7498 }
7499
7500 /* Build the conversion (TYPE) OP with a result of type TYPE
7501 with location LOC if such conversion is neccesary in GIMPLE,
7502 simplifying it first.
7503 Returns the built expression value and appends
7504 statements possibly defining it to SEQ. */
7505
7506 tree
7507 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
7508 {
7509 if (useless_type_conversion_p (type, TREE_TYPE (op)))
7510 return op;
7511 return gimple_build (seq, loc, NOP_EXPR, type, op);
7512 }
7513
7514 /* Build the conversion (ptrofftype) OP with a result of a type
7515 compatible with ptrofftype with location LOC if such conversion
7516 is neccesary in GIMPLE, simplifying it first.
7517 Returns the built expression value and appends
7518 statements possibly defining it to SEQ. */
7519
7520 tree
7521 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
7522 {
7523 if (ptrofftype_p (TREE_TYPE (op)))
7524 return op;
7525 return gimple_convert (seq, loc, sizetype, op);
7526 }
7527
7528 /* Build a vector of type TYPE in which each element has the value OP.
7529 Return a gimple value for the result, appending any new statements
7530 to SEQ. */
7531
7532 tree
7533 gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
7534 tree op)
7535 {
7536 if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
7537 && !CONSTANT_CLASS_P (op))
7538 return gimple_build (seq, loc, VEC_DUPLICATE_EXPR, type, op);
7539
7540 tree res, vec = build_vector_from_val (type, op);
7541 if (is_gimple_val (vec))
7542 return vec;
7543 if (gimple_in_ssa_p (cfun))
7544 res = make_ssa_name (type);
7545 else
7546 res = create_tmp_reg (type);
7547 gimple *stmt = gimple_build_assign (res, vec);
7548 gimple_set_location (stmt, loc);
7549 gimple_seq_add_stmt_without_update (seq, stmt);
7550 return res;
7551 }
7552
/* Build a vector from BUILDER, handling the case in which some elements
   are non-constant.  Return a gimple value for the result, appending any
   new instructions to SEQ.

   BUILDER must not have a stepped encoding on entry.  This is because
   the function is not geared up to handle the arithmetic that would
   be needed in the variable case, and any code building a vector that
   is known to be constant should use BUILDER->build () directly.  */

tree
gimple_build_vector (gimple_seq *seq, location_t loc,
		     tree_vector_builder *builder)
{
  gcc_assert (builder->nelts_per_pattern () <= 2);
  unsigned int encoded_nelts = builder->encoded_nelts ();
  /* Scan only the encoded elements; the remaining elements repeat the
     encoded patterns, so they are constant iff the encoded ones are.  */
  for (unsigned int i = 0; i < encoded_nelts; ++i)
    if (!TREE_CONSTANT ((*builder)[i]))
      {
	/* At least one element is non-constant: emit a CONSTRUCTOR
	   assignment covering the full (constant) number of elements.
	   Note elt () extrapolates elements beyond the encoded ones,
	   unlike the operator[] access above.  */
	tree type = builder->type ();
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
	vec<constructor_elt, va_gc> *v;
	vec_alloc (v, nelts);
	for (i = 0; i < nelts; ++i)
	  CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));

	tree res;
	if (gimple_in_ssa_p (cfun))
	  res = make_ssa_name (type);
	else
	  res = create_tmp_reg (type);
	gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
	gimple_set_location (stmt, loc);
	gimple_seq_add_stmt_without_update (seq, stmt);
	return res;
      }
  /* All encoded elements constant: build the VECTOR_CST directly.  */
  return builder->build ();
}
7590
7591 /* Return true if the result of assignment STMT is known to be non-negative.
7592 If the return value is based on the assumption that signed overflow is
7593 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7594 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7595
7596 static bool
7597 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7598 int depth)
7599 {
7600 enum tree_code code = gimple_assign_rhs_code (stmt);
7601 switch (get_gimple_rhs_class (code))
7602 {
7603 case GIMPLE_UNARY_RHS:
7604 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7605 gimple_expr_type (stmt),
7606 gimple_assign_rhs1 (stmt),
7607 strict_overflow_p, depth);
7608 case GIMPLE_BINARY_RHS:
7609 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7610 gimple_expr_type (stmt),
7611 gimple_assign_rhs1 (stmt),
7612 gimple_assign_rhs2 (stmt),
7613 strict_overflow_p, depth);
7614 case GIMPLE_TERNARY_RHS:
7615 return false;
7616 case GIMPLE_SINGLE_RHS:
7617 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
7618 strict_overflow_p, depth);
7619 case GIMPLE_INVALID_RHS:
7620 break;
7621 }
7622 gcc_unreachable ();
7623 }
7624
7625 /* Return true if return value of call STMT is known to be non-negative.
7626 If the return value is based on the assumption that signed overflow is
7627 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7628 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7629
7630 static bool
7631 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7632 int depth)
7633 {
7634 tree arg0 = gimple_call_num_args (stmt) > 0 ?
7635 gimple_call_arg (stmt, 0) : NULL_TREE;
7636 tree arg1 = gimple_call_num_args (stmt) > 1 ?
7637 gimple_call_arg (stmt, 1) : NULL_TREE;
7638
7639 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
7640 gimple_call_combined_fn (stmt),
7641 arg0,
7642 arg1,
7643 strict_overflow_p, depth);
7644 }
7645
7646 /* Return true if return value of call STMT is known to be non-negative.
7647 If the return value is based on the assumption that signed overflow is
7648 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7649 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7650
7651 static bool
7652 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7653 int depth)
7654 {
7655 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7656 {
7657 tree arg = gimple_phi_arg_def (stmt, i);
7658 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
7659 return false;
7660 }
7661 return true;
7662 }
7663
7664 /* Return true if STMT is known to compute a non-negative value.
7665 If the return value is based on the assumption that signed overflow is
7666 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7667 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7668
7669 bool
7670 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7671 int depth)
7672 {
7673 switch (gimple_code (stmt))
7674 {
7675 case GIMPLE_ASSIGN:
7676 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
7677 depth);
7678 case GIMPLE_CALL:
7679 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
7680 depth);
7681 case GIMPLE_PHI:
7682 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
7683 depth);
7684 default:
7685 return false;
7686 }
7687 }
7688
7689 /* Return true if the floating-point value computed by assignment STMT
7690 is known to have an integer value. We also allow +Inf, -Inf and NaN
7691 to be considered integer values. Return false for signaling NaN.
7692
7693 DEPTH is the current nesting depth of the query. */
7694
7695 static bool
7696 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
7697 {
7698 enum tree_code code = gimple_assign_rhs_code (stmt);
7699 switch (get_gimple_rhs_class (code))
7700 {
7701 case GIMPLE_UNARY_RHS:
7702 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
7703 gimple_assign_rhs1 (stmt), depth);
7704 case GIMPLE_BINARY_RHS:
7705 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
7706 gimple_assign_rhs1 (stmt),
7707 gimple_assign_rhs2 (stmt), depth);
7708 case GIMPLE_TERNARY_RHS:
7709 return false;
7710 case GIMPLE_SINGLE_RHS:
7711 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
7712 case GIMPLE_INVALID_RHS:
7713 break;
7714 }
7715 gcc_unreachable ();
7716 }
7717
7718 /* Return true if the floating-point value computed by call STMT is known
7719 to have an integer value. We also allow +Inf, -Inf and NaN to be
7720 considered integer values. Return false for signaling NaN.
7721
7722 DEPTH is the current nesting depth of the query. */
7723
7724 static bool
7725 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
7726 {
7727 tree arg0 = (gimple_call_num_args (stmt) > 0
7728 ? gimple_call_arg (stmt, 0)
7729 : NULL_TREE);
7730 tree arg1 = (gimple_call_num_args (stmt) > 1
7731 ? gimple_call_arg (stmt, 1)
7732 : NULL_TREE);
7733 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
7734 arg0, arg1, depth);
7735 }
7736
7737 /* Return true if the floating-point result of phi STMT is known to have
7738 an integer value. We also allow +Inf, -Inf and NaN to be considered
7739 integer values. Return false for signaling NaN.
7740
7741 DEPTH is the current nesting depth of the query. */
7742
7743 static bool
7744 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
7745 {
7746 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7747 {
7748 tree arg = gimple_phi_arg_def (stmt, i);
7749 if (!integer_valued_real_single_p (arg, depth + 1))
7750 return false;
7751 }
7752 return true;
7753 }
7754
7755 /* Return true if the floating-point value computed by STMT is known
7756 to have an integer value. We also allow +Inf, -Inf and NaN to be
7757 considered integer values. Return false for signaling NaN.
7758
7759 DEPTH is the current nesting depth of the query. */
7760
7761 bool
7762 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
7763 {
7764 switch (gimple_code (stmt))
7765 {
7766 case GIMPLE_ASSIGN:
7767 return gimple_assign_integer_valued_real_p (stmt, depth);
7768 case GIMPLE_CALL:
7769 return gimple_call_integer_valued_real_p (stmt, depth);
7770 case GIMPLE_PHI:
7771 return gimple_phi_integer_valued_real_p (stmt, depth);
7772 default:
7773 return false;
7774 }
7775 }