1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2019 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "gimple-pretty-print.h"
33 #include "gimple-ssa-warn-restrict.h"
34 #include "fold-const.h"
35 #include "stmt.h"
36 #include "expr.h"
37 #include "stor-layout.h"
38 #include "dumpfile.h"
39 #include "gimple-fold.h"
40 #include "gimplify.h"
41 #include "gimple-iterator.h"
42 #include "tree-into-ssa.h"
43 #include "tree-dfa.h"
44 #include "tree-object-size.h"
45 #include "tree-ssa.h"
46 #include "tree-ssa-propagate.h"
47 #include "ipa-utils.h"
48 #include "tree-ssa-address.h"
49 #include "langhooks.h"
50 #include "gimplify-me.h"
51 #include "dbgcnt.h"
52 #include "builtins.h"
53 #include "tree-eh.h"
54 #include "gimple-match.h"
55 #include "gomp-constants.h"
56 #include "optabs-query.h"
57 #include "omp-general.h"
58 #include "tree-cfg.h"
59 #include "fold-const-call.h"
60 #include "stringpool.h"
61 #include "attribs.h"
62 #include "asan.h"
63 #include "diagnostic-core.h"
64 #include "intl.h"
65 #include "calls.h"
66 #include "tree-vector-builder.h"
67 #include "tree-ssa-strlen.h"
68
69 enum strlen_range_kind {
70 /* Compute the exact constant string length. */
71 SRK_STRLEN,
72 /* Compute the maximum constant string length. */
73 SRK_STRLENMAX,
74 /* Compute a range of string lengths bounded by object sizes. When
75 the length of a string cannot be determined, consider as the upper
76 bound the size of the enclosing object the string may be a member
77 or element of. Also determine the size of the largest character
78 array the string may refer to. */
79 SRK_LENRANGE,
80 /* Determine the integer value of the argument (not string length). */
81 SRK_INT_VALUE
82 };
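/* Illustrative sketch (not part of the original source): for

     char buf[8];   -- contents unknown at compile time

   a query with SRK_STRLEN fails, since no exact constant length can be
   computed, while SRK_LENRANGE yields the range [0, 7], the upper bound
   derived from the size of the enclosing array minus 1 for the
   terminating nul.  */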
83
84 static bool
85 get_range_strlen (tree, bitmap *, strlen_range_kind, c_strlen_data *, unsigned);
86
     87 /* Return true when DECL can be referenced from the current unit.
     88    FROM_DECL (if non-null) is the variable from whose constructor DECL was taken.
89 We can get declarations that are not possible to reference for various
90 reasons:
91
92 1) When analyzing C++ virtual tables.
93 C++ virtual tables do have known constructors even
     94          when they are keyed to another compilation unit.
95 Those tables can contain pointers to methods and vars
96 in other units. Those methods have both STATIC and EXTERNAL
97 set.
98 2) In WHOPR mode devirtualization might lead to reference
     99          to a method that was partitioned elsewhere.
100 In this case we have static VAR_DECL or FUNCTION_DECL
101 that has no corresponding callgraph/varpool node
102 declaring the body.
    103       3) COMDAT functions referred to by external vtables that
104 we devirtualize only during final compilation stage.
105 At this time we already decided that we will not output
106 the function body and thus we can't reference the symbol
107 directly. */
108
109 static bool
110 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
111 {
112 varpool_node *vnode;
113 struct cgraph_node *node;
114 symtab_node *snode;
115
116 if (DECL_ABSTRACT_P (decl))
117 return false;
118
119 /* We are concerned only about static/external vars and functions. */
120 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
121 || !VAR_OR_FUNCTION_DECL_P (decl))
122 return true;
123
    124   /* Static objects can be referred to only if they are defined and not
    125      yet optimized out.  */
126 if (!TREE_PUBLIC (decl))
127 {
128 if (DECL_EXTERNAL (decl))
129 return false;
130 /* Before we start optimizing unreachable code we can be sure all
131 static objects are defined. */
132 if (symtab->function_flags_ready)
133 return true;
134 snode = symtab_node::get (decl);
135 if (!snode || !snode->definition)
136 return false;
137 node = dyn_cast <cgraph_node *> (snode);
138 return !node || !node->inlined_to;
139 }
140
    141   /* We will later output the initializer, so we can refer to it.
    142      Thus we are concerned only when DECL comes from the initializer of
    143      an external var or a var that has been optimized out.  */
144 if (!from_decl
145 || !VAR_P (from_decl)
146 || (!DECL_EXTERNAL (from_decl)
147 && (vnode = varpool_node::get (from_decl)) != NULL
148 && vnode->definition)
149 || (flag_ltrans
150 && (vnode = varpool_node::get (from_decl)) != NULL
151 && vnode->in_other_partition))
152 return true;
    153   /* We are folding a reference from an external vtable.  The vtable may refer
    154      to a symbol keyed to another compilation unit.  The other compilation
    155      unit may be in a separate DSO and the symbol may be hidden.  */
156 if (DECL_VISIBILITY_SPECIFIED (decl)
157 && DECL_EXTERNAL (decl)
158 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
159 && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
160 return false;
    161   /* When the function is public, we can always introduce a new reference.
    162      The exception is COMDAT functions, where a direct reference implies
    163      the need to include the function body in the current unit.  */
164 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
165 return true;
166 /* We have COMDAT. We are going to check if we still have definition
167 or if the definition is going to be output in other partition.
168 Bypass this when gimplifying; all needed functions will be produced.
169
    170      As observed in PR20991 for already optimized out comdat virtual functions,
    171      it may be tempting not to give up, because the copy will be
    172      output elsewhere when the corresponding vtable is output.
    173      This is however not possible - the ABI specifies that COMDATs are output
    174      in units where they are used, and when the other unit was compiled with
    175      LTO it is possible that the vtable was kept public while the function
    176      itself was privatized.  */
177 if (!symtab->function_flags_ready)
178 return true;
179
180 snode = symtab_node::get (decl);
181 if (!snode
182 || ((!snode->definition || DECL_EXTERNAL (decl))
183 && (!snode->in_other_partition
184 || (!snode->forced_by_abi && !snode->force_output))))
185 return false;
186 node = dyn_cast <cgraph_node *> (snode);
187 return !node || !node->inlined_to;
188 }
189
190 /* Create a temporary for TYPE for a statement STMT. If the current function
    191    is in SSA form, an SSA name is created.  Otherwise a temporary register
192 is made. */
193
194 tree
195 create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
196 {
197 if (gimple_in_ssa_p (cfun))
198 return make_ssa_name (type, stmt);
199 else
200 return create_tmp_reg (type);
201 }
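/* Usage sketch (mirrors the calls made later in this file; shown for
   illustration only):

     gimple *new_stmt = gimple_build_assign (NULL_TREE, srcmem);
     srcmem = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem), new_stmt);
     gimple_assign_set_lhs (new_stmt, srcmem);

   In SSA form SRCMEM is a fresh SSA name whose defining statement is
   NEW_STMT; otherwise it is an anonymous temporary register.  */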
202
    203 /* CVAL is a value taken from a variable's DECL_INITIAL.  Try to transform
    204    it into an acceptable form for is_gimple_min_invariant.
    205    FROM_DECL (if non-NULL) is the variable whose constructor contains CVAL.  */
206
207 tree
208 canonicalize_constructor_val (tree cval, tree from_decl)
209 {
210 if (CONSTANT_CLASS_P (cval))
211 return cval;
212
213 tree orig_cval = cval;
214 STRIP_NOPS (cval);
215 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
216 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
217 {
218 tree ptr = TREE_OPERAND (cval, 0);
219 if (is_gimple_min_invariant (ptr))
220 cval = build1_loc (EXPR_LOCATION (cval),
221 ADDR_EXPR, TREE_TYPE (ptr),
222 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
223 ptr,
224 fold_convert (ptr_type_node,
225 TREE_OPERAND (cval, 1))));
226 }
227 if (TREE_CODE (cval) == ADDR_EXPR)
228 {
229 tree base = NULL_TREE;
230 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
231 {
232 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
233 if (base)
234 TREE_OPERAND (cval, 0) = base;
235 }
236 else
237 base = get_base_address (TREE_OPERAND (cval, 0));
238 if (!base)
239 return NULL_TREE;
240
241 if (VAR_OR_FUNCTION_DECL_P (base)
242 && !can_refer_decl_in_current_unit_p (base, from_decl))
243 return NULL_TREE;
244 if (TREE_TYPE (base) == error_mark_node)
245 return NULL_TREE;
246 if (VAR_P (base))
247 TREE_ADDRESSABLE (base) = 1;
248 else if (TREE_CODE (base) == FUNCTION_DECL)
249 {
250 /* Make sure we create a cgraph node for functions we'll reference.
251 They can be non-existent if the reference comes from an entry
252 of an external vtable for example. */
253 cgraph_node::get_create (base);
254 }
255 /* Fixup types in global initializers. */
256 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
257 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
258
259 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
260 cval = fold_convert (TREE_TYPE (orig_cval), cval);
261 return cval;
262 }
263 /* In CONSTRUCTORs we may see unfolded constants like (int (*) ()) 0. */
264 if (TREE_CODE (cval) == INTEGER_CST)
265 {
266 if (TREE_OVERFLOW_P (cval))
267 cval = drop_tree_overflow (cval);
268 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
269 cval = fold_convert (TREE_TYPE (orig_cval), cval);
270 return cval;
271 }
272 return orig_cval;
273 }
274
275 /* If SYM is a constant variable with known value, return the value.
276 NULL_TREE is returned otherwise. */
277
278 tree
279 get_symbol_constant_value (tree sym)
280 {
281 tree val = ctor_for_folding (sym);
282 if (val != error_mark_node)
283 {
284 if (val)
285 {
286 val = canonicalize_constructor_val (unshare_expr (val), sym);
287 if (val && is_gimple_min_invariant (val))
288 return val;
289 else
290 return NULL_TREE;
291 }
292 /* Variables declared 'const' without an initializer
293 have zero as the initializer if they may not be
294 overridden at link or run time. */
295 if (!val
296 && is_gimple_reg_type (TREE_TYPE (sym)))
297 return build_zero_cst (TREE_TYPE (sym));
298 }
299
300 return NULL_TREE;
301 }
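/* For illustration (hypothetical input): given

     static const int answer = 42;

   get_symbol_constant_value on the decl returns the INTEGER_CST 42.
   A 'const' variable of gimple register type with no initializer that
   cannot be overridden at link or run time folds to zero.  */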
302
303
304
305 /* Subroutine of fold_stmt. We perform several simplifications of the
306 memory reference tree EXPR and make sure to re-gimplify them properly
307 after propagation of constant addresses. IS_LHS is true if the
308 reference is supposed to be an lvalue. */
309
310 static tree
311 maybe_fold_reference (tree expr, bool is_lhs)
312 {
313 tree result;
314
315 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
316 || TREE_CODE (expr) == REALPART_EXPR
317 || TREE_CODE (expr) == IMAGPART_EXPR)
318 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
319 return fold_unary_loc (EXPR_LOCATION (expr),
320 TREE_CODE (expr),
321 TREE_TYPE (expr),
322 TREE_OPERAND (expr, 0));
323 else if (TREE_CODE (expr) == BIT_FIELD_REF
324 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
325 return fold_ternary_loc (EXPR_LOCATION (expr),
326 TREE_CODE (expr),
327 TREE_TYPE (expr),
328 TREE_OPERAND (expr, 0),
329 TREE_OPERAND (expr, 1),
330 TREE_OPERAND (expr, 2));
331
332 if (!is_lhs
333 && (result = fold_const_aggregate_ref (expr))
334 && is_gimple_min_invariant (result))
335 return result;
336
337 return NULL_TREE;
338 }
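/* Sketch of the folds performed above (hypothetical GIMPLE): a use
   such as

     _1 = REALPART_EXPR <COMPLEX_CST>;

   is reduced to the constant real component, and a BIT_FIELD_REF of
   a VECTOR_CST is reduced to the selected element.  */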
339
340
341 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
342 replacement rhs for the statement or NULL_TREE if no simplification
343 could be made. It is assumed that the operands have been previously
344 folded. */
345
346 static tree
347 fold_gimple_assign (gimple_stmt_iterator *si)
348 {
349 gimple *stmt = gsi_stmt (*si);
350 enum tree_code subcode = gimple_assign_rhs_code (stmt);
351 location_t loc = gimple_location (stmt);
352
353 tree result = NULL_TREE;
354
355 switch (get_gimple_rhs_class (subcode))
356 {
357 case GIMPLE_SINGLE_RHS:
358 {
359 tree rhs = gimple_assign_rhs1 (stmt);
360
361 if (TREE_CLOBBER_P (rhs))
362 return NULL_TREE;
363
364 if (REFERENCE_CLASS_P (rhs))
365 return maybe_fold_reference (rhs, false);
366
367 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
368 {
369 tree val = OBJ_TYPE_REF_EXPR (rhs);
370 if (is_gimple_min_invariant (val))
371 return val;
372 else if (flag_devirtualize && virtual_method_call_p (rhs))
373 {
374 bool final;
375 vec <cgraph_node *>targets
376 = possible_polymorphic_call_targets (rhs, stmt, &final);
377 if (final && targets.length () <= 1 && dbg_cnt (devirt))
378 {
379 if (dump_enabled_p ())
380 {
381 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
382 "resolving virtual function address "
383 "reference to function %s\n",
384 targets.length () == 1
385 ? targets[0]->name ()
386 : "NULL");
387 }
388 if (targets.length () == 1)
389 {
390 val = fold_convert (TREE_TYPE (val),
391 build_fold_addr_expr_loc
392 (loc, targets[0]->decl));
393 STRIP_USELESS_TYPE_CONVERSION (val);
394 }
395 else
396 /* We cannot use __builtin_unreachable here because it
    397                    cannot have its address taken.  */
398 val = build_int_cst (TREE_TYPE (val), 0);
399 return val;
400 }
401 }
402 }
403
404 else if (TREE_CODE (rhs) == ADDR_EXPR)
405 {
406 tree ref = TREE_OPERAND (rhs, 0);
407 tree tem = maybe_fold_reference (ref, true);
408 if (tem
409 && TREE_CODE (tem) == MEM_REF
410 && integer_zerop (TREE_OPERAND (tem, 1)))
411 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
412 else if (tem)
413 result = fold_convert (TREE_TYPE (rhs),
414 build_fold_addr_expr_loc (loc, tem));
415 else if (TREE_CODE (ref) == MEM_REF
416 && integer_zerop (TREE_OPERAND (ref, 1)))
417 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
418
419 if (result)
420 {
421 /* Strip away useless type conversions. Both the
422 NON_LVALUE_EXPR that may have been added by fold, and
423 "useless" type conversions that might now be apparent
424 due to propagation. */
425 STRIP_USELESS_TYPE_CONVERSION (result);
426
427 if (result != rhs && valid_gimple_rhs_p (result))
428 return result;
429 }
430 }
431
432 else if (TREE_CODE (rhs) == CONSTRUCTOR
433 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
434 {
435 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
436 unsigned i;
437 tree val;
438
439 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
440 if (! CONSTANT_CLASS_P (val))
441 return NULL_TREE;
442
443 return build_vector_from_ctor (TREE_TYPE (rhs),
444 CONSTRUCTOR_ELTS (rhs));
445 }
446
447 else if (DECL_P (rhs))
448 return get_symbol_constant_value (rhs);
449 }
450 break;
451
452 case GIMPLE_UNARY_RHS:
453 break;
454
455 case GIMPLE_BINARY_RHS:
456 break;
457
458 case GIMPLE_TERNARY_RHS:
459 result = fold_ternary_loc (loc, subcode,
460 TREE_TYPE (gimple_assign_lhs (stmt)),
461 gimple_assign_rhs1 (stmt),
462 gimple_assign_rhs2 (stmt),
463 gimple_assign_rhs3 (stmt));
464
465 if (result)
466 {
467 STRIP_USELESS_TYPE_CONVERSION (result);
468 if (valid_gimple_rhs_p (result))
469 return result;
470 }
471 break;
472
473 case GIMPLE_INVALID_RHS:
474 gcc_unreachable ();
475 }
476
477 return NULL_TREE;
478 }
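/* Example of the GIMPLE_SINGLE_RHS case above (hypothetical GIMPLE):
   a constant vector constructor such as

     v_1 = {1, 2, 3, 4};

   is folded to a single VECTOR_CST, and an OBJ_TYPE_REF whose
   polymorphic call has exactly one possible target is replaced by the
   address of that target.  */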
479
480
    481 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
    482    adjusting the replacement stmts' locations and virtual operands.
    483    If the statement has an lhs, the last stmt in the sequence is expected
    484    to assign to that lhs.  */
485
486 static void
487 gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
488 {
489 gimple *stmt = gsi_stmt (*si_p);
490
491 if (gimple_has_location (stmt))
492 annotate_all_with_location (stmts, gimple_location (stmt));
493
494 /* First iterate over the replacement statements backward, assigning
495 virtual operands to their defining statements. */
496 gimple *laststore = NULL;
497 for (gimple_stmt_iterator i = gsi_last (stmts);
498 !gsi_end_p (i); gsi_prev (&i))
499 {
500 gimple *new_stmt = gsi_stmt (i);
501 if ((gimple_assign_single_p (new_stmt)
502 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
503 || (is_gimple_call (new_stmt)
504 && (gimple_call_flags (new_stmt)
505 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
506 {
507 tree vdef;
508 if (!laststore)
509 vdef = gimple_vdef (stmt);
510 else
511 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
512 gimple_set_vdef (new_stmt, vdef);
513 if (vdef && TREE_CODE (vdef) == SSA_NAME)
514 SSA_NAME_DEF_STMT (vdef) = new_stmt;
515 laststore = new_stmt;
516 }
517 }
518
519 /* Second iterate over the statements forward, assigning virtual
520 operands to their uses. */
521 tree reaching_vuse = gimple_vuse (stmt);
522 for (gimple_stmt_iterator i = gsi_start (stmts);
523 !gsi_end_p (i); gsi_next (&i))
524 {
525 gimple *new_stmt = gsi_stmt (i);
    526       /* If the new statement possibly has a VUSE, update it with the exact SSA
527 name we know will reach this one. */
528 if (gimple_has_mem_ops (new_stmt))
529 gimple_set_vuse (new_stmt, reaching_vuse);
530 gimple_set_modified (new_stmt, true);
531 if (gimple_vdef (new_stmt))
532 reaching_vuse = gimple_vdef (new_stmt);
533 }
534
    535   /* If the new sequence does not do a store, release the virtual
536 definition of the original statement. */
537 if (reaching_vuse
538 && reaching_vuse == gimple_vuse (stmt))
539 {
540 tree vdef = gimple_vdef (stmt);
541 if (vdef
542 && TREE_CODE (vdef) == SSA_NAME)
543 {
544 unlink_stmt_vdef (stmt);
545 release_ssa_name (vdef);
546 }
547 }
548
549 /* Finally replace the original statement with the sequence. */
550 gsi_replace_with_seq (si_p, stmts, false);
551 }
552
553 /* Convert EXPR into a GIMPLE value suitable for substitution on the
554 RHS of an assignment. Insert the necessary statements before
    555    iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
    556    is replaced.  If the call is expected to produce a result, then it
    557    is replaced by an assignment of the new RHS to the result variable.
558 If the result is to be ignored, then the call is replaced by a
559 GIMPLE_NOP. A proper VDEF chain is retained by making the first
560 VUSE and the last VDEF of the whole sequence be the same as the replaced
561 statement and using new SSA names for stores in between. */
562
563 void
564 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
565 {
566 tree lhs;
567 gimple *stmt, *new_stmt;
568 gimple_stmt_iterator i;
569 gimple_seq stmts = NULL;
570
571 stmt = gsi_stmt (*si_p);
572
573 gcc_assert (is_gimple_call (stmt));
574
575 push_gimplify_context (gimple_in_ssa_p (cfun));
576
577 lhs = gimple_call_lhs (stmt);
578 if (lhs == NULL_TREE)
579 {
580 gimplify_and_add (expr, &stmts);
581 /* We can end up with folding a memcpy of an empty class assignment
582 which gets optimized away by C++ gimplification. */
583 if (gimple_seq_empty_p (stmts))
584 {
585 pop_gimplify_context (NULL);
586 if (gimple_in_ssa_p (cfun))
587 {
588 unlink_stmt_vdef (stmt);
589 release_defs (stmt);
590 }
591 gsi_replace (si_p, gimple_build_nop (), false);
592 return;
593 }
594 }
595 else
596 {
597 tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
598 new_stmt = gimple_build_assign (lhs, tmp);
599 i = gsi_last (stmts);
600 gsi_insert_after_without_update (&i, new_stmt,
601 GSI_CONTINUE_LINKING);
602 }
603
604 pop_gimplify_context (NULL);
605
606 gsi_replace_with_seq_vops (si_p, stmts);
607 }
608
609
610 /* Replace the call at *GSI with the gimple value VAL. */
611
612 void
613 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
614 {
615 gimple *stmt = gsi_stmt (*gsi);
616 tree lhs = gimple_call_lhs (stmt);
617 gimple *repl;
618 if (lhs)
619 {
620 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
621 val = fold_convert (TREE_TYPE (lhs), val);
622 repl = gimple_build_assign (lhs, val);
623 }
624 else
625 repl = gimple_build_nop ();
626 tree vdef = gimple_vdef (stmt);
627 if (vdef && TREE_CODE (vdef) == SSA_NAME)
628 {
629 unlink_stmt_vdef (stmt);
630 release_ssa_name (vdef);
631 }
632 gsi_replace (gsi, repl, false);
633 }
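/* Usage sketch (hypothetical caller): a fold that proves a call
   computes the constant 3 can do

     replace_call_with_value (gsi, size_int (3));

   which turns "lhs = call (...)" into "lhs = 3" (or a GIMPLE_NOP when
   there is no lhs) and releases the call's virtual definition.  */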
634
635 /* Replace the call at *GSI with the new call REPL and fold that
636 again. */
637
638 static void
639 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
640 {
641 gimple *stmt = gsi_stmt (*gsi);
642 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
643 gimple_set_location (repl, gimple_location (stmt));
644 gimple_move_vops (repl, stmt);
645 gsi_replace (gsi, repl, false);
646 fold_stmt (gsi);
647 }
648
649 /* Return true if VAR is a VAR_DECL or a component thereof. */
650
651 static bool
652 var_decl_component_p (tree var)
653 {
654 tree inner = var;
655 while (handled_component_p (inner))
656 inner = TREE_OPERAND (inner, 0);
657 return (DECL_P (inner)
658 || (TREE_CODE (inner) == MEM_REF
659 && TREE_CODE (TREE_OPERAND (inner, 0)) == ADDR_EXPR));
660 }
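/* For illustration: a VAR_DECL 'x', a COMPONENT_REF 'x.f', an
   ARRAY_REF 'x.a[i]', and a MEM_REF based on an ADDR_EXPR all satisfy
   this predicate; a MEM_REF through a plain SSA-name pointer does
   not.  */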
661
662 /* Return TRUE if the SIZE argument, representing the size of an
663 object, is in a range of values of which exactly zero is valid. */
664
665 static bool
666 size_must_be_zero_p (tree size)
667 {
668 if (integer_zerop (size))
669 return true;
670
671 if (TREE_CODE (size) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (size)))
672 return false;
673
674 tree type = TREE_TYPE (size);
675 int prec = TYPE_PRECISION (type);
676
677 /* Compute the value of SSIZE_MAX, the largest positive value that
678 can be stored in ssize_t, the signed counterpart of size_t. */
679 wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
680 value_range valid_range (VR_RANGE,
681 build_int_cst (type, 0),
682 wide_int_to_tree (type, ssize_max));
683 value_range vr;
684 get_range_info (size, vr);
685 vr.intersect (&valid_range);
686 return vr.zero_p ();
687 }
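/* Worked example (hypothetical ranges): if range info shows a size_t
   N lies in the anti-range ~[1, SSIZE_MAX], i.e. N is either 0 or
   greater than SSIZE_MAX, intersecting it with the valid range
   [0, SSIZE_MAX] leaves only zero, so the function returns true and a
   call like memcpy (d, s, n) can be folded to its destination.  */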
688
689 /* Fold function call to builtin mem{{,p}cpy,move}. Try to detect and
690 diagnose (otherwise undefined) overlapping copies without preventing
691 folding. When folded, GCC guarantees that overlapping memcpy has
    692    the same semantics as memmove.  A call to the library memcpy need not
693 provide the same guarantee. Return false if no simplification can
694 be made. */
695
696 static bool
697 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
698 tree dest, tree src, enum built_in_function code)
699 {
700 gimple *stmt = gsi_stmt (*gsi);
701 tree lhs = gimple_call_lhs (stmt);
702 tree len = gimple_call_arg (stmt, 2);
703 tree destvar, srcvar;
704 location_t loc = gimple_location (stmt);
705
    706   /* If the LEN parameter is a constant zero or in a range where
707 the only valid value is zero, return DEST. */
708 if (size_must_be_zero_p (len))
709 {
710 gimple *repl;
711 if (gimple_call_lhs (stmt))
712 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
713 else
714 repl = gimple_build_nop ();
715 tree vdef = gimple_vdef (stmt);
716 if (vdef && TREE_CODE (vdef) == SSA_NAME)
717 {
718 unlink_stmt_vdef (stmt);
719 release_ssa_name (vdef);
720 }
721 gsi_replace (gsi, repl, false);
722 return true;
723 }
724
725 /* If SRC and DEST are the same (and not volatile), return
726 DEST{,+LEN,+LEN-1}. */
727 if (operand_equal_p (src, dest, 0))
728 {
729 /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
730 It's safe and may even be emitted by GCC itself (see bug
731 32667). */
732 unlink_stmt_vdef (stmt);
733 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
734 release_ssa_name (gimple_vdef (stmt));
735 if (!lhs)
736 {
737 gsi_replace (gsi, gimple_build_nop (), false);
738 return true;
739 }
740 goto done;
741 }
742 else
743 {
744 tree srctype, desttype;
745 unsigned int src_align, dest_align;
746 tree off0;
747 const char *tmp_str;
748 unsigned HOST_WIDE_INT tmp_len;
749
750 /* Build accesses at offset zero with a ref-all character type. */
751 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
752 ptr_mode, true), 0);
753
    754       /* If we can perform the copy efficiently by first doing all loads
    755          and then all stores, inline it that way.  Currently 'efficiently'
    756          means that we can load all the memory into a single integer
    757          register, which is what MOVE_MAX gives us.  */
758 src_align = get_pointer_alignment (src);
759 dest_align = get_pointer_alignment (dest);
760 if (tree_fits_uhwi_p (len)
761 && compare_tree_int (len, MOVE_MAX) <= 0
762 /* FIXME: Don't transform copies from strings with known length.
763 Until GCC 9 this prevented a case in gcc.dg/strlenopt-8.c
764 from being handled, and the case was XFAILed for that reason.
765 Now that it is handled and the XFAIL removed, as soon as other
766 strlenopt tests that rely on it for passing are adjusted, this
767 hack can be removed. */
768 && !c_strlen (src, 1)
769 && !((tmp_str = c_getstr (src, &tmp_len)) != NULL
770 && memchr (tmp_str, 0, tmp_len) == NULL))
771 {
772 unsigned ilen = tree_to_uhwi (len);
773 if (pow2p_hwi (ilen))
774 {
    775               /* Detect out-of-bounds accesses without issuing warnings.
    776                  Avoid folding out-of-bounds copies but, to avoid false
    777                  positives for unreachable code, defer the warning until
    778                  after DCE has worked its magic.
    779                  -Wrestrict is still diagnosed.  */
780 if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
781 dest, src, len, len,
782 false, false))
783 if (warning != OPT_Wrestrict)
784 return false;
785
786 scalar_int_mode mode;
787 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
788 if (type
789 && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
790 && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
791 /* If the destination pointer is not aligned we must be able
792 to emit an unaligned store. */
793 && (dest_align >= GET_MODE_ALIGNMENT (mode)
794 || !targetm.slow_unaligned_access (mode, dest_align)
795 || (optab_handler (movmisalign_optab, mode)
796 != CODE_FOR_nothing)))
797 {
798 tree srctype = type;
799 tree desttype = type;
800 if (src_align < GET_MODE_ALIGNMENT (mode))
801 srctype = build_aligned_type (type, src_align);
802 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
803 tree tem = fold_const_aggregate_ref (srcmem);
804 if (tem)
805 srcmem = tem;
806 else if (src_align < GET_MODE_ALIGNMENT (mode)
807 && targetm.slow_unaligned_access (mode, src_align)
808 && (optab_handler (movmisalign_optab, mode)
809 == CODE_FOR_nothing))
810 srcmem = NULL_TREE;
811 if (srcmem)
812 {
813 gimple *new_stmt;
814 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
815 {
816 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
817 srcmem
818 = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
819 new_stmt);
820 gimple_assign_set_lhs (new_stmt, srcmem);
821 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
822 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
823 }
824 if (dest_align < GET_MODE_ALIGNMENT (mode))
825 desttype = build_aligned_type (type, dest_align);
826 new_stmt
827 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
828 dest, off0),
829 srcmem);
830 gimple_move_vops (new_stmt, stmt);
831 if (!lhs)
832 {
833 gsi_replace (gsi, new_stmt, false);
834 return true;
835 }
836 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
837 goto done;
838 }
839 }
840 }
841 }
842
843 if (code == BUILT_IN_MEMMOVE)
844 {
845 /* Both DEST and SRC must be pointer types.
846 ??? This is what old code did. Is the testing for pointer types
847 really mandatory?
848
849 If either SRC is readonly or length is 1, we can use memcpy. */
850 if (!dest_align || !src_align)
851 return false;
852 if (readonly_data_expr (src)
853 || (tree_fits_uhwi_p (len)
854 && (MIN (src_align, dest_align) / BITS_PER_UNIT
855 >= tree_to_uhwi (len))))
856 {
857 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
858 if (!fn)
859 return false;
860 gimple_call_set_fndecl (stmt, fn);
861 gimple_call_set_arg (stmt, 0, dest);
862 gimple_call_set_arg (stmt, 1, src);
863 fold_stmt (gsi);
864 return true;
865 }
866
867 /* If *src and *dest can't overlap, optimize into memcpy as well. */
868 if (TREE_CODE (src) == ADDR_EXPR
869 && TREE_CODE (dest) == ADDR_EXPR)
870 {
871 tree src_base, dest_base, fn;
872 poly_int64 src_offset = 0, dest_offset = 0;
873 poly_uint64 maxsize;
874
875 srcvar = TREE_OPERAND (src, 0);
876 src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
877 if (src_base == NULL)
878 src_base = srcvar;
879 destvar = TREE_OPERAND (dest, 0);
880 dest_base = get_addr_base_and_unit_offset (destvar,
881 &dest_offset);
882 if (dest_base == NULL)
883 dest_base = destvar;
884 if (!poly_int_tree_p (len, &maxsize))
885 maxsize = -1;
886 if (SSA_VAR_P (src_base)
887 && SSA_VAR_P (dest_base))
888 {
889 if (operand_equal_p (src_base, dest_base, 0)
890 && ranges_maybe_overlap_p (src_offset, maxsize,
891 dest_offset, maxsize))
892 return false;
893 }
894 else if (TREE_CODE (src_base) == MEM_REF
895 && TREE_CODE (dest_base) == MEM_REF)
896 {
897 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
898 TREE_OPERAND (dest_base, 0), 0))
899 return false;
900 poly_offset_int full_src_offset
901 = mem_ref_offset (src_base) + src_offset;
902 poly_offset_int full_dest_offset
903 = mem_ref_offset (dest_base) + dest_offset;
904 if (ranges_maybe_overlap_p (full_src_offset, maxsize,
905 full_dest_offset, maxsize))
906 return false;
907 }
908 else
909 return false;
910
911 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
912 if (!fn)
913 return false;
914 gimple_call_set_fndecl (stmt, fn);
915 gimple_call_set_arg (stmt, 0, dest);
916 gimple_call_set_arg (stmt, 1, src);
917 fold_stmt (gsi);
918 return true;
919 }
920
    921       /* If the destination and source do not alias, optimize into
922 memcpy as well. */
923 if ((is_gimple_min_invariant (dest)
924 || TREE_CODE (dest) == SSA_NAME)
925 && (is_gimple_min_invariant (src)
926 || TREE_CODE (src) == SSA_NAME))
927 {
928 ao_ref destr, srcr;
929 ao_ref_init_from_ptr_and_size (&destr, dest, len);
930 ao_ref_init_from_ptr_and_size (&srcr, src, len);
931 if (!refs_may_alias_p_1 (&destr, &srcr, false))
932 {
933 tree fn;
934 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
935 if (!fn)
936 return false;
937 gimple_call_set_fndecl (stmt, fn);
938 gimple_call_set_arg (stmt, 0, dest);
939 gimple_call_set_arg (stmt, 1, src);
940 fold_stmt (gsi);
941 return true;
942 }
943 }
944
945 return false;
946 }
947
948 if (!tree_fits_shwi_p (len))
949 return false;
950 if (!POINTER_TYPE_P (TREE_TYPE (src))
951 || !POINTER_TYPE_P (TREE_TYPE (dest)))
952 return false;
953 /* In the following try to find a type that is most natural to be
954 used for the memcpy source and destination and that allows
955 the most optimization when memcpy is turned into a plain assignment
956 using that type. In theory we could always use a char[len] type
    957          but that only gains us that the destination and source possibly
    958          will no longer have their address taken.  */
959 srctype = TREE_TYPE (TREE_TYPE (src));
960 if (TREE_CODE (srctype) == ARRAY_TYPE
961 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
962 srctype = TREE_TYPE (srctype);
963 desttype = TREE_TYPE (TREE_TYPE (dest));
964 if (TREE_CODE (desttype) == ARRAY_TYPE
965 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
966 desttype = TREE_TYPE (desttype);
967 if (TREE_ADDRESSABLE (srctype)
968 || TREE_ADDRESSABLE (desttype))
969 return false;
970
971 /* Make sure we are not copying using a floating-point mode or
972 a type whose size possibly does not match its precision. */
973 if (FLOAT_MODE_P (TYPE_MODE (desttype))
974 || TREE_CODE (desttype) == BOOLEAN_TYPE
975 || TREE_CODE (desttype) == ENUMERAL_TYPE)
976 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
977 if (FLOAT_MODE_P (TYPE_MODE (srctype))
978 || TREE_CODE (srctype) == BOOLEAN_TYPE
979 || TREE_CODE (srctype) == ENUMERAL_TYPE)
980 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
981 if (!srctype)
982 srctype = desttype;
983 if (!desttype)
984 desttype = srctype;
985 if (!srctype)
986 return false;
987
988 src_align = get_pointer_alignment (src);
989 dest_align = get_pointer_alignment (dest);
990 if (dest_align < TYPE_ALIGN (desttype)
991 || src_align < TYPE_ALIGN (srctype))
992 return false;
993
994 destvar = NULL_TREE;
995 if (TREE_CODE (dest) == ADDR_EXPR
996 && var_decl_component_p (TREE_OPERAND (dest, 0))
997 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
998 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
999
1000 srcvar = NULL_TREE;
1001 if (TREE_CODE (src) == ADDR_EXPR
1002 && var_decl_component_p (TREE_OPERAND (src, 0))
1003 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1004 {
1005 if (!destvar
1006 || src_align >= TYPE_ALIGN (desttype))
1007 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
1008 src, off0);
1009 else if (!STRICT_ALIGNMENT)
1010 {
1011 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1012 src_align);
1013 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1014 }
1015 }
1016
1017 if (srcvar == NULL_TREE && destvar == NULL_TREE)
1018 return false;
1019
1020 if (srcvar == NULL_TREE)
1021 {
1022 if (src_align >= TYPE_ALIGN (desttype))
1023 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1024 else
1025 {
1026 if (STRICT_ALIGNMENT)
1027 return false;
1028 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1029 src_align);
1030 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1031 }
1032 }
1033 else if (destvar == NULL_TREE)
1034 {
1035 if (dest_align >= TYPE_ALIGN (srctype))
1036 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1037 else
1038 {
1039 if (STRICT_ALIGNMENT)
1040 return false;
1041 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1042 dest_align);
1043 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1044 }
1045 }
1046
   1047       /* Same as above, detect out-of-bounds accesses without issuing
   1048          warnings.  Avoid folding out-of-bounds copies but, to avoid
   1049          false positives for unreachable code, defer the warning until
   1050          after DCE has worked its magic.
   1051          -Wrestrict is still diagnosed.  */
1052 if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
1053 dest, src, len, len,
1054 false, false))
1055 if (warning != OPT_Wrestrict)
1056 return false;
1057
1058 gimple *new_stmt;
1059 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1060 {
1061 tree tem = fold_const_aggregate_ref (srcvar);
1062 if (tem)
1063 srcvar = tem;
1064 if (! is_gimple_min_invariant (srcvar))
1065 {
1066 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1067 srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
1068 new_stmt);
1069 gimple_assign_set_lhs (new_stmt, srcvar);
1070 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1071 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1072 }
1073 new_stmt = gimple_build_assign (destvar, srcvar);
1074 goto set_vop_and_replace;
1075 }
1076
1077 /* We get an aggregate copy. Use an unsigned char[] type to
1078 perform the copying to preserve padding and to avoid any issues
   1079          with TREE_ADDRESSABLE types or float-mode behavior on copying.  */
1080 desttype = build_array_type_nelts (unsigned_char_type_node,
1081 tree_to_uhwi (len));
1082 srctype = desttype;
1083 if (src_align > TYPE_ALIGN (srctype))
1084 srctype = build_aligned_type (srctype, src_align);
1085 if (dest_align > TYPE_ALIGN (desttype))
1086 desttype = build_aligned_type (desttype, dest_align);
1087 new_stmt
1088 = gimple_build_assign (fold_build2 (MEM_REF, desttype, dest, off0),
1089 fold_build2 (MEM_REF, srctype, src, off0));
1090 set_vop_and_replace:
1091 gimple_move_vops (new_stmt, stmt);
1092 if (!lhs)
1093 {
1094 gsi_replace (gsi, new_stmt, false);
1095 return true;
1096 }
1097 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1098 }
1099
1100 done:
1101 gimple_seq stmts = NULL;
1102 if (code == BUILT_IN_MEMCPY || code == BUILT_IN_MEMMOVE)
1103 len = NULL_TREE;
1104 else if (code == BUILT_IN_MEMPCPY)
1105 {
1106 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1107 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1108 TREE_TYPE (dest), dest, len);
1109 }
1110 else
1111 gcc_unreachable ();
1112
1113 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1114 gimple *repl = gimple_build_assign (lhs, dest);
1115 gsi_replace (gsi, repl, false);
1116 return true;
1117 }
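/* Illustration of the inline expansion above (the GIMPLE shown is a
   hypothetical sketch): with a power-of-two length of 4 and adequate
   alignment,

     memcpy (&d, &s, 4);

   becomes a load/store pair through a 32-bit integer type accessed via
   a ref-all character pointer at offset zero:

     _1 = MEM[(char * {ref-all})&s];
     MEM[(char * {ref-all})&d] = _1;  */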
1118
1119 /* Transform a call to built-in bcmp(a, b, len) at *GSI into one
1120 to built-in memcmp (a, b, len). */
1121
1122 static bool
1123 gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
1124 {
1125 tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);
1126
1127 if (!fn)
1128 return false;
1129
1130 /* Transform bcmp (a, b, len) into memcmp (a, b, len). */
1131
1132 gimple *stmt = gsi_stmt (*gsi);
1133 tree a = gimple_call_arg (stmt, 0);
1134 tree b = gimple_call_arg (stmt, 1);
1135 tree len = gimple_call_arg (stmt, 2);
1136
1137 gimple *repl = gimple_build_call (fn, 3, a, b, len);
1138 replace_call_with_call_and_fold (gsi, repl);
1139
1140 return true;
1141 }
1142
1143 /* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
1144 to built-in memmove (dest, src, len). */
1145
1146 static bool
1147 gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
1148 {
1149 tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);
1150
1151 if (!fn)
1152 return false;
1153
   1154   /* bcopy has been removed from POSIX in Issue 7, but Issue 6 specifies
   1155      it's equivalent to memmove (not memcpy).  Transform bcopy (src, dest,
   1156      len) into memmove (dest, src, len).  */
1157
1158 gimple *stmt = gsi_stmt (*gsi);
1159 tree src = gimple_call_arg (stmt, 0);
1160 tree dest = gimple_call_arg (stmt, 1);
1161 tree len = gimple_call_arg (stmt, 2);
1162
1163 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1164 gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
1165 replace_call_with_call_and_fold (gsi, repl);
1166
1167 return true;
1168 }
1169
1170 /* Transform a call to built-in bzero (dest, len) at *GSI into one
1171 to built-in memset (dest, 0, len). */
1172
1173 static bool
1174 gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
1175 {
1176 tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
1177
1178 if (!fn)
1179 return false;
1180
1181 /* Transform bzero (dest, len) into memset (dest, 0, len). */
1182
1183 gimple *stmt = gsi_stmt (*gsi);
1184 tree dest = gimple_call_arg (stmt, 0);
1185 tree len = gimple_call_arg (stmt, 1);
1186
1187 gimple_seq seq = NULL;
1188 gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
1189 gimple_seq_add_stmt_without_update (&seq, repl);
1190 gsi_replace_with_seq_vops (gsi, seq);
1191 fold_stmt (gsi);
1192
1193 return true;
1194 }
1195
   1196 /* Fold a call to builtin memset or bzero at *GSI, setting LEN bytes of
   1197    memory to the value C.  Return whether a simplification was made.  */
1198
1199 static bool
1200 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1201 {
1202 gimple *stmt = gsi_stmt (*gsi);
1203 tree etype;
1204 unsigned HOST_WIDE_INT length, cval;
1205
1206 /* If the LEN parameter is zero, return DEST. */
1207 if (integer_zerop (len))
1208 {
1209 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1210 return true;
1211 }
1212
1213 if (! tree_fits_uhwi_p (len))
1214 return false;
1215
1216 if (TREE_CODE (c) != INTEGER_CST)
1217 return false;
1218
1219 tree dest = gimple_call_arg (stmt, 0);
1220 tree var = dest;
1221 if (TREE_CODE (var) != ADDR_EXPR)
1222 return false;
1223
1224 var = TREE_OPERAND (var, 0);
1225 if (TREE_THIS_VOLATILE (var))
1226 return false;
1227
1228 etype = TREE_TYPE (var);
1229 if (TREE_CODE (etype) == ARRAY_TYPE)
1230 etype = TREE_TYPE (etype);
1231
1232 if (!INTEGRAL_TYPE_P (etype)
1233 && !POINTER_TYPE_P (etype))
   1234     return false;
1235
1236 if (! var_decl_component_p (var))
   1237     return false;
1238
1239 length = tree_to_uhwi (len);
1240 if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
1241 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
   1242     return false;
1243
1244 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
   1245     return false;
1246
1247 if (integer_zerop (c))
1248 cval = 0;
1249 else
1250 {
1251 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
   1252         return false;
1253
1254 cval = TREE_INT_CST_LOW (c);
1255 cval &= 0xff;
1256 cval |= cval << 8;
1257 cval |= cval << 16;
1258 cval |= (cval << 31) << 1;
1259 }
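      /* For illustration: C = 0xab replicates to
         CVAL = 0xabababababababab on a 64-bit HOST_WIDE_INT.  The split
         (cval << 31) << 1 avoids an out-of-range shift by 32 should
         HOST_WIDE_INT be only 32 bits wide.  */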
1260
1261 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1262 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1263 gimple_move_vops (store, stmt);
1264 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1265 if (gimple_call_lhs (stmt))
1266 {
1267 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1268 gsi_replace (gsi, asgn, false);
1269 }
1270 else
1271 {
1272 gimple_stmt_iterator gsi2 = *gsi;
1273 gsi_prev (gsi);
1274 gsi_remove (&gsi2, true);
1275 }
1276
1277 return true;
1278 }
1279
1280 /* Helper of get_range_strlen for ARG that is not an SSA_NAME. */
1281
1282 static bool
1283 get_range_strlen_tree (tree arg, bitmap *visited, strlen_range_kind rkind,
1284 c_strlen_data *pdata, unsigned eltsize)
1285 {
1286 gcc_assert (TREE_CODE (arg) != SSA_NAME);
1287
1288 /* The length computed by this invocation of the function. */
1289 tree val = NULL_TREE;
1290
1291 /* True if VAL is an optimistic (tight) bound determined from
1292 the size of the character array in which the string may be
1293 stored. In that case, the computed VAL is used to set
1294 PDATA->MAXBOUND. */
1295 bool tight_bound = false;
1296
1297 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1298 if (TREE_CODE (arg) == ADDR_EXPR
1299 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
1300 {
1301 tree op = TREE_OPERAND (arg, 0);
1302 if (integer_zerop (TREE_OPERAND (op, 1)))
1303 {
1304 tree aop0 = TREE_OPERAND (op, 0);
1305 if (TREE_CODE (aop0) == INDIRECT_REF
1306 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1307 return get_range_strlen (TREE_OPERAND (aop0, 0), visited, rkind,
1308 pdata, eltsize);
1309 }
1310 else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF
1311 && rkind == SRK_LENRANGE)
1312 {
1313 /* Fail if an array is the last member of a struct object
1314 since it could be treated as a (fake) flexible array
1315 member. */
1316 tree idx = TREE_OPERAND (op, 1);
1317
1318 arg = TREE_OPERAND (op, 0);
1319 tree optype = TREE_TYPE (arg);
1320 if (tree dom = TYPE_DOMAIN (optype))
1321 if (tree bound = TYPE_MAX_VALUE (dom))
1322 if (TREE_CODE (bound) == INTEGER_CST
1323 && TREE_CODE (idx) == INTEGER_CST
1324 && tree_int_cst_lt (bound, idx))
1325 return false;
1326 }
1327 }
1328
1329 if (rkind == SRK_INT_VALUE)
1330 {
1331 /* We are computing the maximum value (not string length). */
1332 val = arg;
1333 if (TREE_CODE (val) != INTEGER_CST
1334 || tree_int_cst_sgn (val) < 0)
1335 return false;
1336 }
1337 else
1338 {
1339 c_strlen_data lendata = { };
1340 val = c_strlen (arg, 1, &lendata, eltsize);
1341
1342 if (!val && lendata.decl)
1343 {
   1344           /* ARG refers to an unterminated const character array
   1345              LENDATA.DECL with size LENDATA.MINLEN.  */
1346 val = lendata.minlen;
1347 pdata->decl = lendata.decl;
1348 }
1349 }
1350
1351 /* Set if VAL represents the maximum length based on array size (set
1352 when exact length cannot be determined). */
1353 bool maxbound = false;
1354
1355 if (!val && rkind == SRK_LENRANGE)
1356 {
1357 if (TREE_CODE (arg) == ADDR_EXPR)
1358 return get_range_strlen (TREE_OPERAND (arg, 0), visited, rkind,
1359 pdata, eltsize);
1360
1361 if (TREE_CODE (arg) == ARRAY_REF)
1362 {
1363 tree optype = TREE_TYPE (TREE_OPERAND (arg, 0));
1364
1365 /* Determine the "innermost" array type. */
1366 while (TREE_CODE (optype) == ARRAY_TYPE
1367 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
1368 optype = TREE_TYPE (optype);
1369
1370 /* Avoid arrays of pointers. */
1371 tree eltype = TREE_TYPE (optype);
1372 if (TREE_CODE (optype) != ARRAY_TYPE
1373 || !INTEGRAL_TYPE_P (eltype))
1374 return false;
1375
1376 /* Fail when the array bound is unknown or zero. */
1377 val = TYPE_SIZE_UNIT (optype);
1378 if (!val || integer_zerop (val))
1379 return false;
1380
1381 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1382 integer_one_node);
1383
1384 /* Set the minimum size to zero since the string in
1385 the array could have zero length. */
1386 pdata->minlen = ssize_int (0);
1387
1388 tight_bound = true;
1389 }
1390 else if (TREE_CODE (arg) == COMPONENT_REF
1391 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
1392 == ARRAY_TYPE))
1393 {
1394 /* Use the type of the member array to determine the upper
1395 bound on the length of the array. This may be overly
1396 optimistic if the array itself isn't NUL-terminated and
1397 the caller relies on the subsequent member to contain
1398 the NUL but that would only be considered valid if
1399 the array were the last member of a struct. */
1400
1401 tree fld = TREE_OPERAND (arg, 1);
1402
1403 tree optype = TREE_TYPE (fld);
1404
1405 /* Determine the "innermost" array type. */
1406 while (TREE_CODE (optype) == ARRAY_TYPE
1407 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
1408 optype = TREE_TYPE (optype);
1409
1410 /* Fail when the array bound is unknown or zero. */
1411 val = TYPE_SIZE_UNIT (optype);
1412 if (!val || integer_zerop (val))
1413 return false;
1414 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1415 integer_one_node);
1416
1417 /* Set the minimum size to zero since the string in
1418 the array could have zero length. */
1419 pdata->minlen = ssize_int (0);
1420
1421 /* The array size determined above is an optimistic bound
1422 on the length. If the array isn't nul-terminated the
1423 length computed by the library function would be greater.
1424 Even though using strlen to cross the subobject boundary
1425 is undefined, avoid drawing conclusions from the member
1426 type about the length here. */
1427 tight_bound = true;
1428 }
1429 else if (VAR_P (arg))
1430 {
1431 /* Avoid handling pointers to arrays. GCC might misuse
1432 a pointer to an array of one bound to point to an array
1433 object of a greater bound. */
1434 tree argtype = TREE_TYPE (arg);
1435 if (TREE_CODE (argtype) == ARRAY_TYPE)
1436 {
1437 val = TYPE_SIZE_UNIT (argtype);
1438 if (!val
1439 || TREE_CODE (val) != INTEGER_CST
1440 || integer_zerop (val))
1441 return false;
1442 val = wide_int_to_tree (TREE_TYPE (val),
1443 wi::sub (wi::to_wide (val), 1));
1444
1445 /* Set the minimum size to zero since the string in
1446 the array could have zero length. */
1447 pdata->minlen = ssize_int (0);
1448 }
1449 }
1450 maxbound = true;
1451 }
1452
1453 if (!val)
1454 return false;
1455
1456 /* Adjust the lower bound on the string length as necessary. */
1457 if (!pdata->minlen
1458 || (rkind != SRK_STRLEN
1459 && TREE_CODE (pdata->minlen) == INTEGER_CST
1460 && TREE_CODE (val) == INTEGER_CST
1461 && tree_int_cst_lt (val, pdata->minlen)))
1462 pdata->minlen = val;
1463
1464 if (pdata->maxbound && TREE_CODE (pdata->maxbound) == INTEGER_CST)
1465 {
1466 /* Adjust the tighter (more optimistic) string length bound
1467 if necessary and proceed to adjust the more conservative
1468 bound. */
1469 if (TREE_CODE (val) == INTEGER_CST)
1470 {
1471 if (tree_int_cst_lt (pdata->maxbound, val))
1472 pdata->maxbound = val;
1473 }
1474 else
1475 pdata->maxbound = val;
1476 }
1477 else if (pdata->maxbound || maxbound)
1478 /* Set PDATA->MAXBOUND only if it either isn't INTEGER_CST or
1479 if VAL corresponds to the maximum length determined based
1480 on the type of the object. */
1481 pdata->maxbound = val;
1482
1483 if (tight_bound)
1484 {
1485 /* VAL computed above represents an optimistically tight bound
1486 on the length of the string based on the referenced object's
1487 or subobject's type. Determine the conservative upper bound
1488 based on the enclosing object's size if possible. */
1489 if (rkind == SRK_LENRANGE)
1490 {
1491 poly_int64 offset;
1492 tree base = get_addr_base_and_unit_offset (arg, &offset);
1493 if (!base)
1494 {
1495 /* When the call above fails due to a non-constant offset
1496 assume the offset is zero and use the size of the whole
1497 enclosing object instead. */
1498 base = get_base_address (arg);
1499 offset = 0;
1500 }
1501 /* If the base object is a pointer no upper bound on the length
1502 can be determined. Otherwise the maximum length is equal to
1503 the size of the enclosing object minus the offset of
1504 the referenced subobject minus 1 (for the terminating nul). */
1505 tree type = TREE_TYPE (base);
1506 if (TREE_CODE (type) == POINTER_TYPE
1507 || !VAR_P (base) || !(val = DECL_SIZE_UNIT (base)))
1508 val = build_all_ones_cst (size_type_node);
1509 else
1510 {
1511 val = DECL_SIZE_UNIT (base);
1512 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1513 size_int (offset + 1));
1514 }
1515 }
1516 else
1517 return false;
1518 }
1519
1520 if (pdata->maxlen)
1521 {
1522 /* Adjust the more conservative bound if possible/necessary
1523 and fail otherwise. */
1524 if (rkind != SRK_STRLEN)
1525 {
1526 if (TREE_CODE (pdata->maxlen) != INTEGER_CST
1527 || TREE_CODE (val) != INTEGER_CST)
1528 return false;
1529
1530 if (tree_int_cst_lt (pdata->maxlen, val))
1531 pdata->maxlen = val;
1532 return true;
1533 }
1534 else if (simple_cst_equal (val, pdata->maxlen) != 1)
1535 {
1536 /* Fail if the length of this ARG is different from that
1537 previously determined from another ARG. */
1538 return false;
1539 }
1540 }
1541
1542 pdata->maxlen = val;
1543 return rkind == SRK_LENRANGE || !integer_all_onesp (val);
1544 }
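/* Example of the SRK_LENRANGE handling above (hypothetical types): for

     struct S { char a[4]; int i; } s;

   a COMPONENT_REF of the member array s.a sets VAL to
   sizeof (s.a) - 1 == 3 as a tight (optimistic) bound and
   PDATA->MINLEN to 0, since the array may hold an empty string.  */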
1545
1546 /* For an ARG referencing one or more strings, try to obtain the range
   1547    of their lengths, or the size of the largest array ARG refers to if
1548 the range of lengths cannot be determined, and store all in *PDATA.
1549 For an integer ARG (when RKIND == SRK_INT_VALUE), try to determine
1550 the maximum constant value.
1551 If ARG is an SSA_NAME, follow its use-def chains. When RKIND ==
1552 SRK_STRLEN, then if PDATA->MAXLEN is not equal to the determined
1553 length or if we are unable to determine the length, return false.
1554 VISITED is a bitmap of visited variables.
1555 RKIND determines the kind of value or range to obtain (see
1556 strlen_range_kind).
1557 Set PDATA->DECL if ARG refers to an unterminated constant array.
1558 On input, set ELTSIZE to 1 for normal single byte character strings,
   1559    and either 2 or 4 for wide character strings (the size of wchar_t).
1560 Return true if *PDATA was successfully populated and false otherwise. */
1561
1562 static bool
1563 get_range_strlen (tree arg, bitmap *visited,
1564 strlen_range_kind rkind,
1565 c_strlen_data *pdata, unsigned eltsize)
1566 {
1567
1568 if (TREE_CODE (arg) != SSA_NAME)
1569 return get_range_strlen_tree (arg, visited, rkind, pdata, eltsize);
1570
1571 /* If ARG is registered for SSA update we cannot look at its defining
1572 statement. */
1573 if (name_registered_for_update_p (arg))
1574 return false;
1575
1576 /* If we were already here, break the infinite cycle. */
1577 if (!*visited)
1578 *visited = BITMAP_ALLOC (NULL);
1579 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
1580 return true;
1581
1582 tree var = arg;
1583 gimple *def_stmt = SSA_NAME_DEF_STMT (var);
1584
1585 switch (gimple_code (def_stmt))
1586 {
1587 case GIMPLE_ASSIGN:
1588 /* The RHS of the statement defining VAR must either have a
1589 constant length or come from another SSA_NAME with a constant
1590 length. */
1591 if (gimple_assign_single_p (def_stmt)
1592 || gimple_assign_unary_nop_p (def_stmt))
1593 {
1594 tree rhs = gimple_assign_rhs1 (def_stmt);
1595 return get_range_strlen (rhs, visited, rkind, pdata, eltsize);
1596 }
1597 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1598 {
1599 tree ops[2] = { gimple_assign_rhs2 (def_stmt),
1600 gimple_assign_rhs3 (def_stmt) };
1601
1602 for (unsigned int i = 0; i < 2; i++)
1603 if (!get_range_strlen (ops[i], visited, rkind, pdata, eltsize))
1604 {
1605 if (rkind != SRK_LENRANGE)
1606 return false;
1607 /* Set the upper bound to the maximum to prevent
1608 it from being adjusted in the next iteration but
1609 leave MINLEN and the more conservative MAXBOUND
1610 determined so far alone (or leave them null if
1611 they haven't been set yet). That the MINLEN is
1612 in fact zero can be determined from MAXLEN being
1613 unbounded but the discovered minimum is used for
1614 diagnostics. */
1615 pdata->maxlen = build_all_ones_cst (size_type_node);
1616 }
1617 return true;
1618 }
1619 return false;
1620
1621 case GIMPLE_PHI:
1622 /* Unless RKIND == SRK_LENRANGE, all arguments of the PHI node
1623 must have a constant length. */
1624 for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
1625 {
1626 tree arg = gimple_phi_arg (def_stmt, i)->def;
1627
1628 /* If this PHI has itself as an argument, we cannot
1629 determine the string length of this argument. However,
1630 if we can find a constant string length for the other
1631 PHI args then we can still be sure that this is a
1632 constant string length. So be optimistic and just
1633 continue with the next argument. */
1634 if (arg == gimple_phi_result (def_stmt))
1635 continue;
1636
1637 if (!get_range_strlen (arg, visited, rkind, pdata, eltsize))
1638 {
1639 if (rkind != SRK_LENRANGE)
1640 return false;
1641 /* Set the upper bound to the maximum to prevent
1642 it from being adjusted in the next iteration but
1643 leave MINLEN and the more conservative MAXBOUND
1644 determined so far alone (or leave them null if
1645 they haven't been set yet). That the MINLEN is
1646 in fact zero can be determined from MAXLEN being
1647 unbounded but the discovered minimum is used for
1648 diagnostics. */
1649 pdata->maxlen = build_all_ones_cst (size_type_node);
1650 }
1651 }
1652 return true;
1653
1654 default:
1655 return false;
1656 }
1657 }
1658
1659 /* Try to obtain the range of the lengths of the string(s) referenced
1660 by ARG, or the size of the largest array ARG refers to if the range
1661 of lengths cannot be determined, and store all in *PDATA which must
1662 be zero-initialized on input except PDATA->MAXBOUND may be set to
   1663    a non-null tree node other than INTEGER_CST to request that it be
   1664    set to the length of the longest string in a PHI.  ELTSIZE is
1665 the expected size of the string element in bytes: 1 for char and
1666 some power of 2 for wide characters.
1667 Return true if the range [PDATA->MINLEN, PDATA->MAXLEN] is suitable
1668 for optimization. Returning false means that a nonzero PDATA->MINLEN
1669 doesn't reflect the true lower bound of the range when PDATA->MAXLEN
1670 is -1 (in that case, the actual range is indeterminate, i.e.,
   1671    [0, PTRDIFF_MAX - 2]).  */
1672
1673 bool
1674 get_range_strlen (tree arg, c_strlen_data *pdata, unsigned eltsize)
1675 {
1676 bitmap visited = NULL;
1677 tree maxbound = pdata->maxbound;
1678
1679 if (!get_range_strlen (arg, &visited, SRK_LENRANGE, pdata, eltsize))
1680 {
1681 /* On failure extend the length range to an impossible maximum
1682 (a valid MAXLEN must be less than PTRDIFF_MAX - 1). Other
1683 members can stay unchanged regardless. */
1684 pdata->minlen = ssize_int (0);
1685 pdata->maxlen = build_all_ones_cst (size_type_node);
1686 }
1687 else if (!pdata->minlen)
1688 pdata->minlen = ssize_int (0);
1689
   1690   /* If it's unchanged from its initial non-null value, set the conservative
1691 MAXBOUND to SIZE_MAX. Otherwise leave it null (if it is null). */
1692 if (maxbound && pdata->maxbound == maxbound)
1693 pdata->maxbound = build_all_ones_cst (size_type_node);
1694
1695 if (visited)
1696 BITMAP_FREE (visited);
1697
1698 return !integer_all_onesp (pdata->maxlen);
1699 }
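/* Typical use (hypothetical caller, sketch only):

     c_strlen_data lendata = { };
     if (get_range_strlen (arg, &lendata, 1))
       ... use lendata.minlen and lendata.maxlen ...

   On failure PDATA->MINLEN is set to 0 and PDATA->MAXLEN to the
   all-ones constant, i.e. the maximally pessimistic range.  */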
1700
1701 /* Return the maximum value for ARG given RKIND (see strlen_range_kind).
1702 For ARG of pointer types, NONSTR indicates if the caller is prepared
1703 to handle unterminated strings. For integer ARG and when RKIND ==
1704 SRK_INT_VALUE, NONSTR must be null.
1705
1706 If an unterminated array is discovered and our caller handles
1707 unterminated arrays, then bubble up the offending DECL and
1708 return the maximum size. Otherwise return NULL. */
1709
1710 static tree
1711 get_maxval_strlen (tree arg, strlen_range_kind rkind, tree *nonstr = NULL)
1712 {
1713 /* A non-null NONSTR is meaningless when determining the maximum
1714 value of an integer ARG. */
1715 gcc_assert (rkind != SRK_INT_VALUE || nonstr == NULL);
1716 /* ARG must have an integral type when RKIND says so. */
1717 gcc_assert (rkind != SRK_INT_VALUE || INTEGRAL_TYPE_P (TREE_TYPE (arg)));
1718
1719 bitmap visited = NULL;
1720
1721 /* Reset DATA.MAXLEN if the call fails or when DATA.MAXLEN
1722 is unbounded. */
1723 c_strlen_data lendata = { };
1724 if (!get_range_strlen (arg, &visited, rkind, &lendata, /* eltsize = */1))
1725 lendata.maxlen = NULL_TREE;
1726 else if (lendata.maxlen && integer_all_onesp (lendata.maxlen))
1727 lendata.maxlen = NULL_TREE;
1728
1729 if (visited)
1730 BITMAP_FREE (visited);
1731
1732 if (nonstr)
1733 {
1734 /* For callers prepared to handle unterminated arrays set
1735 *NONSTR to point to the declaration of the array and return
1736 the maximum length/size. */
1737 *nonstr = lendata.decl;
1738 return lendata.maxlen;
1739 }
1740
1741 /* Fail if the constant array isn't nul-terminated. */
1742 return lendata.decl ? NULL_TREE : lendata.maxlen;
1743 }
1744
1745
1746 /* Fold function call to builtin strcpy with arguments DEST and SRC.
1747 Attempt to turn it into a call to memcpy when the length of the
1748 source string is known. Return false if no simplification can be made. */
1749
1750 static bool
1751 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
1752 tree dest, tree src)
1753 {
1754 gimple *stmt = gsi_stmt (*gsi);
1755 location_t loc = gimple_location (stmt);
1756 tree fn;
1757
1758 /* If SRC and DEST are the same (and not volatile), return DEST. */
1759 if (operand_equal_p (src, dest, 0))
1760 {
1761 /* Issue -Wrestrict unless the pointers are null (those do
1762 not point to objects and so do not indicate an overlap;
1763 such calls could be the result of sanitization and jump
1764 threading). */
1765 if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
1766 {
1767 tree func = gimple_call_fndecl (stmt);
1768
1769 warning_at (loc, OPT_Wrestrict,
1770 "%qD source argument is the same as destination",
1771 func);
1772 }
1773
1774 replace_call_with_value (gsi, dest);
1775 return true;
1776 }
1777
1778 if (optimize_function_for_size_p (cfun))
1779 return false;
1780
1781 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1782 if (!fn)
1783 return false;
1784
1785 /* Set to non-null if SRC refers to an unterminated array. */
1786 tree nonstr = NULL;
1787 tree len = get_maxval_strlen (src, SRK_STRLEN, &nonstr);
1788
1789 if (nonstr)
1790 {
1791 /* Avoid folding calls with unterminated arrays. */
1792 if (!gimple_no_warning_p (stmt))
1793 warn_string_no_nul (loc, "strcpy", src, nonstr);
1794 gimple_set_no_warning (stmt, true);
1795 return false;
1796 }
1797
1798 if (!len)
1799 return false;
1800
1801 len = fold_convert_loc (loc, size_type_node, len);
1802 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1803 len = force_gimple_operand_gsi (gsi, len, true,
1804 NULL_TREE, true, GSI_SAME_STMT);
1805 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1806 replace_call_with_call_and_fold (gsi, repl);
1807 return true;
1808 }
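
/* As an illustration (D and P are hypothetical operands), with the
   length of the source known the transformation above turns

     char d[8];
     strcpy (d, "abc");

   into

     __builtin_memcpy (d, "abc", 4);

   copying the three characters plus the terminating nul, while
   strcpy (p, p) is folded away to P with a -Wrestrict diagnostic
   unless P is known to be null.  */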
1809
1810 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1811 Attempt to turn it into a call to memcpy when the bound and the
1812 source length are known. Return false if no simplification can be made. */
1813
1814 static bool
1815 gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1816 tree dest, tree src, tree len)
1817 {
1818 gimple *stmt = gsi_stmt (*gsi);
1819 location_t loc = gimple_location (stmt);
1820 bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;
1821
1822 /* If the LEN parameter is zero, return DEST. */
1823 if (integer_zerop (len))
1824 {
1825 /* Avoid warning if the destination refers to an array/pointer
1826 declared with attribute nonstring. */
1827 if (!nonstring)
1828 {
1829 tree fndecl = gimple_call_fndecl (stmt);
1830
1831 /* Warn about the lack of nul termination: the result is not
1832 a (nul-terminated) string. */
1833 tree slen = get_maxval_strlen (src, SRK_STRLEN);
1834 if (slen && !integer_zerop (slen))
1835 warning_at (loc, OPT_Wstringop_truncation,
1836 "%G%qD destination unchanged after copying no bytes "
1837 "from a string of length %E",
1838 stmt, fndecl, slen);
1839 else
1840 warning_at (loc, OPT_Wstringop_truncation,
1841 "%G%qD destination unchanged after copying no bytes",
1842 stmt, fndecl);
1843 }
1844
1845 replace_call_with_value (gsi, dest);
1846 return true;
1847 }
1848
1849 /* We can't compare slen with len as constants below if len is not a
1850 constant. */
1851 if (TREE_CODE (len) != INTEGER_CST)
1852 return false;
1853
1854 /* Now, we must be passed a constant src ptr parameter. */
1855 tree slen = get_maxval_strlen (src, SRK_STRLEN);
1856 if (!slen || TREE_CODE (slen) != INTEGER_CST)
1857 return false;
1858
1859 /* The size of the source string including the terminating nul. */
1860 tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1861
1862 /* We do not support simplification of this case, though we do
1863 support it when expanding trees into RTL. */
1864 /* FIXME: generate a call to __builtin_memset. */
1865 if (tree_int_cst_lt (ssize, len))
1866 return false;
1867
1868 /* Diagnose truncation that leaves the copy unterminated. */
1869 maybe_diag_stxncpy_trunc (*gsi, src, len);
1870
1871 /* OK, transform into builtin memcpy. */
1872 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1873 if (!fn)
1874 return false;
1875
1876 len = fold_convert_loc (loc, size_type_node, len);
1877 len = force_gimple_operand_gsi (gsi, len, true,
1878 NULL_TREE, true, GSI_SAME_STMT);
1879 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1880 replace_call_with_call_and_fold (gsi, repl);
1881
1882 return true;
1883 }
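
/* For illustration (D is a hypothetical buffer): when the constant
   bound does not exceed the size of the nul-terminated source,

     strncpy (d, "ab", 3);

   becomes

     __builtin_memcpy (d, "ab", 3);

   which copies both characters and the terminating nul.  A bound
   smaller than the source length is likewise folded to memcpy, after
   maybe_diag_stxncpy_trunc has had a chance to warn about the
   truncation.  */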
1884
1885 /* Fold function call to builtin strchr or strrchr.
1886 If both arguments are constant, evaluate and fold the result,
1887 otherwise simplify str(r)chr (str, 0) into str + strlen (str).
1888 In general strlen is significantly faster than strchr
1889 due to being a simpler operation. */
1890 static bool
1891 gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
1892 {
1893 gimple *stmt = gsi_stmt (*gsi);
1894 tree str = gimple_call_arg (stmt, 0);
1895 tree c = gimple_call_arg (stmt, 1);
1896 location_t loc = gimple_location (stmt);
1897 const char *p;
1898 char ch;
1899
1900 if (!gimple_call_lhs (stmt))
1901 return false;
1902
1903 if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
1904 {
1905 const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);
1906
1907 if (p1 == NULL)
1908 {
1909 replace_call_with_value (gsi, integer_zero_node);
1910 return true;
1911 }
1912
1913 tree len = build_int_cst (size_type_node, p1 - p);
1914 gimple_seq stmts = NULL;
1915 gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1916 POINTER_PLUS_EXPR, str, len);
1917 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1918 gsi_replace_with_seq_vops (gsi, stmts);
1919 return true;
1920 }
1921
1922 if (!integer_zerop (c))
1923 return false;
1924
1925 /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size. */
1926 if (is_strrchr && optimize_function_for_size_p (cfun))
1927 {
1928 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1929
1930 if (strchr_fn)
1931 {
1932 gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
1933 replace_call_with_call_and_fold (gsi, repl);
1934 return true;
1935 }
1936
1937 return false;
1938 }
1939
1940 tree len;
1941 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1942
1943 if (!strlen_fn)
1944 return false;
1945
1946 /* Create newstr = strlen (str). */
1947 gimple_seq stmts = NULL;
1948 gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
1949 gimple_set_location (new_stmt, loc);
1950 len = create_tmp_reg_or_ssa_name (size_type_node);
1951 gimple_call_set_lhs (new_stmt, len);
1952 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1953
1954 /* Create (str p+ strlen (str)). */
1955 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1956 POINTER_PLUS_EXPR, str, len);
1957 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1958 gsi_replace_with_seq_vops (gsi, stmts);
1959 /* gsi now points at the assignment to the lhs, get a
1960 stmt iterator to the strlen.
1961 ??? We can't use gsi_for_stmt as that doesn't work when the
1962 CFG isn't built yet. */
1963 gimple_stmt_iterator gsi2 = *gsi;
1964 gsi_prev (&gsi2);
1965 fold_stmt (&gsi2);
1966 return true;
1967 }
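
/* Illustrative examples (the variables are hypothetical): with both
   arguments constant,

     p = strchr ("hello", 'l');

   folds to p = "hello" + 2, and searching for the terminating nul,

     p = strchr (s, 0);

   becomes

     len = strlen (s);
     p = s + len;

   since strlen is typically the cheaper operation.  */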
1968
1969 /* Fold function call to builtin strstr.
1970 If both arguments are constant, evaluate and fold the result,
1971 additionally fold strstr (x, "") into x and strstr (x, "c")
1972 into strchr (x, 'c'). */
1973 static bool
1974 gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
1975 {
1976 gimple *stmt = gsi_stmt (*gsi);
1977 tree haystack = gimple_call_arg (stmt, 0);
1978 tree needle = gimple_call_arg (stmt, 1);
1979 const char *p, *q;
1980
1981 if (!gimple_call_lhs (stmt))
1982 return false;
1983
1984 q = c_getstr (needle);
1985 if (q == NULL)
1986 return false;
1987
1988 if ((p = c_getstr (haystack)))
1989 {
1990 const char *r = strstr (p, q);
1991
1992 if (r == NULL)
1993 {
1994 replace_call_with_value (gsi, integer_zero_node);
1995 return true;
1996 }
1997
1998 tree len = build_int_cst (size_type_node, r - p);
1999 gimple_seq stmts = NULL;
2000 gimple *new_stmt
2001 = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
2002 haystack, len);
2003 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2004 gsi_replace_with_seq_vops (gsi, stmts);
2005 return true;
2006 }
2007
2008 /* For strstr (x, "") return x. */
2009 if (q[0] == '\0')
2010 {
2011 replace_call_with_value (gsi, haystack);
2012 return true;
2013 }
2014
2015 /* Transform strstr (x, "c") into strchr (x, 'c'). */
2016 if (q[1] == '\0')
2017 {
2018 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
2019 if (strchr_fn)
2020 {
2021 tree c = build_int_cst (integer_type_node, q[0]);
2022 gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
2023 replace_call_with_call_and_fold (gsi, repl);
2024 return true;
2025 }
2026 }
2027
2028 return false;
2029 }
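
/* For example (with hypothetical operands):

     p = strstr ("hello", "ll");    p = "hello" + 2;
     p = strstr (x, "");            p = x;
     p = strstr (x, "c");           p = strchr (x, 'c');

   the right-hand column showing the folded form in each case.  */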
2030
2031 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
2032 to the call.
2033
2034 Return true if a simplification was made, otherwise false.
2035
2036 When SRC is the empty string the call is replaced by DST. Otherwise,
2037 when the length of SRC is known and speed is preferred over size, the
2038 call is split into a call to strlen (DST) followed by a memcpy of
2039 SRC (including its terminating nul) into DST + strlen (DST). */
2048
2049 static bool
2050 gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
2051 {
2052 gimple *stmt = gsi_stmt (*gsi);
2053 location_t loc = gimple_location (stmt);
2054
2055 const char *p = c_getstr (src);
2056
2057 /* If the string length is zero, return the dst parameter. */
2058 if (p && *p == '\0')
2059 {
2060 replace_call_with_value (gsi, dst);
2061 return true;
2062 }
2063
2064 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
2065 return false;
2066
2067 /* See if we can store by pieces into (dst + strlen(dst)). */
2068 tree newdst;
2069 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
2070 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2071
2072 if (!strlen_fn || !memcpy_fn)
2073 return false;
2074
2075 /* If the length of the source string isn't computable don't
2076 split strcat into strlen and memcpy. */
2077 tree len = get_maxval_strlen (src, SRK_STRLEN);
2078 if (! len)
2079 return false;
2080
2081 /* Create strlen (dst). */
2082 gimple_seq stmts = NULL, stmts2;
2083 gimple *repl = gimple_build_call (strlen_fn, 1, dst);
2084 gimple_set_location (repl, loc);
2085 newdst = create_tmp_reg_or_ssa_name (size_type_node);
2086 gimple_call_set_lhs (repl, newdst);
2087 gimple_seq_add_stmt_without_update (&stmts, repl);
2088
2089 /* Create (dst p+ strlen (dst)). */
2090 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
2091 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
2092 gimple_seq_add_seq_without_update (&stmts, stmts2);
2093
2094 len = fold_convert_loc (loc, size_type_node, len);
2095 len = size_binop_loc (loc, PLUS_EXPR, len,
2096 build_int_cst (size_type_node, 1));
2097 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
2098 gimple_seq_add_seq_without_update (&stmts, stmts2);
2099
2100 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
2101 gimple_seq_add_stmt_without_update (&stmts, repl);
2102 if (gimple_call_lhs (stmt))
2103 {
2104 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
2105 gimple_seq_add_stmt_without_update (&stmts, repl);
2106 gsi_replace_with_seq_vops (gsi, stmts);
2107 /* gsi now points at the assignment to the lhs, get a
2108 stmt iterator to the memcpy call.
2109 ??? We can't use gsi_for_stmt as that doesn't work when the
2110 CFG isn't built yet. */
2111 gimple_stmt_iterator gsi2 = *gsi;
2112 gsi_prev (&gsi2);
2113 fold_stmt (&gsi2);
2114 }
2115 else
2116 {
2117 gsi_replace_with_seq_vops (gsi, stmts);
2118 fold_stmt (gsi);
2119 }
2120 return true;
2121 }
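
/* As a sketch of the transformation above (D is a hypothetical,
   sufficiently large buffer), when optimizing for speed

     strcat (d, "ab");

   is split into

     tmp = strlen (d);
     __builtin_memcpy (d + tmp, "ab", 3);

   copying the source together with its terminating nul.  */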
2122
2123 /* Fold a call to the __strcat_chk builtin. DEST, SRC, and SIZE
2124 are the arguments to the call. */
2125
2126 static bool
2127 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
2128 {
2129 gimple *stmt = gsi_stmt (*gsi);
2130 tree dest = gimple_call_arg (stmt, 0);
2131 tree src = gimple_call_arg (stmt, 1);
2132 tree size = gimple_call_arg (stmt, 2);
2133 tree fn;
2134 const char *p;
2135
2137 p = c_getstr (src);
2138 /* If the SRC parameter is "", return DEST. */
2139 if (p && *p == '\0')
2140 {
2141 replace_call_with_value (gsi, dest);
2142 return true;
2143 }
2144
2145 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
2146 return false;
2147
2148 /* If __builtin_strcat_chk is used, assume strcat is available. */
2149 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
2150 if (!fn)
2151 return false;
2152
2153 gimple *repl = gimple_build_call (fn, 2, dest, src);
2154 replace_call_with_call_and_fold (gsi, repl);
2155 return true;
2156 }
2157
2158 /* Simplify a call to the strncat builtin. */
2159
2160 static bool
2161 gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
2162 {
2163 gimple *stmt = gsi_stmt (*gsi);
2164 tree dst = gimple_call_arg (stmt, 0);
2165 tree src = gimple_call_arg (stmt, 1);
2166 tree len = gimple_call_arg (stmt, 2);
2167
2168 const char *p = c_getstr (src);
2169
2170 /* If the requested length is zero, or the src parameter string
2171 length is zero, return the dst parameter. */
2172 if (integer_zerop (len) || (p && *p == '\0'))
2173 {
2174 replace_call_with_value (gsi, dst);
2175 return true;
2176 }
2177
2178 if (TREE_CODE (len) != INTEGER_CST || !p)
2179 return false;
2180
2181 unsigned srclen = strlen (p);
2182
2183 int cmpsrc = compare_tree_int (len, srclen);
2184
2185 /* Return early if the requested len is less than the string length.
2186 Warnings will be issued elsewhere later. */
2187 if (cmpsrc < 0)
2188 return false;
2189
2190 unsigned HOST_WIDE_INT dstsize;
2191
2192 bool nowarn = gimple_no_warning_p (stmt);
2193
2194 if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
2195 {
2196 int cmpdst = compare_tree_int (len, dstsize);
2197
2198 if (cmpdst >= 0)
2199 {
2200 tree fndecl = gimple_call_fndecl (stmt);
2201
2202 /* Strncat copies (at most) LEN bytes and always appends
2203 the terminating NUL so the specified bound should never
2204 be equal to (or greater than) the size of the destination.
2205 If it is, the copy could overflow. */
2206 location_t loc = gimple_location (stmt);
2207 nowarn = warning_at (loc, OPT_Wstringop_overflow_,
2208 cmpdst == 0
2209 ? G_("%G%qD specified bound %E equals "
2210 "destination size")
2211 : G_("%G%qD specified bound %E exceeds "
2212 "destination size %wu"),
2213 stmt, fndecl, len, dstsize);
2214 if (nowarn)
2215 gimple_set_no_warning (stmt, true);
2216 }
2217 }
2218
2219 if (!nowarn && cmpsrc == 0)
2220 {
2221 tree fndecl = gimple_call_fndecl (stmt);
2222 location_t loc = gimple_location (stmt);
2223
2224 /* To avoid possible overflow the specified bound should also
2225 not be equal to the length of the source, even when the size
2226 of the destination is unknown (it's not an uncommon mistake
2227 to specify as the bound to strncat the length of the source). */
2228 if (warning_at (loc, OPT_Wstringop_overflow_,
2229 "%G%qD specified bound %E equals source length",
2230 stmt, fndecl, len))
2231 gimple_set_no_warning (stmt, true);
2232 }
2233
2234 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
2235
2236 /* If the replacement _DECL isn't initialized, don't do the
2237 transformation. */
2238 if (!fn)
2239 return false;
2240
2241 /* Otherwise, emit a call to strcat. */
2242 gcall *repl = gimple_build_call (fn, 2, dst, src);
2243 replace_call_with_call_and_fold (gsi, repl);
2244 return true;
2245 }
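
/* For illustration (hypothetical operands): since the bound 5 exceeds
   strlen ("ab"),

     strncat (d, "ab", 5);

   is folded into

     strcat (d, "ab");

   after the checks above have had a chance to diagnose a bound that
   equals the source length or reaches the destination size.  */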
2246
2247 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2248 LEN, and SIZE. */
2249
2250 static bool
2251 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
2252 {
2253 gimple *stmt = gsi_stmt (*gsi);
2254 tree dest = gimple_call_arg (stmt, 0);
2255 tree src = gimple_call_arg (stmt, 1);
2256 tree len = gimple_call_arg (stmt, 2);
2257 tree size = gimple_call_arg (stmt, 3);
2258 tree fn;
2259 const char *p;
2260
2261 p = c_getstr (src);
2262 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2263 if ((p && *p == '\0')
2264 || integer_zerop (len))
2265 {
2266 replace_call_with_value (gsi, dest);
2267 return true;
2268 }
2269
2270 if (! tree_fits_uhwi_p (size))
2271 return false;
2272
2273 if (! integer_all_onesp (size))
2274 {
2275 tree src_len = c_strlen (src, 1);
2276 if (src_len
2277 && tree_fits_uhwi_p (src_len)
2278 && tree_fits_uhwi_p (len)
2279 && ! tree_int_cst_lt (len, src_len))
2280 {
2281 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2282 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
2283 if (!fn)
2284 return false;
2285
2286 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2287 replace_call_with_call_and_fold (gsi, repl);
2288 return true;
2289 }
2290 return false;
2291 }
2292
2293 /* If __builtin_strncat_chk is used, assume strncat is available. */
2294 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2295 if (!fn)
2296 return false;
2297
2298 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2299 replace_call_with_call_and_fold (gsi, repl);
2300 return true;
2301 }
2302
2303 /* Build and append gimple statements to STMTS that load the first
2304 character of the memory location identified by STR. LOC is the
2305 location of the statement. */
2306
2307 static tree
2308 gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2309 {
2310 tree var;
2311
2312 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2313 tree cst_uchar_ptr_node
2314 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2315 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2316
2317 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2318 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2319 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2320
2321 gimple_assign_set_lhs (stmt, var);
2322 gimple_seq_add_stmt_without_update (stmts, stmt);
2323
2324 return var;
2325 }
2326
2327 /* Fold a call to the str{n}{case}cmp builtin pointed to by the GSI
2328 iterator. FCODE is the BUILT_IN_* code of the callee. */
2329
2330 static bool
2331 gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
2332 {
2333 gimple *stmt = gsi_stmt (*gsi);
2334 tree callee = gimple_call_fndecl (stmt);
2335 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2336
2337 tree type = integer_type_node;
2338 tree str1 = gimple_call_arg (stmt, 0);
2339 tree str2 = gimple_call_arg (stmt, 1);
2340 tree lhs = gimple_call_lhs (stmt);
2341 HOST_WIDE_INT length = -1;
2342
2343 /* Handle strncmp and strncasecmp functions. */
2344 if (gimple_call_num_args (stmt) == 3)
2345 {
2346 tree len = gimple_call_arg (stmt, 2);
2347 if (tree_fits_uhwi_p (len))
2348 length = tree_to_uhwi (len);
2349 }
2350
2351 /* If the LEN parameter is zero, return zero. */
2352 if (length == 0)
2353 {
2354 replace_call_with_value (gsi, integer_zero_node);
2355 return true;
2356 }
2357
2358 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
2359 if (operand_equal_p (str1, str2, 0))
2360 {
2361 replace_call_with_value (gsi, integer_zero_node);
2362 return true;
2363 }
2364
2365 const char *p1 = c_getstr (str1);
2366 const char *p2 = c_getstr (str2);
2367
2368 /* For known strings, return an immediate value. */
2369 if (p1 && p2)
2370 {
2371 int r = 0;
2372 bool known_result = false;
2373
2374 switch (fcode)
2375 {
2376 case BUILT_IN_STRCMP:
2377 case BUILT_IN_STRCMP_EQ:
2378 {
2379 r = strcmp (p1, p2);
2380 known_result = true;
2381 break;
2382 }
2383 case BUILT_IN_STRNCMP:
2384 case BUILT_IN_STRNCMP_EQ:
2385 {
2386 if (length == -1)
2387 break;
2388 r = strncmp (p1, p2, length);
2389 known_result = true;
2390 break;
2391 }
2392 /* The only situation we can handle is where the strings are equal
2393 (result 0), which is already handled by the operand_equal_p case. */
2394 case BUILT_IN_STRCASECMP:
2395 break;
2396 case BUILT_IN_STRNCASECMP:
2397 {
2398 if (length == -1)
2399 break;
2400 r = strncmp (p1, p2, length);
2401 if (r == 0)
2402 known_result = true;
2403 break;
2404 }
2405 default:
2406 gcc_unreachable ();
2407 }
2408
2409 if (known_result)
2410 {
2411 replace_call_with_value (gsi, build_cmp_result (type, r));
2412 return true;
2413 }
2414 }
2415
2416 bool nonzero_length = length >= 1
2417 || fcode == BUILT_IN_STRCMP
2418 || fcode == BUILT_IN_STRCMP_EQ
2419 || fcode == BUILT_IN_STRCASECMP;
2420
2421 location_t loc = gimple_location (stmt);
2422
2423 /* If the second arg is "", return *(const unsigned char*)arg1. */
2424 if (p2 && *p2 == '\0' && nonzero_length)
2425 {
2426 gimple_seq stmts = NULL;
2427 tree var = gimple_load_first_char (loc, str1, &stmts);
2428 if (lhs)
2429 {
2430 stmt = gimple_build_assign (lhs, NOP_EXPR, var);
2431 gimple_seq_add_stmt_without_update (&stmts, stmt);
2432 }
2433
2434 gsi_replace_with_seq_vops (gsi, stmts);
2435 return true;
2436 }
2437
2438 /* If the first arg is "", return -*(const unsigned char*)arg2. */
2439 if (p1 && *p1 == '\0' && nonzero_length)
2440 {
2441 gimple_seq stmts = NULL;
2442 tree var = gimple_load_first_char (loc, str2, &stmts);
2443
2444 if (lhs)
2445 {
2446 tree c = create_tmp_reg_or_ssa_name (integer_type_node);
2447 stmt = gimple_build_assign (c, NOP_EXPR, var);
2448 gimple_seq_add_stmt_without_update (&stmts, stmt);
2449
2450 stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
2451 gimple_seq_add_stmt_without_update (&stmts, stmt);
2452 }
2453
2454 gsi_replace_with_seq_vops (gsi, stmts);
2455 return true;
2456 }
2457
2458 /* If the len parameter is one, return an expression corresponding to
2459 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
2460 if (fcode == BUILT_IN_STRNCMP && length == 1)
2461 {
2462 gimple_seq stmts = NULL;
2463 tree temp1 = gimple_load_first_char (loc, str1, &stmts);
2464 tree temp2 = gimple_load_first_char (loc, str2, &stmts);
2465
2466 if (lhs)
2467 {
2468 tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
2469 gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
2470 gimple_seq_add_stmt_without_update (&stmts, convert1);
2471
2472 tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
2473 gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
2474 gimple_seq_add_stmt_without_update (&stmts, convert2);
2475
2476 stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
2477 gimple_seq_add_stmt_without_update (&stmts, stmt);
2478 }
2479
2480 gsi_replace_with_seq_vops (gsi, stmts);
2481 return true;
2482 }
2483
2484 /* If the length is larger than the length of one constant string,
2485 replace strncmp with the corresponding strcmp. */
2486 if (fcode == BUILT_IN_STRNCMP
2487 && length > 0
2488 && ((p2 && (size_t) length > strlen (p2))
2489 || (p1 && (size_t) length > strlen (p1))))
2490 {
2491 tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
2492 if (!fn)
2493 return false;
2494 gimple *repl = gimple_build_call (fn, 2, str1, str2);
2495 replace_call_with_call_and_fold (gsi, repl);
2496 return true;
2497 }
2498
2499 return false;
2500 }
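
/* A few illustrative foldings performed above (names hypothetical):

     r = strcmp (s, s);         r = 0;
     r = strcmp ("a", "b");     r = negative compile-time constant;
     r = strcmp (s, "");        r = *(const unsigned char *) s;
     r = strncmp (a, b, 1);     r = (int) *(const unsigned char *) a
                                    - (int) *(const unsigned char *) b;
     r = strncmp (a, "bc", 5);  r = strcmp (a, "bc");  */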
2501
2502 /* Fold a call to the memchr pointed by GSI iterator. */
2503
2504 static bool
2505 gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
2506 {
2507 gimple *stmt = gsi_stmt (*gsi);
2508 tree lhs = gimple_call_lhs (stmt);
2509 tree arg1 = gimple_call_arg (stmt, 0);
2510 tree arg2 = gimple_call_arg (stmt, 1);
2511 tree len = gimple_call_arg (stmt, 2);
2512
2513 /* If the LEN parameter is zero, return zero. */
2514 if (integer_zerop (len))
2515 {
2516 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2517 return true;
2518 }
2519
2520 char c;
2521 if (TREE_CODE (arg2) != INTEGER_CST
2522 || !tree_fits_uhwi_p (len)
2523 || !target_char_cst_p (arg2, &c))
2524 return false;
2525
2526 unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
2527 unsigned HOST_WIDE_INT string_length;
2528 const char *p1 = c_getstr (arg1, &string_length);
2529
2530 if (p1)
2531 {
2532 const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
2533 if (r == NULL)
2534 {
2535 tree mem_size, offset_node;
2536 string_constant (arg1, &offset_node, &mem_size, NULL);
2537 unsigned HOST_WIDE_INT offset = (offset_node == NULL_TREE)
2538 ? 0 : tree_to_uhwi (offset_node);
2539 /* MEM_SIZE is the size of the array the string literal
2540 is stored in. */
2541 unsigned HOST_WIDE_INT string_size = tree_to_uhwi (mem_size) - offset;
2542 gcc_checking_assert (string_length <= string_size);
2543 if (length <= string_size)
2544 {
2545 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2546 return true;
2547 }
2548 }
2549 else
2550 {
2551 unsigned HOST_WIDE_INT offset = r - p1;
2552 gimple_seq stmts = NULL;
2553 if (lhs != NULL_TREE)
2554 {
2555 tree offset_cst = build_int_cst (TREE_TYPE (len), offset);
2556 gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
2557 arg1, offset_cst);
2558 gimple_seq_add_stmt_without_update (&stmts, stmt);
2559 }
2560 else
2561 gimple_seq_add_stmt_without_update (&stmts,
2562 gimple_build_nop ());
2563
2564 gsi_replace_with_seq_vops (gsi, stmts);
2565 return true;
2566 }
2567 }
2568
2569 return false;
2570 }
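
/* Examples of the folding above (operands are hypothetical):

     p = memchr (s, c, 0);          p = 0;
     p = memchr ("hello", 'l', 5);  p = "hello" + 2;
     p = memchr ("hello", 'z', 5);  p = 0;

   the last case relying on the bound not exceeding the size of the
   array backing the string literal.  */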
2571
2572 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
2573 to the call. UNLOCKED is true if this is actually a call to
2574 fputs_unlocked. Return false if no simplification was possible,
2575 otherwise true. */
2578
2579 static bool
2580 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
2581 tree arg0, tree arg1,
2582 bool unlocked)
2583 {
2584 gimple *stmt = gsi_stmt (*gsi);
2585
2586 /* If we're using an unlocked function, assume the other unlocked
2587 functions exist explicitly. */
2588 tree const fn_fputc = (unlocked
2589 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
2590 : builtin_decl_implicit (BUILT_IN_FPUTC));
2591 tree const fn_fwrite = (unlocked
2592 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
2593 : builtin_decl_implicit (BUILT_IN_FWRITE));
2594
2595 /* If the return value is used, don't do the transformation. */
2596 if (gimple_call_lhs (stmt))
2597 return false;
2598
2599 /* Get the length of the string passed to fputs. If the length
2600 can't be determined, punt. */
2601 tree len = get_maxval_strlen (arg0, SRK_STRLEN);
2602 if (!len
2603 || TREE_CODE (len) != INTEGER_CST)
2604 return false;
2605
2606 switch (compare_tree_int (len, 1))
2607 {
2608 case -1: /* length is 0, delete the call entirely. */
2609 replace_call_with_value (gsi, integer_zero_node);
2610 return true;
2611
2612 case 0: /* length is 1, call fputc. */
2613 {
2614 const char *p = c_getstr (arg0);
2615 if (p != NULL)
2616 {
2617 if (!fn_fputc)
2618 return false;
2619
2620 gimple *repl = gimple_build_call (fn_fputc, 2,
2621 build_int_cst
2622 (integer_type_node, p[0]), arg1);
2623 replace_call_with_call_and_fold (gsi, repl);
2624 return true;
2625 }
2626 }
2627 /* FALLTHROUGH */
2628 case 1: /* length is greater than 1, call fwrite. */
2629 {
2630 /* If optimizing for size keep fputs. */
2631 if (optimize_function_for_size_p (cfun))
2632 return false;
2633 /* New argument list transforming fputs(string, stream) to
2634 fwrite(string, 1, len, stream). */
2635 if (!fn_fwrite)
2636 return false;
2637
2638 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
2639 size_one_node, len, arg1);
2640 replace_call_with_call_and_fold (gsi, repl);
2641 return true;
2642 }
2643 default:
2644 gcc_unreachable ();
2645 }
2646 return false;
2647 }
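
/* For illustration (F is a hypothetical stream):

     fputs ("", f);      removed entirely;
     fputs ("x", f);     fputc ('x', f);
     fputs ("abc", f);   fwrite ("abc", 1, 3, f), unless optimizing
                         for size.

   All of the above apply only when the return value is unused.  */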
2648
2649 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
2650 DEST, SRC, LEN, and SIZE are the arguments to the call. FCODE is
2651 the BUILT_IN_* code of the builtin. The call is replaced by a call
2652 to the unchecked function when the object size check is known
2653 to succeed. */
2654
2655 static bool
2656 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
2657 tree dest, tree src, tree len, tree size,
2658 enum built_in_function fcode)
2659 {
2660 gimple *stmt = gsi_stmt (*gsi);
2661 location_t loc = gimple_location (stmt);
2662 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2663 tree fn;
2664
2665 /* If SRC and DEST are the same (and not volatile), return DEST
2666 (resp. DEST+LEN for __mempcpy_chk). */
2667 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
2668 {
2669 if (fcode != BUILT_IN_MEMPCPY_CHK)
2670 {
2671 replace_call_with_value (gsi, dest);
2672 return true;
2673 }
2674 else
2675 {
2676 gimple_seq stmts = NULL;
2677 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
2678 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
2679 TREE_TYPE (dest), dest, len);
2680 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2681 replace_call_with_value (gsi, temp);
2682 return true;
2683 }
2684 }
2685
2686 if (! tree_fits_uhwi_p (size))
2687 return false;
2688
2689 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
2690 if (! integer_all_onesp (size))
2691 {
2692 if (! tree_fits_uhwi_p (len))
2693 {
2694 /* If LEN is not constant, try MAXLEN too.
2695 For MAXLEN only allow optimizing into non-_ocs function
2696 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2697 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2698 {
2699 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
2700 {
2701 /* (void) __mempcpy_chk () can be optimized into
2702 (void) __memcpy_chk (). */
2703 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2704 if (!fn)
2705 return false;
2706
2707 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2708 replace_call_with_call_and_fold (gsi, repl);
2709 return true;
2710 }
2711 return false;
2712 }
2713 }
2714 else
2715 maxlen = len;
2716
2717 if (tree_int_cst_lt (size, maxlen))
2718 return false;
2719 }
2720
2721 fn = NULL_TREE;
2722 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
2723 mem{cpy,pcpy,move,set} is available. */
2724 switch (fcode)
2725 {
2726 case BUILT_IN_MEMCPY_CHK:
2727 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
2728 break;
2729 case BUILT_IN_MEMPCPY_CHK:
2730 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
2731 break;
2732 case BUILT_IN_MEMMOVE_CHK:
2733 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
2734 break;
2735 case BUILT_IN_MEMSET_CHK:
2736 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
2737 break;
2738 default:
2739 break;
2740 }
2741
2742 if (!fn)
2743 return false;
2744
2745 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2746 replace_call_with_call_and_fold (gsi, repl);
2747 return true;
2748 }
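
/* Sketch of the transformation (hypothetical operands): when the
   object size check must succeed, e.g.

     __builtin___memcpy_chk (d, s, 8, 16);

   or when the size argument is unknown (all ones), the call is
   replaced by the unchecked

     __builtin_memcpy (d, s, 8);  */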
2749
2750 /* Fold a call to the __st[rp]cpy_chk builtin.
2751 DEST, SRC, and SIZE are the arguments to the call. FCODE is the
2752 BUILT_IN_* code of the builtin. The call is replaced by a call to
2753 the unchecked function when the object size check is known
2754 to succeed. */
2755
2756 static bool
2757 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
2758 tree dest,
2759 tree src, tree size,
2760 enum built_in_function fcode)
2761 {
2762 gimple *stmt = gsi_stmt (*gsi);
2763 location_t loc = gimple_location (stmt);
2764 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2765 tree len, fn;
2766
2767 /* If SRC and DEST are the same (and not volatile), return DEST. */
2768 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
2769 {
2770 /* Issue -Wrestrict unless the pointers are null (those do
2771 not point to objects and so do not indicate an overlap;
2772 such calls could be the result of sanitization and jump
2773 threading). */
2774 if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
2775 {
2776 tree func = gimple_call_fndecl (stmt);
2777
2778 warning_at (loc, OPT_Wrestrict,
2779 "%qD source argument is the same as destination",
2780 func);
2781 }
2782
2783 replace_call_with_value (gsi, dest);
2784 return true;
2785 }
2786
2787 if (! tree_fits_uhwi_p (size))
2788 return false;
2789
2790 tree maxlen = get_maxval_strlen (src, SRK_STRLENMAX);
2791 if (! integer_all_onesp (size))
2792 {
2793 len = c_strlen (src, 1);
2794 if (! len || ! tree_fits_uhwi_p (len))
2795 {
2796 /* If LEN is not constant, try MAXLEN too.
2797 For MAXLEN only allow optimizing into non-_ocs function
2798 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2799 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2800 {
2801 if (fcode == BUILT_IN_STPCPY_CHK)
2802 {
2803 if (! ignore)
2804 return false;
2805
2806 /* If return value of __stpcpy_chk is ignored,
2807 optimize into __strcpy_chk. */
2808 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
2809 if (!fn)
2810 return false;
2811
2812 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2813 replace_call_with_call_and_fold (gsi, repl);
2814 return true;
2815 }
2816
2817 if (! len || TREE_SIDE_EFFECTS (len))
2818 return false;
2819
2820 /* If c_strlen returned something, but not a constant,
2821 transform __strcpy_chk into __memcpy_chk. */
2822 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2823 if (!fn)
2824 return false;
2825
2826 gimple_seq stmts = NULL;
2827 len = force_gimple_operand (len, &stmts, true, NULL_TREE);
2828 len = gimple_convert (&stmts, loc, size_type_node, len);
2829 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
2830 build_int_cst (size_type_node, 1));
2831 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2832 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2833 replace_call_with_call_and_fold (gsi, repl);
2834 return true;
2835 }
2836 }
2837 else
2838 maxlen = len;
2839
2840 if (! tree_int_cst_lt (maxlen, size))
2841 return false;
2842 }
2843
2844 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
2845 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
2846 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
2847 if (!fn)
2848 return false;
2849
2850 gimple *repl = gimple_build_call (fn, 2, dest, src);
2851 replace_call_with_call_and_fold (gsi, repl);
2852 return true;
2853 }
2854
2855 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
2856 are the arguments to the call. FCODE is the BUILT_IN_* code of the
2857 builtin. The call is replaced by a call to the unchecked function
2858 when the object size check is known to succeed. */
2859
2860 static bool
2861 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
2862 tree dest, tree src,
2863 tree len, tree size,
2864 enum built_in_function fcode)
2865 {
2866 gimple *stmt = gsi_stmt (*gsi);
2867 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2868 tree fn;
2869
2870 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
2871 {
2872 /* If return value of __stpncpy_chk is ignored,
2873 optimize into __strncpy_chk. */
2874 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
2875 if (fn)
2876 {
2877 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2878 replace_call_with_call_and_fold (gsi, repl);
2879 return true;
2880 }
2881 }
2882
2883 if (! tree_fits_uhwi_p (size))
2884 return false;
2885
2886 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
2887 if (! integer_all_onesp (size))
2888 {
2889 if (! tree_fits_uhwi_p (len))
2890 {
2891 /* If LEN is not constant, try MAXLEN too.
2892 For MAXLEN only allow optimizing into non-_ocs function
2893 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2894 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2895 return false;
2896 }
2897 else
2898 maxlen = len;
2899
2900 if (tree_int_cst_lt (size, maxlen))
2901 return false;
2902 }
2903
2904 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2905 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
2906 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
2907 if (!fn)
2908 return false;
2909
2910 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2911 replace_call_with_call_and_fold (gsi, repl);
2912 return true;
2913 }
2914
2915 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
2916 Return false if no simplification can be made. */
2917
2918 static bool
2919 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
2920 {
2921 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2922 location_t loc = gimple_location (stmt);
2923 tree dest = gimple_call_arg (stmt, 0);
2924 tree src = gimple_call_arg (stmt, 1);
2925 tree fn, lenp1;
2926
2927 /* If the result is unused, replace stpcpy with strcpy. */
2928 if (gimple_call_lhs (stmt) == NULL_TREE)
2929 {
2930 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2931 if (!fn)
2932 return false;
2933 gimple_call_set_fndecl (stmt, fn);
2934 fold_stmt (gsi);
2935 return true;
2936 }
2937
2938 /* Set to non-null if SRC refers to an unterminated array. */
2939 c_strlen_data data = { };
2940 tree len = c_strlen (src, 1, &data, 1);
2941 if (!len
2942 || TREE_CODE (len) != INTEGER_CST)
2943 {
2944 data.decl = unterminated_array (src);
2945 if (!data.decl)
2946 return false;
2947 }
2948
2949 if (data.decl)
2950 {
2951 /* Avoid folding calls with unterminated arrays. */
2952 if (!gimple_no_warning_p (stmt))
2953 warn_string_no_nul (loc, "stpcpy", src, data.decl);
2954 gimple_set_no_warning (stmt, true);
2955 return false;
2956 }
2957
2958 if (optimize_function_for_size_p (cfun)
2959 /* If length is zero it's small enough. */
2960 && !integer_zerop (len))
2961 return false;
2962
2963 /* If the source has a known length replace stpcpy with memcpy. */
2964 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2965 if (!fn)
2966 return false;
2967
2968 gimple_seq stmts = NULL;
2969 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
2970 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
2971 tem, build_int_cst (size_type_node, 1));
2972 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2973 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
2974 gimple_move_vops (repl, stmt);
2975 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
2976 /* Replace the result with dest + len. */
2977 stmts = NULL;
2978 tem = gimple_convert (&stmts, loc, sizetype, len);
2979 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2980 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
2981 POINTER_PLUS_EXPR, dest, tem);
2982 gsi_replace (gsi, ret, false);
2983 /* Finally fold the memcpy call. */
2984 gimple_stmt_iterator gsi2 = *gsi;
2985 gsi_prev (&gsi2);
2986 fold_stmt (&gsi2);
2987 return true;
2988 }
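
/* Illustration of the folding above (hypothetical operands): with the
   source length known,

     p = stpcpy (d, "abc");

   becomes

     __builtin_memcpy (d, "abc", 4);
     p = d + 3;

   and when the result is unused the call is simply rewritten as
   strcpy (d, "abc").  */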
2989
2990 /* Fold a call to the __{,v}snprintf_chk builtin pointed to by GSI.
2991 Return false if a normal call should be emitted rather than
2992 transforming the call. FCODE is either BUILT_IN_SNPRINTF_CHK or
2993 BUILT_IN_VSNPRINTF_CHK. */
2995
2996 static bool
2997 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
2998 enum built_in_function fcode)
2999 {
3000 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3001 tree dest, size, len, fn, fmt, flag;
3002 const char *fmt_str;
3003
3004 /* Verify the required arguments in the original call. */
3005 if (gimple_call_num_args (stmt) < 5)
3006 return false;
3007
3008 dest = gimple_call_arg (stmt, 0);
3009 len = gimple_call_arg (stmt, 1);
3010 flag = gimple_call_arg (stmt, 2);
3011 size = gimple_call_arg (stmt, 3);
3012 fmt = gimple_call_arg (stmt, 4);
3013
3014 if (! tree_fits_uhwi_p (size))
3015 return false;
3016
3017 if (! integer_all_onesp (size))
3018 {
3019 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
3020 if (! tree_fits_uhwi_p (len))
3021 {
3022 /* If LEN is not constant, try MAXLEN too.
3023 For MAXLEN only allow optimizing into non-_ocs function
3024 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
3025 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
3026 return false;
3027 }
3028 else
3029 maxlen = len;
3030
3031 if (tree_int_cst_lt (size, maxlen))
3032 return false;
3033 }
3034
3035 if (!init_target_chars ())
3036 return false;
3037
3038 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
3039 or if format doesn't contain % chars or is "%s". */
3040 if (! integer_zerop (flag))
3041 {
3042 fmt_str = c_getstr (fmt);
3043 if (fmt_str == NULL)
3044 return false;
3045 if (strchr (fmt_str, target_percent) != NULL
3046 && strcmp (fmt_str, target_percent_s))
3047 return false;
3048 }
3049
3050 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
3051 available. */
3052 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
3053 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
3054 if (!fn)
3055 return false;
3056
3057 /* Replace the called function and the first 5 arguments by 3,
3058 retaining the trailing varargs. */
3059 gimple_call_set_fndecl (stmt, fn);
3060 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
3061 gimple_call_set_arg (stmt, 0, dest);
3062 gimple_call_set_arg (stmt, 1, len);
3063 gimple_call_set_arg (stmt, 2, fmt);
3064 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
3065 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
3066 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
3067 fold_stmt (gsi);
3068 return true;
3069 }
3070
3071 /* Fold a call to the __{,v}sprintf_chk builtin pointed to by GSI.
3072 Return false if a normal call should be emitted rather than
3073 transforming the call. FCODE is either BUILT_IN_SPRINTF_CHK
3074 or BUILT_IN_VSPRINTF_CHK. */
3075
3076 static bool
3077 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
3078 enum built_in_function fcode)
3079 {
3080 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3081 tree dest, size, len, fn, fmt, flag;
3082 const char *fmt_str;
3083 unsigned nargs = gimple_call_num_args (stmt);
3084
3085 /* Verify the required arguments in the original call. */
3086 if (nargs < 4)
3087 return false;
3088 dest = gimple_call_arg (stmt, 0);
3089 flag = gimple_call_arg (stmt, 1);
3090 size = gimple_call_arg (stmt, 2);
3091 fmt = gimple_call_arg (stmt, 3);
3092
3093 if (! tree_fits_uhwi_p (size))
3094 return false;
3095
3096 len = NULL_TREE;
3097
3098 if (!init_target_chars ())
3099 return false;
3100
3101 /* Check whether the format is a literal string constant. */
3102 fmt_str = c_getstr (fmt);
3103 if (fmt_str != NULL)
3104 {
3105 /* If the format doesn't contain % args or %%, we know the size. */
3106 if (strchr (fmt_str, target_percent) == 0)
3107 {
3108 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
3109 len = build_int_cstu (size_type_node, strlen (fmt_str));
3110 }
3111 /* If the format is "%s" and first ... argument is a string literal,
3112 we know the size too. */
3113 else if (fcode == BUILT_IN_SPRINTF_CHK
3114 && strcmp (fmt_str, target_percent_s) == 0)
3115 {
3116 tree arg;
3117
3118 if (nargs == 5)
3119 {
3120 arg = gimple_call_arg (stmt, 4);
3121 if (POINTER_TYPE_P (TREE_TYPE (arg)))
3122 {
3123 len = c_strlen (arg, 1);
3124 if (! len || ! tree_fits_uhwi_p (len))
3125 len = NULL_TREE;
3126 }
3127 }
3128 }
3129 }
3130
3131 if (! integer_all_onesp (size))
3132 {
3133 if (! len || ! tree_int_cst_lt (len, size))
3134 return false;
3135 }
3136
3137 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
3138 or if format doesn't contain % chars or is "%s". */
3139 if (! integer_zerop (flag))
3140 {
3141 if (fmt_str == NULL)
3142 return false;
3143 if (strchr (fmt_str, target_percent) != NULL
3144 && strcmp (fmt_str, target_percent_s))
3145 return false;
3146 }
3147
3148 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
3149 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
3150 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
3151 if (!fn)
3152 return false;
3153
3154 /* Replace the called function and the first 4 arguments by 2,
3155 retaining the trailing varargs. */
3156 gimple_call_set_fndecl (stmt, fn);
3157 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
3158 gimple_call_set_arg (stmt, 0, dest);
3159 gimple_call_set_arg (stmt, 1, fmt);
3160 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
3161 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
3162 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
3163 fold_stmt (gsi);
3164 return true;
3165 }
3166
3167 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
3168 ORIG may be null if this is a 2-argument call. We don't attempt to
3169 simplify calls with more than 3 arguments.
3170
3171 Return true if simplification was possible, otherwise false. */
3172
3173 bool
3174 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
3175 {
3176 gimple *stmt = gsi_stmt (*gsi);
3177 tree dest = gimple_call_arg (stmt, 0);
3178 tree fmt = gimple_call_arg (stmt, 1);
3179 tree orig = NULL_TREE;
3180 const char *fmt_str = NULL;
3181
3182 /* Verify the required arguments in the original call. We deal with two
3183 types of sprintf() calls: 'sprintf (str, fmt)' and
3184 'sprintf (dest, "%s", orig)'. */
3185 if (gimple_call_num_args (stmt) > 3)
3186 return false;
3187
3188 if (gimple_call_num_args (stmt) == 3)
3189 orig = gimple_call_arg (stmt, 2);
3190
3191 /* Check whether the format is a literal string constant. */
3192 fmt_str = c_getstr (fmt);
3193 if (fmt_str == NULL)
3194 return false;
3195
3196 if (!init_target_chars ())
3197 return false;
3198
3199 /* If the format doesn't contain % args or %%, use strcpy. */
3200 if (strchr (fmt_str, target_percent) == NULL)
3201 {
3202 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3203
3204 if (!fn)
3205 return false;
3206
3207 /* Don't optimize sprintf (buf, "abc", ptr++). */
3208 if (orig)
3209 return false;
3210
3211 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
3212 'format' is known to contain no % formats. */
3213 gimple_seq stmts = NULL;
3214 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3215
3216 /* Propagate the NO_WARNING bit to avoid issuing the same
3217 warning more than once. */
3218 if (gimple_no_warning_p (stmt))
3219 gimple_set_no_warning (repl, true);
3220
3221 gimple_seq_add_stmt_without_update (&stmts, repl);
3222 if (tree lhs = gimple_call_lhs (stmt))
3223 {
3224 repl = gimple_build_assign (lhs, build_int_cst (TREE_TYPE (lhs),
3225 strlen (fmt_str)));
3226 gimple_seq_add_stmt_without_update (&stmts, repl);
3227 gsi_replace_with_seq_vops (gsi, stmts);
3228 /* gsi now points at the assignment to the lhs, get a
3229 stmt iterator to the memcpy call.
3230 ??? We can't use gsi_for_stmt as that doesn't work when the
3231 CFG isn't built yet. */
3232 gimple_stmt_iterator gsi2 = *gsi;
3233 gsi_prev (&gsi2);
3234 fold_stmt (&gsi2);
3235 }
3236 else
3237 {
3238 gsi_replace_with_seq_vops (gsi, stmts);
3239 fold_stmt (gsi);
3240 }
3241 return true;
3242 }
3243
3244 /* If the format is "%s", use strcpy; the result, if used, is the length of ORIG. */
3245 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3246 {
3247 tree fn;
3248 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3249
3250 if (!fn)
3251 return false;
3252
3253 /* Don't crash on sprintf (str1, "%s"). */
3254 if (!orig)
3255 return false;
3256
3257 tree orig_len = NULL_TREE;
3258 if (gimple_call_lhs (stmt))
3259 {
3260 orig_len = get_maxval_strlen (orig, SRK_STRLEN);
3261 if (!orig_len)
3262 return false;
3263 }
3264
3265 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
3266 gimple_seq stmts = NULL;
3267 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3268
3269 /* Propagate the NO_WARNING bit to avoid issuing the same
3270 warning more than once. */
3271 if (gimple_no_warning_p (stmt))
3272 gimple_set_no_warning (repl, true);
3273
3274 gimple_seq_add_stmt_without_update (&stmts, repl);
3275 if (tree lhs = gimple_call_lhs (stmt))
3276 {
3277 if (!useless_type_conversion_p (TREE_TYPE (lhs),
3278 TREE_TYPE (orig_len)))
3279 orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
3280 repl = gimple_build_assign (lhs, orig_len);
3281 gimple_seq_add_stmt_without_update (&stmts, repl);
3282 gsi_replace_with_seq_vops (gsi, stmts);
3283 /* gsi now points at the assignment to the lhs, get a
3284 stmt iterator to the memcpy call.
3285 ??? We can't use gsi_for_stmt as that doesn't work when the
3286 CFG isn't built yet. */
3287 gimple_stmt_iterator gsi2 = *gsi;
3288 gsi_prev (&gsi2);
3289 fold_stmt (&gsi2);
3290 }
3291 else
3292 {
3293 gsi_replace_with_seq_vops (gsi, stmts);
3294 fold_stmt (gsi);
3295 }
3296 return true;
3297 }
3298 return false;
3299 }
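
/* Two illustrative foldings (hypothetical operands):

     n = sprintf (d, "abc");     strcpy (d, "abc"); n = 3;
     n = sprintf (d, "%s", s);   strcpy (d, s); n = length of S,
                                 provided that length is known.  */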
3300
3301 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
3302 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
3303 attempt to simplify calls with more than 4 arguments.
3304
3305 Return true if simplification was possible, otherwise false. */
3306
3307 bool
3308 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
3309 {
3310 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3311 tree dest = gimple_call_arg (stmt, 0);
3312 tree destsize = gimple_call_arg (stmt, 1);
3313 tree fmt = gimple_call_arg (stmt, 2);
3314 tree orig = NULL_TREE;
3315 const char *fmt_str = NULL;
3316
3317 if (gimple_call_num_args (stmt) > 4)
3318 return false;
3319
3320 if (gimple_call_num_args (stmt) == 4)
3321 orig = gimple_call_arg (stmt, 3);
3322
3323 if (!tree_fits_uhwi_p (destsize))
3324 return false;
3325 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
3326
3327 /* Check whether the format is a literal string constant. */
3328 fmt_str = c_getstr (fmt);
3329 if (fmt_str == NULL)
3330 return false;
3331
3332 if (!init_target_chars ())
3333 return false;
3334
3335 /* If the format doesn't contain % args or %%, use strcpy. */
3336 if (strchr (fmt_str, target_percent) == NULL)
3337 {
3338 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3339 if (!fn)
3340 return false;
3341
3342 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
3343 if (orig)
3344 return false;
3345
3346 /* We could expand this as
3347 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
3348 or to
3349 memcpy (str, fmt_with_nul_at_cstm1, cst);
3350 but in the former case that might increase code size
3351 and in the latter case grow .rodata section too much.
3352 So punt for now. */
3353 size_t len = strlen (fmt_str);
3354 if (len >= destlen)
3355 return false;
3356
3357 gimple_seq stmts = NULL;
3358 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3359 gimple_seq_add_stmt_without_update (&stmts, repl);
3360 if (tree lhs = gimple_call_lhs (stmt))
3361 {
3362 repl = gimple_build_assign (lhs,
3363 build_int_cst (TREE_TYPE (lhs), len));
3364 gimple_seq_add_stmt_without_update (&stmts, repl);
3365 gsi_replace_with_seq_vops (gsi, stmts);
3366 /* gsi now points at the assignment to the lhs, get a
3367 stmt iterator to the memcpy call.
3368 ??? We can't use gsi_for_stmt as that doesn't work when the
3369 CFG isn't built yet. */
3370 gimple_stmt_iterator gsi2 = *gsi;
3371 gsi_prev (&gsi2);
3372 fold_stmt (&gsi2);
3373 }
3374 else
3375 {
3376 gsi_replace_with_seq_vops (gsi, stmts);
3377 fold_stmt (gsi);
3378 }
3379 return true;
3380 }
3381
3382 /* If the format is "%s", use strcpy; the result, if used, is the length of ORIG. */
3383 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3384 {
3385 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3386 if (!fn)
3387 return false;
3388
3389 /* Don't crash on snprintf (str1, cst, "%s"). */
3390 if (!orig)
3391 return false;
3392
3393 tree orig_len = get_maxval_strlen (orig, SRK_STRLEN);
3394 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
3395 return false;
3396
3397 /* We could expand this as
3398 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
3399 or to
3400 memcpy (str1, str2_with_nul_at_cstm1, cst);
3401 but in the former case that might increase code size
3402 and in the latter case grow .rodata section too much.
3403 So punt for now. */
3404 if (compare_tree_int (orig_len, destlen) >= 0)
3405 return false;
3406
3407 /* Convert snprintf (str1, cst, "%s", str2) into
3408 strcpy (str1, str2) if strlen (str2) < cst. */
3409 gimple_seq stmts = NULL;
3410 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3411 gimple_seq_add_stmt_without_update (&stmts, repl);
3412 if (tree lhs = gimple_call_lhs (stmt))
3413 {
3414 if (!useless_type_conversion_p (TREE_TYPE (lhs),
3415 TREE_TYPE (orig_len)))
3416 orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
3417 repl = gimple_build_assign (lhs, orig_len);
3418 gimple_seq_add_stmt_without_update (&stmts, repl);
3419 gsi_replace_with_seq_vops (gsi, stmts);
3420 /* gsi now points at the assignment to the lhs, get a
3421 stmt iterator to the memcpy call.
3422 ??? We can't use gsi_for_stmt as that doesn't work when the
3423 CFG isn't built yet. */
3424 gimple_stmt_iterator gsi2 = *gsi;
3425 gsi_prev (&gsi2);
3426 fold_stmt (&gsi2);
3427 }
3428 else
3429 {
3430 gsi_replace_with_seq_vops (gsi, stmts);
3431 fold_stmt (gsi);
3432 }
3433 return true;
3434 }
3435 return false;
3436 }
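
/* For example (hypothetical operands), with strlen ("abc") == 3 known
   to be smaller than the destination size,

     n = snprintf (d, 8, "abc");

   folds to

     strcpy (d, "abc");
     n = 3;

   while snprintf (d, 2, "abc") is left alone because it would
   truncate.  */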
3437
3438 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
3439 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
3440 more than 3 arguments, and ARG may be null in the 2-argument case.
3441
3442 Return false if no simplification was possible, otherwise true.
3443 FCODE is the BUILT_IN_*
3444 code of the function to be simplified. */
3445
3446 static bool
3447 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
3448 tree fp, tree fmt, tree arg,
3449 enum built_in_function fcode)
3450 {
3451 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3452 tree fn_fputc, fn_fputs;
3453 const char *fmt_str = NULL;
3454
3455 /* If the return value is used, don't do the transformation. */
3456 if (gimple_call_lhs (stmt) != NULL_TREE)
3457 return false;
3458
3459 /* Check whether the format is a literal string constant. */
3460 fmt_str = c_getstr (fmt);
3461 if (fmt_str == NULL)
3462 return false;
3463
3464 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
3465 {
3466 /* If we're using an unlocked function, assume the other
3467 unlocked functions exist explicitly. */
3468 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
3469 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
3470 }
3471 else
3472 {
3473 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
3474 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
3475 }
3476
3477 if (!init_target_chars ())
3478 return false;
3479
3480 /* If the format doesn't contain % args or %%, use fputs. */
3481 if (strchr (fmt_str, target_percent) == NULL)
3482 {
3483 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
3484 && arg)
3485 return false;
3486
3487 /* If the format specifier was "", fprintf does nothing. */
3488 if (fmt_str[0] == '\0')
3489 {
3490 replace_call_with_value (gsi, NULL_TREE);
3491 return true;
3492 }
3493
3494 /* When "string" doesn't contain %, replace all cases of
3495 fprintf (fp, string) with fputs (string, fp). The fputs
3496 builtin will take care of special cases like length == 1. */
3497 if (fn_fputs)
3498 {
3499 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
3500 replace_call_with_call_and_fold (gsi, repl);
3501 return true;
3502 }
3503 }
3504
3505 /* The other optimizations can be done only on the non-va_list variants. */
3506 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
3507 return false;
3508
3509 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3510 else if (strcmp (fmt_str, target_percent_s) == 0)
3511 {
3512 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3513 return false;
3514 if (fn_fputs)
3515 {
3516 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
3517 replace_call_with_call_and_fold (gsi, repl);
3518 return true;
3519 }
3520 }
3521
3522 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3523 else if (strcmp (fmt_str, target_percent_c) == 0)
3524 {
3525 if (!arg
3526 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
3527 return false;
3528 if (fn_fputc)
3529 {
3530 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
3531 replace_call_with_call_and_fold (gsi, repl);
3532 return true;
3533 }
3534 }
3535
3536 return false;
3537 }
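/* Illustrative source-level sketch (not part of the folder above) of
   the transformations it performs when the fprintf return value is
   unused:

     fprintf (fp, "")         -> call removed
     fprintf (fp, "hello")    -> fputs ("hello", fp)
     fprintf (fp, "%s", s)    -> fputs (s, fp)
     fprintf (fp, "%c", c)    -> fputc (c, fp)

   The _unlocked variants map to fputs_unlocked/fputc_unlocked
   instead.  */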
3538
3539 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3540 FMT and ARG are the arguments to the call; we don't fold cases with
3541 more than 2 arguments, and ARG may be null if this is a 1-argument case.
3542
3543    Return false if no simplification was possible; otherwise replace
3544    the call and return true.  FCODE is the BUILT_IN_*
3545    code of the function to be simplified.  */
3546
3547 static bool
3548 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
3549 tree arg, enum built_in_function fcode)
3550 {
3551 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3552 tree fn_putchar, fn_puts, newarg;
3553 const char *fmt_str = NULL;
3554
3555 /* If the return value is used, don't do the transformation. */
3556 if (gimple_call_lhs (stmt) != NULL_TREE)
3557 return false;
3558
3559 /* Check whether the format is a literal string constant. */
3560 fmt_str = c_getstr (fmt);
3561 if (fmt_str == NULL)
3562 return false;
3563
3564 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
3565 {
3566 /* If we're using an unlocked function, assume the other
3567 unlocked functions exist explicitly. */
3568 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
3569 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
3570 }
3571 else
3572 {
3573 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
3574 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
3575 }
3576
3577 if (!init_target_chars ())
3578 return false;
3579
3580 if (strcmp (fmt_str, target_percent_s) == 0
3581 || strchr (fmt_str, target_percent) == NULL)
3582 {
3583 const char *str;
3584
3585 if (strcmp (fmt_str, target_percent_s) == 0)
3586 {
3587 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3588 return false;
3589
3590 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3591 return false;
3592
3593 str = c_getstr (arg);
3594 if (str == NULL)
3595 return false;
3596 }
3597 else
3598 {
3599 /* The format specifier doesn't contain any '%' characters. */
3600 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
3601 && arg)
3602 return false;
3603 str = fmt_str;
3604 }
3605
3606 /* If the string was "", printf does nothing. */
3607 if (str[0] == '\0')
3608 {
3609 replace_call_with_value (gsi, NULL_TREE);
3610 return true;
3611 }
3612
3613 /* If the string has length of 1, call putchar. */
3614 if (str[1] == '\0')
3615 {
3616          /* Given printf ("c"), where c is any single character,
3617             convert "c"[0] to an int and pass that to the replacement
3618             function.  */
3619 newarg = build_int_cst (integer_type_node, str[0]);
3620 if (fn_putchar)
3621 {
3622 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
3623 replace_call_with_call_and_fold (gsi, repl);
3624 return true;
3625 }
3626 }
3627 else
3628 {
3629 /* If the string was "string\n", call puts("string"). */
3630 size_t len = strlen (str);
3631 if ((unsigned char)str[len - 1] == target_newline
3632 && (size_t) (int) len == len
3633 && (int) len > 0)
3634 {
3635 char *newstr;
3636
3637 /* Create a NUL-terminated string that's one char shorter
3638 than the original, stripping off the trailing '\n'. */
3639 newstr = xstrdup (str);
3640 newstr[len - 1] = '\0';
3641 newarg = build_string_literal (len, newstr);
3642 free (newstr);
3643 if (fn_puts)
3644 {
3645 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
3646 replace_call_with_call_and_fold (gsi, repl);
3647 return true;
3648 }
3649 }
3650 else
3651 /* We'd like to arrange to call fputs(string,stdout) here,
3652 but we need stdout and don't have a way to get it yet. */
3653 return false;
3654 }
3655 }
3656
3657 /* The other optimizations can be done only on the non-va_list variants. */
3658 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3659 return false;
3660
3661 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
3662 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
3663 {
3664 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3665 return false;
3666 if (fn_puts)
3667 {
3668 gcall *repl = gimple_build_call (fn_puts, 1, arg);
3669 replace_call_with_call_and_fold (gsi, repl);
3670 return true;
3671 }
3672 }
3673
3674 /* If the format specifier was "%c", call __builtin_putchar(arg). */
3675 else if (strcmp (fmt_str, target_percent_c) == 0)
3676 {
3677 if (!arg || ! useless_type_conversion_p (integer_type_node,
3678 TREE_TYPE (arg)))
3679 return false;
3680 if (fn_putchar)
3681 {
3682 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
3683 replace_call_with_call_and_fold (gsi, repl);
3684 return true;
3685 }
3686 }
3687
3688 return false;
3689 }
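/* Illustrative source-level sketch of the transformations above,
   which fire only when the printf return value is unused:

     printf ("")          -> call removed
     printf ("x")         -> putchar ('x')
     printf ("foo\n")     -> puts ("foo")
     printf ("%s\n", s)   -> puts (s)
     printf ("%c", c)     -> putchar (c)

   printf ("%s", s) is folded through the same putchar/puts paths,
   but only when S is a known constant string.  */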
3690
3691
3692
3693 /* Fold a call to __builtin_strlen by computing its value or range.  */
3694
3695 static bool
3696 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
3697 {
3698 gimple *stmt = gsi_stmt (*gsi);
3699 tree arg = gimple_call_arg (stmt, 0);
3700
3701 wide_int minlen;
3702 wide_int maxlen;
3703
3704 c_strlen_data lendata = { };
3705 if (get_range_strlen (arg, &lendata, /* eltsize = */ 1)
3706 && !lendata.decl
3707 && lendata.minlen && TREE_CODE (lendata.minlen) == INTEGER_CST
3708 && lendata.maxlen && TREE_CODE (lendata.maxlen) == INTEGER_CST)
3709 {
3710 /* The range of lengths refers to either a single constant
3711 string or to the longest and shortest constant string
3712 referenced by the argument of the strlen() call, or to
3713 the strings that can possibly be stored in the arrays
3714 the argument refers to. */
3715 minlen = wi::to_wide (lendata.minlen);
3716 maxlen = wi::to_wide (lendata.maxlen);
3717 }
3718 else
3719 {
3720 unsigned prec = TYPE_PRECISION (sizetype);
3721
3722 minlen = wi::shwi (0, prec);
3723 maxlen = wi::to_wide (max_object_size (), prec) - 2;
3724 }
3725
3726 if (minlen == maxlen)
3727 {
3728 /* Fold the strlen call to a constant. */
3729 tree type = TREE_TYPE (lendata.minlen);
3730 tree len = force_gimple_operand_gsi (gsi,
3731 wide_int_to_tree (type, minlen),
3732 true, NULL, true, GSI_SAME_STMT);
3733 replace_call_with_value (gsi, len);
3734 return true;
3735 }
3736
3737   /* Set the strlen() range to [MINLEN, MAXLEN].  */
3738 if (tree lhs = gimple_call_lhs (stmt))
3739 set_strlen_range (lhs, minlen, maxlen);
3740
3741 return false;
3742 }
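/* Worked example for the folder above: given

     const char a[] = "abc";
     n = __builtin_strlen (a);

   get_range_strlen computes minlen == maxlen == 3, so the call is
   replaced by the constant 3.  When only a range such as [0, 7] is
   known, the call is kept but the range is recorded on the LHS for
   later passes via set_strlen_range.  */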
3743
3744 /* Fold a call to __builtin_acc_on_device. */
3745
3746 static bool
3747 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
3748 {
3749 /* Defer folding until we know which compiler we're in. */
3750 if (symtab->state != EXPANSION)
3751 return false;
3752
3753 unsigned val_host = GOMP_DEVICE_HOST;
3754 unsigned val_dev = GOMP_DEVICE_NONE;
3755
3756 #ifdef ACCEL_COMPILER
3757 val_host = GOMP_DEVICE_NOT_HOST;
3758 val_dev = ACCEL_COMPILER_acc_device;
3759 #endif
3760
3761 location_t loc = gimple_location (gsi_stmt (*gsi));
3762
3763 tree host_eq = make_ssa_name (boolean_type_node);
3764 gimple *host_ass = gimple_build_assign
3765 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
3766 gimple_set_location (host_ass, loc);
3767 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
3768
3769 tree dev_eq = make_ssa_name (boolean_type_node);
3770 gimple *dev_ass = gimple_build_assign
3771 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
3772 gimple_set_location (dev_ass, loc);
3773 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
3774
3775 tree result = make_ssa_name (boolean_type_node);
3776 gimple *result_ass = gimple_build_assign
3777 (result, BIT_IOR_EXPR, host_eq, dev_eq);
3778 gimple_set_location (result_ass, loc);
3779 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
3780
3781 replace_call_with_value (gsi, result);
3782
3783 return true;
3784 }
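/* Sketch of the GIMPLE emitted above for acc_on_device (ARG), with
   illustrative SSA names:

     _1 = ARG == VAL_HOST;
     _2 = ARG == VAL_DEV;
     _3 = _1 | _2;

   and uses of the call are replaced by _3.  The host compiler tests
   against GOMP_DEVICE_HOST and GOMP_DEVICE_NONE; an accelerator
   compiler substitutes GOMP_DEVICE_NOT_HOST and its own device
   code.  */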
3785
3786 /* Fold realloc (0, n) -> malloc (n). */
3787
3788 static bool
3789 gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
3790 {
3791 gimple *stmt = gsi_stmt (*gsi);
3792 tree arg = gimple_call_arg (stmt, 0);
3793 tree size = gimple_call_arg (stmt, 1);
3794
3795 if (operand_equal_p (arg, null_pointer_node, 0))
3796 {
3797 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
3798 if (fn_malloc)
3799 {
3800 gcall *repl = gimple_build_call (fn_malloc, 1, size);
3801 replace_call_with_call_and_fold (gsi, repl);
3802 return true;
3803 }
3804 }
3805 return false;
3806 }
3807
3808 /* Fold the non-target builtin at *GSI and return whether any simplification
3809 was made. */
3810
3811 static bool
3812 gimple_fold_builtin (gimple_stmt_iterator *gsi)
3813 {
3814   gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3815 tree callee = gimple_call_fndecl (stmt);
3816
3817 /* Give up for always_inline inline builtins until they are
3818 inlined. */
3819 if (avoid_folding_inline_builtin (callee))
3820 return false;
3821
3822 unsigned n = gimple_call_num_args (stmt);
3823 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
3824 switch (fcode)
3825 {
3826 case BUILT_IN_BCMP:
3827 return gimple_fold_builtin_bcmp (gsi);
3828 case BUILT_IN_BCOPY:
3829 return gimple_fold_builtin_bcopy (gsi);
3830 case BUILT_IN_BZERO:
3831 return gimple_fold_builtin_bzero (gsi);
3832
3833 case BUILT_IN_MEMSET:
3834 return gimple_fold_builtin_memset (gsi,
3835 gimple_call_arg (stmt, 1),
3836 gimple_call_arg (stmt, 2));
3837 case BUILT_IN_MEMCPY:
3838 case BUILT_IN_MEMPCPY:
3839 case BUILT_IN_MEMMOVE:
3840 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3841 gimple_call_arg (stmt, 1), fcode);
3842 case BUILT_IN_SPRINTF_CHK:
3843 case BUILT_IN_VSPRINTF_CHK:
3844 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
3845 case BUILT_IN_STRCAT_CHK:
3846 return gimple_fold_builtin_strcat_chk (gsi);
3847 case BUILT_IN_STRNCAT_CHK:
3848 return gimple_fold_builtin_strncat_chk (gsi);
3849 case BUILT_IN_STRLEN:
3850 return gimple_fold_builtin_strlen (gsi);
3851 case BUILT_IN_STRCPY:
3852 return gimple_fold_builtin_strcpy (gsi,
3853 gimple_call_arg (stmt, 0),
3854 gimple_call_arg (stmt, 1));
3855 case BUILT_IN_STRNCPY:
3856 return gimple_fold_builtin_strncpy (gsi,
3857 gimple_call_arg (stmt, 0),
3858 gimple_call_arg (stmt, 1),
3859 gimple_call_arg (stmt, 2));
3860 case BUILT_IN_STRCAT:
3861 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
3862 gimple_call_arg (stmt, 1));
3863 case BUILT_IN_STRNCAT:
3864 return gimple_fold_builtin_strncat (gsi);
3865 case BUILT_IN_INDEX:
3866 case BUILT_IN_STRCHR:
3867 return gimple_fold_builtin_strchr (gsi, false);
3868 case BUILT_IN_RINDEX:
3869 case BUILT_IN_STRRCHR:
3870 return gimple_fold_builtin_strchr (gsi, true);
3871 case BUILT_IN_STRSTR:
3872 return gimple_fold_builtin_strstr (gsi);
3873 case BUILT_IN_STRCMP:
3874 case BUILT_IN_STRCMP_EQ:
3875 case BUILT_IN_STRCASECMP:
3876 case BUILT_IN_STRNCMP:
3877 case BUILT_IN_STRNCMP_EQ:
3878 case BUILT_IN_STRNCASECMP:
3879 return gimple_fold_builtin_string_compare (gsi);
3880 case BUILT_IN_MEMCHR:
3881 return gimple_fold_builtin_memchr (gsi);
3882 case BUILT_IN_FPUTS:
3883 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3884 gimple_call_arg (stmt, 1), false);
3885 case BUILT_IN_FPUTS_UNLOCKED:
3886 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3887 gimple_call_arg (stmt, 1), true);
3888 case BUILT_IN_MEMCPY_CHK:
3889 case BUILT_IN_MEMPCPY_CHK:
3890 case BUILT_IN_MEMMOVE_CHK:
3891 case BUILT_IN_MEMSET_CHK:
3892 return gimple_fold_builtin_memory_chk (gsi,
3893 gimple_call_arg (stmt, 0),
3894 gimple_call_arg (stmt, 1),
3895 gimple_call_arg (stmt, 2),
3896 gimple_call_arg (stmt, 3),
3897 fcode);
3898 case BUILT_IN_STPCPY:
3899 return gimple_fold_builtin_stpcpy (gsi);
3900 case BUILT_IN_STRCPY_CHK:
3901 case BUILT_IN_STPCPY_CHK:
3902 return gimple_fold_builtin_stxcpy_chk (gsi,
3903 gimple_call_arg (stmt, 0),
3904 gimple_call_arg (stmt, 1),
3905 gimple_call_arg (stmt, 2),
3906 fcode);
3907 case BUILT_IN_STRNCPY_CHK:
3908 case BUILT_IN_STPNCPY_CHK:
3909 return gimple_fold_builtin_stxncpy_chk (gsi,
3910 gimple_call_arg (stmt, 0),
3911 gimple_call_arg (stmt, 1),
3912 gimple_call_arg (stmt, 2),
3913 gimple_call_arg (stmt, 3),
3914 fcode);
3915 case BUILT_IN_SNPRINTF_CHK:
3916 case BUILT_IN_VSNPRINTF_CHK:
3917 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
3918
3919 case BUILT_IN_FPRINTF:
3920 case BUILT_IN_FPRINTF_UNLOCKED:
3921 case BUILT_IN_VFPRINTF:
3922 if (n == 2 || n == 3)
3923 return gimple_fold_builtin_fprintf (gsi,
3924 gimple_call_arg (stmt, 0),
3925 gimple_call_arg (stmt, 1),
3926 n == 3
3927 ? gimple_call_arg (stmt, 2)
3928 : NULL_TREE,
3929 fcode);
3930 break;
3931 case BUILT_IN_FPRINTF_CHK:
3932 case BUILT_IN_VFPRINTF_CHK:
3933 if (n == 3 || n == 4)
3934 return gimple_fold_builtin_fprintf (gsi,
3935 gimple_call_arg (stmt, 0),
3936 gimple_call_arg (stmt, 2),
3937 n == 4
3938 ? gimple_call_arg (stmt, 3)
3939 : NULL_TREE,
3940 fcode);
3941 break;
3942 case BUILT_IN_PRINTF:
3943 case BUILT_IN_PRINTF_UNLOCKED:
3944 case BUILT_IN_VPRINTF:
3945 if (n == 1 || n == 2)
3946 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
3947 n == 2
3948 ? gimple_call_arg (stmt, 1)
3949 : NULL_TREE, fcode);
3950 break;
3951 case BUILT_IN_PRINTF_CHK:
3952 case BUILT_IN_VPRINTF_CHK:
3953 if (n == 2 || n == 3)
3954 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
3955 n == 3
3956 ? gimple_call_arg (stmt, 2)
3957 : NULL_TREE, fcode);
3958 break;
3959 case BUILT_IN_ACC_ON_DEVICE:
3960 return gimple_fold_builtin_acc_on_device (gsi,
3961 gimple_call_arg (stmt, 0));
3962 case BUILT_IN_REALLOC:
3963 return gimple_fold_builtin_realloc (gsi);
3964
3965 default:;
3966 }
3967
3968 /* Try the generic builtin folder. */
3969 bool ignore = (gimple_call_lhs (stmt) == NULL);
3970 tree result = fold_call_stmt (stmt, ignore);
3971 if (result)
3972 {
3973 if (ignore)
3974 STRIP_NOPS (result);
3975 else
3976 result = fold_convert (gimple_call_return_type (stmt), result);
3977 if (!update_call_from_tree (gsi, result))
3978 gimplify_and_update_call_from_tree (gsi, result);
3979 return true;
3980 }
3981
3982 return false;
3983 }
3984
3985 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
3986 function calls to constants, where possible. */
3987
3988 static tree
3989 fold_internal_goacc_dim (const gimple *call)
3990 {
3991 int axis = oacc_get_ifn_dim_arg (call);
3992 int size = oacc_get_fn_dim_size (current_function_decl, axis);
3993 tree result = NULL_TREE;
3994 tree type = TREE_TYPE (gimple_call_lhs (call));
3995
3996 switch (gimple_call_internal_fn (call))
3997 {
3998 case IFN_GOACC_DIM_POS:
3999 /* If the size is 1, we know the answer. */
4000 if (size == 1)
4001 result = build_int_cst (type, 0);
4002 break;
4003 case IFN_GOACC_DIM_SIZE:
4004 /* If the size is not dynamic, we know the answer. */
4005 if (size)
4006 result = build_int_cst (type, size);
4007 break;
4008 default:
4009 break;
4010 }
4011
4012 return result;
4013 }
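/* Example: in an OpenACC region compiled with a static
   vector_length (32), IFN_GOACC_DIM_SIZE for that axis folds to the
   constant 32, while a dynamic dimension (size 0 here) is left
   alone.  IFN_GOACC_DIM_POS folds to 0 only when the dimension size
   is 1, since the single element must be at position zero.  */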
4014
4015 /* Return true if STMT is an __atomic_compare_exchange_N call that is
4016    suitable for conversion into IFN_ATOMIC_COMPARE_EXCHANGE when its second
4017    argument is &VAR, where VAR is only addressable because of such calls.  */
4018
4019 bool
4020 optimize_atomic_compare_exchange_p (gimple *stmt)
4021 {
4022 if (gimple_call_num_args (stmt) != 6
4023 || !flag_inline_atomics
4024 || !optimize
4025 || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
4026 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
4027 || !gimple_vdef (stmt)
4028 || !gimple_vuse (stmt))
4029 return false;
4030
4031 tree fndecl = gimple_call_fndecl (stmt);
4032 switch (DECL_FUNCTION_CODE (fndecl))
4033 {
4034 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
4035 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
4036 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
4037 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
4038 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
4039 break;
4040 default:
4041 return false;
4042 }
4043
4044 tree expected = gimple_call_arg (stmt, 1);
4045 if (TREE_CODE (expected) != ADDR_EXPR
4046 || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
4047 return false;
4048
4049 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
4050 if (!is_gimple_reg_type (etype)
4051 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
4052 || TREE_THIS_VOLATILE (etype)
4053 || VECTOR_TYPE_P (etype)
4054 || TREE_CODE (etype) == COMPLEX_TYPE
4055 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
4056 might not preserve all the bits. See PR71716. */
4057 || SCALAR_FLOAT_TYPE_P (etype)
4058 || maybe_ne (TYPE_PRECISION (etype),
4059 GET_MODE_BITSIZE (TYPE_MODE (etype))))
4060 return false;
4061
4062 tree weak = gimple_call_arg (stmt, 3);
4063 if (!integer_zerop (weak) && !integer_onep (weak))
4064 return false;
4065
4066 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
4067 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
4068 machine_mode mode = TYPE_MODE (itype);
4069
4070 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
4071 == CODE_FOR_nothing
4072 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
4073 return false;
4074
4075 if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
4076 return false;
4077
4078 return true;
4079 }
4080
4081 /* Fold
4082 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
4083 into
4084 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
4085 i = IMAGPART_EXPR <t>;
4086 r = (_Bool) i;
4087 e = REALPART_EXPR <t>; */
4088
4089 void
4090 fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
4091 {
4092 gimple *stmt = gsi_stmt (*gsi);
4093 tree fndecl = gimple_call_fndecl (stmt);
4094 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
4095 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
4096 tree ctype = build_complex_type (itype);
4097 tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
4098 bool throws = false;
4099 edge e = NULL;
4100 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
4101 expected);
4102 gsi_insert_before (gsi, g, GSI_SAME_STMT);
4103 gimple_stmt_iterator gsiret = gsi_for_stmt (g);
4104 if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
4105 {
4106 g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
4107 build1 (VIEW_CONVERT_EXPR, itype,
4108 gimple_assign_lhs (g)));
4109 gsi_insert_before (gsi, g, GSI_SAME_STMT);
4110 }
4111 int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
4112 + int_size_in_bytes (itype);
4113 g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
4114 gimple_call_arg (stmt, 0),
4115 gimple_assign_lhs (g),
4116 gimple_call_arg (stmt, 2),
4117 build_int_cst (integer_type_node, flag),
4118 gimple_call_arg (stmt, 4),
4119 gimple_call_arg (stmt, 5));
4120 tree lhs = make_ssa_name (ctype);
4121 gimple_call_set_lhs (g, lhs);
4122 gimple_move_vops (g, stmt);
4123 tree oldlhs = gimple_call_lhs (stmt);
4124 if (stmt_can_throw_internal (cfun, stmt))
4125 {
4126 throws = true;
4127 e = find_fallthru_edge (gsi_bb (*gsi)->succs);
4128 }
4129 gimple_call_set_nothrow (as_a <gcall *> (g),
4130 gimple_call_nothrow_p (as_a <gcall *> (stmt)));
4131 gimple_call_set_lhs (stmt, NULL_TREE);
4132 gsi_replace (gsi, g, true);
4133 if (oldlhs)
4134 {
4135 g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
4136 build1 (IMAGPART_EXPR, itype, lhs));
4137 if (throws)
4138 {
4139 gsi_insert_on_edge_immediate (e, g);
4140 *gsi = gsi_for_stmt (g);
4141 }
4142 else
4143 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4144 g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
4145 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4146 }
4147 g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
4148 build1 (REALPART_EXPR, itype, lhs));
4149 if (throws && oldlhs == NULL_TREE)
4150 {
4151 gsi_insert_on_edge_immediate (e, g);
4152 *gsi = gsi_for_stmt (g);
4153 }
4154 else
4155 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4156 if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
4157 {
4158 g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
4159 VIEW_CONVERT_EXPR,
4160 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
4161 gimple_assign_lhs (g)));
4162 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4163 }
4164 g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
4165 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4166 *gsi = gsiret;
4167 }
4168
4169 /* Return true if the result of ARG0 CODE ARG1, computed in infinite
4170    signed precision, doesn't fit into TYPE.  The overflow test is made
4171    regardless of -fwrapv and applies to unsigned types as well.  */
4172
4173 bool
4174 arith_overflowed_p (enum tree_code code, const_tree type,
4175 const_tree arg0, const_tree arg1)
4176 {
4177 widest2_int warg0 = widest2_int_cst (arg0);
4178 widest2_int warg1 = widest2_int_cst (arg1);
4179 widest2_int wres;
4180 switch (code)
4181 {
4182 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
4183 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
4184 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
4185 default: gcc_unreachable ();
4186 }
4187 signop sign = TYPE_SIGN (type);
4188 if (sign == UNSIGNED && wi::neg_p (wres))
4189 return true;
4190 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
4191 }
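/* Worked example for arith_overflowed_p: with TYPE an 8-bit
   unsigned type,

     arith_overflowed_p (PLUS_EXPR, type, 200, 100)

   computes WRES = 300 in the double-width widest2_int; 300 needs 9
   value bits, which exceeds the type's precision of 8, so the
   result is true.  MINUS_EXPR on (100, 200) yields WRES = -100,
   which is negative while the sign is UNSIGNED, so it overflows as
   well.  */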
4192
4193 /* If IFN_MASK_LOAD/STORE call CALL is unconditional, return a MEM_REF
4194 for the memory it references, otherwise return null. VECTYPE is the
4195 type of the memory vector. */
4196
4197 static tree
4198 gimple_fold_mask_load_store_mem_ref (gcall *call, tree vectype)
4199 {
4200 tree ptr = gimple_call_arg (call, 0);
4201 tree alias_align = gimple_call_arg (call, 1);
4202 tree mask = gimple_call_arg (call, 2);
4203 if (!tree_fits_uhwi_p (alias_align) || !integer_all_onesp (mask))
4204 return NULL_TREE;
4205
4206 unsigned HOST_WIDE_INT align = tree_to_uhwi (alias_align) * BITS_PER_UNIT;
4207 if (TYPE_ALIGN (vectype) != align)
4208 vectype = build_aligned_type (vectype, align);
4209 tree offset = build_zero_cst (TREE_TYPE (alias_align));
4210 return fold_build2 (MEM_REF, vectype, ptr, offset);
4211 }
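/* Sketch of the folding driven by the helper above: a call such as

     lhs_1 = .MASK_LOAD (ptr_2, align, { -1, -1, -1, -1 });

   has an all-ones mask and is therefore unconditional, so the
   callers below can rewrite it into a plain vector load

     lhs_1 = MEM <vector(4) int> [(int *) ptr_2];

   with the alignment from the second argument applied to the vector
   type.  The store case is symmetric.  */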
4212
4213 /* Try to fold IFN_MASK_LOAD call CALL. Return true on success. */
4214
4215 static bool
4216 gimple_fold_mask_load (gimple_stmt_iterator *gsi, gcall *call)
4217 {
4218 tree lhs = gimple_call_lhs (call);
4219 if (!lhs)
4220 return false;
4221
4222 if (tree rhs = gimple_fold_mask_load_store_mem_ref (call, TREE_TYPE (lhs)))
4223 {
4224 gassign *new_stmt = gimple_build_assign (lhs, rhs);
4225 gimple_set_location (new_stmt, gimple_location (call));
4226 gimple_move_vops (new_stmt, call);
4227 gsi_replace (gsi, new_stmt, false);
4228 return true;
4229 }
4230 return false;
4231 }
4232
4233 /* Try to fold IFN_MASK_STORE call CALL. Return true on success. */
4234
4235 static bool
4236 gimple_fold_mask_store (gimple_stmt_iterator *gsi, gcall *call)
4237 {
4238 tree rhs = gimple_call_arg (call, 3);
4239 if (tree lhs = gimple_fold_mask_load_store_mem_ref (call, TREE_TYPE (rhs)))
4240 {
4241 gassign *new_stmt = gimple_build_assign (lhs, rhs);
4242 gimple_set_location (new_stmt, gimple_location (call));
4243 gimple_move_vops (new_stmt, call);
4244 gsi_replace (gsi, new_stmt, false);
4245 return true;
4246 }
4247 return false;
4248 }
4249
4250 /* Attempt to fold a call statement referenced by the statement iterator GSI.
4251 The statement may be replaced by another statement, e.g., if the call
4252 simplifies to a constant value. Return true if any changes were made.
4253 It is assumed that the operands have been previously folded. */
4254
4255 static bool
4256 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
4257 {
4258 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
4259 tree callee;
4260 bool changed = false;
4261 unsigned i;
4262
4263 /* Fold *& in call arguments. */
4264 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4265 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
4266 {
4267 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
4268 if (tmp)
4269 {
4270 gimple_call_set_arg (stmt, i, tmp);
4271 changed = true;
4272 }
4273 }
4274
4275 /* Check for virtual calls that became direct calls. */
4276 callee = gimple_call_fn (stmt);
4277 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
4278 {
4279 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
4280 {
4281 if (dump_file && virtual_method_call_p (callee)
4282 && !possible_polymorphic_call_target_p
4283 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
4284 (OBJ_TYPE_REF_EXPR (callee)))))
4285 {
4286 fprintf (dump_file,
4287 "Type inheritance inconsistent devirtualization of ");
4288 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
4289 fprintf (dump_file, " to ");
4290 print_generic_expr (dump_file, callee, TDF_SLIM);
4291 fprintf (dump_file, "\n");
4292 }
4293
4294 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
4295 changed = true;
4296 }
4297 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
4298 {
4299 bool final;
4300 vec <cgraph_node *>targets
4301 = possible_polymorphic_call_targets (callee, stmt, &final);
4302 if (final && targets.length () <= 1 && dbg_cnt (devirt))
4303 {
4304 tree lhs = gimple_call_lhs (stmt);
4305 if (dump_enabled_p ())
4306 {
4307 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
4308 "folding virtual function call to %s\n",
4309 targets.length () == 1
4310 ? targets[0]->name ()
4311 : "__builtin_unreachable");
4312 }
4313 if (targets.length () == 1)
4314 {
4315 tree fndecl = targets[0]->decl;
4316 gimple_call_set_fndecl (stmt, fndecl);
4317 changed = true;
4318 /* If changing the call to __cxa_pure_virtual
4319 or similar noreturn function, adjust gimple_call_fntype
4320 too. */
4321 if (gimple_call_noreturn_p (stmt)
4322 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
4323 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
4324 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
4325 == void_type_node))
4326 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
4327 /* If the call becomes noreturn, remove the lhs. */
4328 if (lhs
4329 && gimple_call_noreturn_p (stmt)
4330 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
4331 || should_remove_lhs_p (lhs)))
4332 {
4333 if (TREE_CODE (lhs) == SSA_NAME)
4334 {
4335 tree var = create_tmp_var (TREE_TYPE (lhs));
4336 tree def = get_or_create_ssa_default_def (cfun, var);
4337 gimple *new_stmt = gimple_build_assign (lhs, def);
4338 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
4339 }
4340 gimple_call_set_lhs (stmt, NULL_TREE);
4341 }
4342 maybe_remove_unused_call_args (cfun, stmt);
4343 }
4344 else
4345 {
4346 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
4347 gimple *new_stmt = gimple_build_call (fndecl, 0);
4348 gimple_set_location (new_stmt, gimple_location (stmt));
4349              /* If the call had an SSA name as its lhs, morph that
4350                 into an uninitialized value.  */
4351 if (lhs && TREE_CODE (lhs) == SSA_NAME)
4352 {
4353 tree var = create_tmp_var (TREE_TYPE (lhs));
4354 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
4355 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
4356 set_ssa_default_def (cfun, var, lhs);
4357 }
4358 gimple_move_vops (new_stmt, stmt);
4359 gsi_replace (gsi, new_stmt, false);
4360 return true;
4361 }
4362 }
4363 }
4364 }
4365
4366 /* Check for indirect calls that became direct calls, and then
4367 no longer require a static chain. */
4368 if (gimple_call_chain (stmt))
4369 {
4370 tree fn = gimple_call_fndecl (stmt);
4371 if (fn && !DECL_STATIC_CHAIN (fn))
4372 {
4373 gimple_call_set_chain (stmt, NULL);
4374 changed = true;
4375 }
4376 else
4377 {
4378 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
4379 if (tmp)
4380 {
4381 gimple_call_set_chain (stmt, tmp);
4382 changed = true;
4383 }
4384 }
4385 }
4386
4387 if (inplace)
4388 return changed;
4389
4390 /* Check for builtins that CCP can handle using information not
4391 available in the generic fold routines. */
4392 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4393 {
4394 if (gimple_fold_builtin (gsi))
4395 changed = true;
4396 }
4397 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
4398 {
4399 changed |= targetm.gimple_fold_builtin (gsi);
4400 }
4401 else if (gimple_call_internal_p (stmt))
4402 {
4403 enum tree_code subcode = ERROR_MARK;
4404 tree result = NULL_TREE;
4405 bool cplx_result = false;
4406 tree overflow = NULL_TREE;
4407 switch (gimple_call_internal_fn (stmt))
4408 {
4409 case IFN_BUILTIN_EXPECT:
4410 result = fold_builtin_expect (gimple_location (stmt),
4411 gimple_call_arg (stmt, 0),
4412 gimple_call_arg (stmt, 1),
4413 gimple_call_arg (stmt, 2),
4414 NULL_TREE);
4415 break;
4416 case IFN_UBSAN_OBJECT_SIZE:
4417 {
4418 tree offset = gimple_call_arg (stmt, 1);
4419 tree objsize = gimple_call_arg (stmt, 2);
4420 if (integer_all_onesp (objsize)
4421 || (TREE_CODE (offset) == INTEGER_CST
4422 && TREE_CODE (objsize) == INTEGER_CST
4423 && tree_int_cst_le (offset, objsize)))
4424 {
4425 replace_call_with_value (gsi, NULL_TREE);
4426 return true;
4427 }
4428 }
4429 break;
4430 case IFN_UBSAN_PTR:
4431 if (integer_zerop (gimple_call_arg (stmt, 1)))
4432 {
4433 replace_call_with_value (gsi, NULL_TREE);
4434 return true;
4435 }
4436 break;
4437 case IFN_UBSAN_BOUNDS:
4438 {
4439 tree index = gimple_call_arg (stmt, 1);
4440 tree bound = gimple_call_arg (stmt, 2);
4441 if (TREE_CODE (index) == INTEGER_CST
4442 && TREE_CODE (bound) == INTEGER_CST)
4443 {
4444 index = fold_convert (TREE_TYPE (bound), index);
4445 if (TREE_CODE (index) == INTEGER_CST
4446 && tree_int_cst_le (index, bound))
4447 {
4448 replace_call_with_value (gsi, NULL_TREE);
4449 return true;
4450 }
4451 }
4452 }
4453 break;
4454 case IFN_GOACC_DIM_SIZE:
4455 case IFN_GOACC_DIM_POS:
4456 result = fold_internal_goacc_dim (stmt);
4457 break;
4458 case IFN_UBSAN_CHECK_ADD:
4459 subcode = PLUS_EXPR;
4460 break;
4461 case IFN_UBSAN_CHECK_SUB:
4462 subcode = MINUS_EXPR;
4463 break;
4464 case IFN_UBSAN_CHECK_MUL:
4465 subcode = MULT_EXPR;
4466 break;
4467 case IFN_ADD_OVERFLOW:
4468 subcode = PLUS_EXPR;
4469 cplx_result = true;
4470 break;
4471 case IFN_SUB_OVERFLOW:
4472 subcode = MINUS_EXPR;
4473 cplx_result = true;
4474 break;
4475 case IFN_MUL_OVERFLOW:
4476 subcode = MULT_EXPR;
4477 cplx_result = true;
4478 break;
4479 case IFN_MASK_LOAD:
4480 changed |= gimple_fold_mask_load (gsi, stmt);
4481 break;
4482 case IFN_MASK_STORE:
4483 changed |= gimple_fold_mask_store (gsi, stmt);
4484 break;
4485 default:
4486 break;
4487 }
4488 if (subcode != ERROR_MARK)
4489 {
4490 tree arg0 = gimple_call_arg (stmt, 0);
4491 tree arg1 = gimple_call_arg (stmt, 1);
4492 tree type = TREE_TYPE (arg0);
4493 if (cplx_result)
4494 {
4495 tree lhs = gimple_call_lhs (stmt);
4496 if (lhs == NULL_TREE)
4497 type = NULL_TREE;
4498 else
4499 type = TREE_TYPE (TREE_TYPE (lhs));
4500 }
4501 if (type == NULL_TREE)
4502 ;
4503 /* x = y + 0; x = y - 0; x = y * 0; */
4504 else if (integer_zerop (arg1))
4505 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
4506 /* x = 0 + y; x = 0 * y; */
4507 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
4508 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
4509 /* x = y - y; */
4510 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
4511 result = integer_zero_node;
4512 /* x = y * 1; x = 1 * y; */
4513 else if (subcode == MULT_EXPR && integer_onep (arg1))
4514 result = arg0;
4515 else if (subcode == MULT_EXPR && integer_onep (arg0))
4516 result = arg1;
4517 else if (TREE_CODE (arg0) == INTEGER_CST
4518 && TREE_CODE (arg1) == INTEGER_CST)
4519 {
4520 if (cplx_result)
4521 result = int_const_binop (subcode, fold_convert (type, arg0),
4522 fold_convert (type, arg1));
4523 else
4524 result = int_const_binop (subcode, arg0, arg1);
4525 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
4526 {
4527 if (cplx_result)
4528 overflow = build_one_cst (type);
4529 else
4530 result = NULL_TREE;
4531 }
4532 }
4533 if (result)
4534 {
4535 if (result == integer_zero_node)
4536 result = build_zero_cst (type);
4537 else if (cplx_result && TREE_TYPE (result) != type)
4538 {
4539 if (TREE_CODE (result) == INTEGER_CST)
4540 {
4541 if (arith_overflowed_p (PLUS_EXPR, type, result,
4542 integer_zero_node))
4543 overflow = build_one_cst (type);
4544 }
4545 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
4546 && TYPE_UNSIGNED (type))
4547 || (TYPE_PRECISION (type)
4548 < (TYPE_PRECISION (TREE_TYPE (result))
4549 + (TYPE_UNSIGNED (TREE_TYPE (result))
4550 && !TYPE_UNSIGNED (type)))))
4551 result = NULL_TREE;
4552 if (result)
4553 result = fold_convert (type, result);
4554 }
4555 }
4556 }
4557
4558 if (result)
4559 {
4560 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
4561 result = drop_tree_overflow (result);
4562 if (cplx_result)
4563 {
4564 if (overflow == NULL_TREE)
4565 overflow = build_zero_cst (TREE_TYPE (result));
4566 tree ctype = build_complex_type (TREE_TYPE (result));
4567 if (TREE_CODE (result) == INTEGER_CST
4568 && TREE_CODE (overflow) == INTEGER_CST)
4569 result = build_complex (ctype, result, overflow);
4570 else
4571 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
4572 ctype, result, overflow);
4573 }
4574 if (!update_call_from_tree (gsi, result))
4575 gimplify_and_update_call_from_tree (gsi, result);
4576 changed = true;
4577 }
4578 }
4579
4580 return changed;
4581 }
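/* Example of the internal-call folding above: for 32-bit int,

     _1 = .ADD_OVERFLOW (2, 3);

   folds to the complex constant __complex__ (5, 0), i.e. result 5
   with the overflow flag clear, while .ADD_OVERFLOW (INT_MAX, 1)
   folds to __complex__ (INT_MIN, 1) with the flag set, because
   arith_overflowed_p detects that the infinite-precision sum does
   not fit in the type.  */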
4582
4583
4584 /* Return true if NAME has a use on statement STMT.  */
4585
4586 static bool
4587 has_use_on_stmt (tree name, gimple *stmt)
4588 {
4589 imm_use_iterator iter;
4590 use_operand_p use_p;
4591 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
4592 if (USE_STMT (use_p) == stmt)
4593 return true;
4594 return false;
4595 }
4596
4597 /* Worker for fold_stmt_1 dispatch to pattern based folding with
4598 gimple_simplify.
4599
4600    Replaces *GSI with the simplification result in RES_OP
4601    and the associated statements in *SEQ.  Does the replacement
4602    according to INPLACE and returns true if the operation succeeded.  */
4603
4604 static bool
4605 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
4606 gimple_match_op *res_op,
4607 gimple_seq *seq, bool inplace)
4608 {
4609 gimple *stmt = gsi_stmt (*gsi);
4610 tree *ops = res_op->ops;
4611 unsigned int num_ops = res_op->num_ops;
4612
4613 /* Play safe and do not allow abnormals to be mentioned in
4614 newly created statements. See also maybe_push_res_to_seq.
4615 As an exception allow such uses if there was a use of the
4616 same SSA name on the old stmt. */
4617 for (unsigned int i = 0; i < num_ops; ++i)
4618 if (TREE_CODE (ops[i]) == SSA_NAME
4619 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])
4620 && !has_use_on_stmt (ops[i], stmt))
4621 return false;
4622
4623 if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
4624 for (unsigned int i = 0; i < 2; ++i)
4625 if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
4626 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))
4627 && !has_use_on_stmt (TREE_OPERAND (ops[0], i), stmt))
4628 return false;
4629
4630 /* Don't insert new statements when INPLACE is true, even if we could
4631 reuse STMT for the final statement. */
4632 if (inplace && !gimple_seq_empty_p (*seq))
4633 return false;
4634
4635 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
4636 {
4637 gcc_assert (res_op->code.is_tree_code ());
4638 if (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison
4639 /* GIMPLE_CONDs condition may not throw. */
4640 && (!flag_exceptions
4641 || !cfun->can_throw_non_call_exceptions
4642 || !operation_could_trap_p (res_op->code,
4643 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
4644 false, NULL_TREE)))
4645 gimple_cond_set_condition (cond_stmt, res_op->code, ops[0], ops[1]);
4646 else if (res_op->code == SSA_NAME)
4647 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
4648 build_zero_cst (TREE_TYPE (ops[0])));
4649 else if (res_op->code == INTEGER_CST)
4650 {
4651 if (integer_zerop (ops[0]))
4652 gimple_cond_make_false (cond_stmt);
4653 else
4654 gimple_cond_make_true (cond_stmt);
4655 }
4656 else if (!inplace)
4657 {
4658 tree res = maybe_push_res_to_seq (res_op, seq);
4659 if (!res)
4660 return false;
4661 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
4662 build_zero_cst (TREE_TYPE (res)));
4663 }
4664 else
4665 return false;
4666 if (dump_file && (dump_flags & TDF_DETAILS))
4667 {
4668 fprintf (dump_file, "gimple_simplified to ");
4669 if (!gimple_seq_empty_p (*seq))
4670 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4671 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4672 0, TDF_SLIM);
4673 }
4674 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4675 return true;
4676 }
4677 else if (is_gimple_assign (stmt)
4678 && res_op->code.is_tree_code ())
4679 {
4680 if (!inplace
4681 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (res_op->code))
4682 {
4683 maybe_build_generic_op (res_op);
4684 gimple_assign_set_rhs_with_ops (gsi, res_op->code,
4685 res_op->op_or_null (0),
4686 res_op->op_or_null (1),
4687 res_op->op_or_null (2));
4688 if (dump_file && (dump_flags & TDF_DETAILS))
4689 {
4690 fprintf (dump_file, "gimple_simplified to ");
4691 if (!gimple_seq_empty_p (*seq))
4692 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4693 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4694 0, TDF_SLIM);
4695 }
4696 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4697 return true;
4698 }
4699 }
4700 else if (res_op->code.is_fn_code ()
4701 && gimple_call_combined_fn (stmt) == res_op->code)
4702 {
4703 gcc_assert (num_ops == gimple_call_num_args (stmt));
4704 for (unsigned int i = 0; i < num_ops; ++i)
4705 gimple_call_set_arg (stmt, i, ops[i]);
4706 if (dump_file && (dump_flags & TDF_DETAILS))
4707 {
4708 fprintf (dump_file, "gimple_simplified to ");
4709 if (!gimple_seq_empty_p (*seq))
4710 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4711 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
4712 }
4713 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4714 return true;
4715 }
4716 else if (!inplace)
4717 {
4718 if (gimple_has_lhs (stmt))
4719 {
4720 tree lhs = gimple_get_lhs (stmt);
4721 if (!maybe_push_res_to_seq (res_op, seq, lhs))
4722 return false;
4723 if (dump_file && (dump_flags & TDF_DETAILS))
4724 {
4725 fprintf (dump_file, "gimple_simplified to ");
4726 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4727 }
4728 gsi_replace_with_seq_vops (gsi, *seq);
4729 return true;
4730 }
4731 else
4732 gcc_unreachable ();
4733 }
4734
4735 return false;
4736 }
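/* Example of the GIMPLE_COND handling above: if gimple_simplify
   reduces the condition of

     if (a_1 != 0)

   to the constant 0, the statement is rewritten via
   gimple_cond_make_false; if it reduces to an SSA name b_2, the
   condition becomes b_2 != 0; and a comparison result such as
   c_3 < d_4 replaces the condition in place, provided it cannot
   throw.  */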
4737
4738 /* Canonicalize MEM_REFs invariant address operand after propagation. */
4739
4740 static bool
4741 maybe_canonicalize_mem_ref_addr (tree *t)
4742 {
4743 bool res = false;
4744
4745 if (TREE_CODE (*t) == ADDR_EXPR)
4746 t = &TREE_OPERAND (*t, 0);
4747
4748 /* The C and C++ frontends use an ARRAY_REF for indexing with their
4749 generic vector extension. The actual vector referenced is
4750 view-converted to an array type for this purpose. If the index
4751 is constant the canonical representation in the middle-end is a
4752 BIT_FIELD_REF so re-write the former to the latter here. */
4753 if (TREE_CODE (*t) == ARRAY_REF
4754 && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
4755 && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
4756 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
4757 {
4758 tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
4759 if (VECTOR_TYPE_P (vtype))
4760 {
4761 tree low = array_ref_low_bound (*t);
4762 if (TREE_CODE (low) == INTEGER_CST)
4763 {
4764 if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
4765 {
4766 widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
4767 wi::to_widest (low));
4768 idx = wi::mul (idx, wi::to_widest
4769 (TYPE_SIZE (TREE_TYPE (*t))));
4770 widest_int ext
4771 = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
4772 if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
4773 {
4774 *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
4775 TREE_TYPE (*t),
4776 TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
4777 TYPE_SIZE (TREE_TYPE (*t)),
4778 wide_int_to_tree (bitsizetype, idx));
4779 res = true;
4780 }
4781 }
4782 }
4783 }
4784 }
4785
4786 while (handled_component_p (*t))
4787 t = &TREE_OPERAND (*t, 0);
4788
4789   /* Canonicalize MEM [&foo.bar, 0], which appears after propagation
4790      of invariant addresses into an SSA name MEM_REF address.  */
4791 if (TREE_CODE (*t) == MEM_REF
4792 || TREE_CODE (*t) == TARGET_MEM_REF)
4793 {
4794 tree addr = TREE_OPERAND (*t, 0);
4795 if (TREE_CODE (addr) == ADDR_EXPR
4796 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
4797 || handled_component_p (TREE_OPERAND (addr, 0))))
4798 {
4799 tree base;
4800 poly_int64 coffset;
4801 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
4802 &coffset);
4803 if (!base)
4804 gcc_unreachable ();
4805
4806 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
4807 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
4808 TREE_OPERAND (*t, 1),
4809 size_int (coffset));
4810 res = true;
4811 }
4812 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
4813 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
4814 }
4815
4816 /* Canonicalize back MEM_REFs to plain reference trees if the object
4817 accessed is a decl that has the same access semantics as the MEM_REF. */
4818 if (TREE_CODE (*t) == MEM_REF
4819 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
4820 && integer_zerop (TREE_OPERAND (*t, 1))
4821 && MR_DEPENDENCE_CLIQUE (*t) == 0)
4822 {
4823 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4824 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
4825 if (/* Same volatile qualification. */
4826 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
4827 /* Same TBAA behavior with -fstrict-aliasing. */
4828 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
4829 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
4830 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
4831 /* Same alignment. */
4832 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
4833        /* Be careful not to drop a required conversion from the rhs
4834           to the lhs if *t appears on the lhs, or vice-versa if it
4835           appears on the rhs.  Thus require strict type
4836           compatibility.  */
4837 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
4838 {
4839 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4840 res = true;
4841 }
4842 }
4843
4844 /* Canonicalize TARGET_MEM_REF in particular with respect to
4845 the indexes becoming constant. */
4846 else if (TREE_CODE (*t) == TARGET_MEM_REF)
4847 {
4848 tree tem = maybe_fold_tmr (*t);
4849 if (tem)
4850 {
4851 *t = tem;
4852 res = true;
4853 }
4854 }
4855
4856 return res;
4857 }
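/* Source-level sketches of the canonicalizations above: assuming
   field b of struct a sits at constant offset 4,

     MEM[&a.b, 0]  becomes  MEM[&a, 4]

   via get_addr_base_and_unit_offset, and

     MEM[&decl, 0]  becomes the plain reference  decl

   when the access type, volatility and alignment of DECL match
   those of the MEM_REF.  */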
4858
4859 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
4860 distinguishes both cases. */
4861
4862 static bool
4863 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
4864 {
4865 bool changed = false;
4866 gimple *stmt = gsi_stmt (*gsi);
4867 bool nowarning = gimple_no_warning_p (stmt);
4868 unsigned i;
4869 fold_defer_overflow_warnings ();
4870
4871 /* First do required canonicalization of [TARGET_]MEM_REF addresses
4872 after propagation.
4873 ??? This shouldn't be done in generic folding but in the
4874 propagation helpers which also know whether an address was
4875 propagated.
4876 Also canonicalize operand order. */
4877 switch (gimple_code (stmt))
4878 {
4879 case GIMPLE_ASSIGN:
4880 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
4881 {
4882 tree *rhs = gimple_assign_rhs1_ptr (stmt);
4883 if ((REFERENCE_CLASS_P (*rhs)
4884 || TREE_CODE (*rhs) == ADDR_EXPR)
4885 && maybe_canonicalize_mem_ref_addr (rhs))
4886 changed = true;
4887 tree *lhs = gimple_assign_lhs_ptr (stmt);
4888 if (REFERENCE_CLASS_P (*lhs)
4889 && maybe_canonicalize_mem_ref_addr (lhs))
4890 changed = true;
4891 }
4892 else
4893 {
4894 /* Canonicalize operand order. */
4895 enum tree_code code = gimple_assign_rhs_code (stmt);
4896 if (TREE_CODE_CLASS (code) == tcc_comparison
4897 || commutative_tree_code (code)
4898 || commutative_ternary_tree_code (code))
4899 {
4900 tree rhs1 = gimple_assign_rhs1 (stmt);
4901 tree rhs2 = gimple_assign_rhs2 (stmt);
4902 if (tree_swap_operands_p (rhs1, rhs2))
4903 {
4904 gimple_assign_set_rhs1 (stmt, rhs2);
4905 gimple_assign_set_rhs2 (stmt, rhs1);
4906 if (TREE_CODE_CLASS (code) == tcc_comparison)
4907 gimple_assign_set_rhs_code (stmt,
4908 swap_tree_comparison (code));
4909 changed = true;
4910 }
4911 }
4912 }
4913 break;
4914 case GIMPLE_CALL:
4915 {
4916 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4917 {
4918 tree *arg = gimple_call_arg_ptr (stmt, i);
4919 if (REFERENCE_CLASS_P (*arg)
4920 && maybe_canonicalize_mem_ref_addr (arg))
4921 changed = true;
4922 }
4923 tree *lhs = gimple_call_lhs_ptr (stmt);
4924 if (*lhs
4925 && REFERENCE_CLASS_P (*lhs)
4926 && maybe_canonicalize_mem_ref_addr (lhs))
4927 changed = true;
4928 break;
4929 }
4930 case GIMPLE_ASM:
4931 {
4932 gasm *asm_stmt = as_a <gasm *> (stmt);
4933 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
4934 {
4935 tree link = gimple_asm_output_op (asm_stmt, i);
4936 tree op = TREE_VALUE (link);
4937 if (REFERENCE_CLASS_P (op)
4938 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4939 changed = true;
4940 }
4941 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
4942 {
4943 tree link = gimple_asm_input_op (asm_stmt, i);
4944 tree op = TREE_VALUE (link);
4945 if ((REFERENCE_CLASS_P (op)
4946 || TREE_CODE (op) == ADDR_EXPR)
4947 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4948 changed = true;
4949 }
4950 }
4951 break;
4952 case GIMPLE_DEBUG:
4953 if (gimple_debug_bind_p (stmt))
4954 {
4955 tree *val = gimple_debug_bind_get_value_ptr (stmt);
4956 if (*val
4957 && (REFERENCE_CLASS_P (*val)
4958 || TREE_CODE (*val) == ADDR_EXPR)
4959 && maybe_canonicalize_mem_ref_addr (val))
4960 changed = true;
4961 }
4962 break;
4963 case GIMPLE_COND:
4964 {
4965 /* Canonicalize operand order. */
4966 tree lhs = gimple_cond_lhs (stmt);
4967 tree rhs = gimple_cond_rhs (stmt);
4968 if (tree_swap_operands_p (lhs, rhs))
4969 {
4970 gcond *gc = as_a <gcond *> (stmt);
4971 gimple_cond_set_lhs (gc, rhs);
4972 gimple_cond_set_rhs (gc, lhs);
4973 gimple_cond_set_code (gc,
4974 swap_tree_comparison (gimple_cond_code (gc)));
4975 changed = true;
4976 }
4977 }
4978 default:;
4979 }
4980
4981 /* Dispatch to pattern-based folding. */
4982 if (!inplace
4983 || is_gimple_assign (stmt)
4984 || gimple_code (stmt) == GIMPLE_COND)
4985 {
4986 gimple_seq seq = NULL;
4987 gimple_match_op res_op;
4988 if (gimple_simplify (stmt, &res_op, inplace ? NULL : &seq,
4989 valueize, valueize))
4990 {
4991 if (replace_stmt_with_simplification (gsi, &res_op, &seq, inplace))
4992 changed = true;
4993 else
4994 gimple_seq_discard (seq);
4995 }
4996 }
4997
4998 stmt = gsi_stmt (*gsi);
4999
5000 /* Fold the main computation performed by the statement. */
5001 switch (gimple_code (stmt))
5002 {
5003 case GIMPLE_ASSIGN:
5004 {
5005 /* Try to canonicalize for boolean-typed X the comparisons
5006 X == 0, X == 1, X != 0, and X != 1. */
5007 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
5008 || gimple_assign_rhs_code (stmt) == NE_EXPR)
5009 {
5010 tree lhs = gimple_assign_lhs (stmt);
5011 tree op1 = gimple_assign_rhs1 (stmt);
5012 tree op2 = gimple_assign_rhs2 (stmt);
5013 tree type = TREE_TYPE (op1);
5014
5015            /* Check whether the comparison operands are of the same
5016               boolean type as the result type, and that the second
5017               operand is an integer constant with value one or zero.  */
5019 if (TREE_CODE (op2) == INTEGER_CST
5020 && (integer_zerop (op2) || integer_onep (op2))
5021 && useless_type_conversion_p (TREE_TYPE (lhs), type))
5022 {
5023 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
5024 bool is_logical_not = false;
5025
5026              /* X == 0 and X != 1 is a logical-not of X;
5027                 X == 1 and X != 0 is X itself.  */
5028 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
5029 || (cmp_code == NE_EXPR && integer_onep (op2)))
5030 is_logical_not = true;
5031
5032 if (is_logical_not == false)
5033 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
5034              /* The transformation !X -> ~X is valid only when X
5035                 has one-bit precision.  */
5036 else if (TYPE_PRECISION (type) == 1)
5037 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
5038 /* Otherwise we use !X -> X ^ 1. */
5039 else
5040 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
5041 build_int_cst (type, 1));
5042 changed = true;
5043 break;
5044 }
5045 }
5046
5047 unsigned old_num_ops = gimple_num_ops (stmt);
5048 tree lhs = gimple_assign_lhs (stmt);
5049 tree new_rhs = fold_gimple_assign (gsi);
5050 if (new_rhs
5051 && !useless_type_conversion_p (TREE_TYPE (lhs),
5052 TREE_TYPE (new_rhs)))
5053 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
5054 if (new_rhs
5055 && (!inplace
5056 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
5057 {
5058 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
5059 changed = true;
5060 }
5061 break;
5062 }
5063
5064 case GIMPLE_CALL:
5065 changed |= gimple_fold_call (gsi, inplace);
5066 break;
5067
5068 case GIMPLE_ASM:
5069 /* Fold *& in asm operands. */
5070 {
5071 gasm *asm_stmt = as_a <gasm *> (stmt);
5072 size_t noutputs;
5073 const char **oconstraints;
5074 const char *constraint;
5075 bool allows_mem, allows_reg;
5076
5077 noutputs = gimple_asm_noutputs (asm_stmt);
5078 oconstraints = XALLOCAVEC (const char *, noutputs);
5079
5080 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
5081 {
5082 tree link = gimple_asm_output_op (asm_stmt, i);
5083 tree op = TREE_VALUE (link);
5084 oconstraints[i]
5085 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5086 if (REFERENCE_CLASS_P (op)
5087 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
5088 {
5089 TREE_VALUE (link) = op;
5090 changed = true;
5091 }
5092 }
5093 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
5094 {
5095 tree link = gimple_asm_input_op (asm_stmt, i);
5096 tree op = TREE_VALUE (link);
5097 constraint
5098 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5099 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
5100 oconstraints, &allows_mem, &allows_reg);
5101 if (REFERENCE_CLASS_P (op)
5102 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
5103 != NULL_TREE)
5104 {
5105 TREE_VALUE (link) = op;
5106 changed = true;
5107 }
5108 }
5109 }
5110 break;
5111
5112 case GIMPLE_DEBUG:
5113 if (gimple_debug_bind_p (stmt))
5114 {
5115 tree val = gimple_debug_bind_get_value (stmt);
5116 if (val
5117 && REFERENCE_CLASS_P (val))
5118 {
5119 tree tem = maybe_fold_reference (val, false);
5120 if (tem)
5121 {
5122 gimple_debug_bind_set_value (stmt, tem);
5123 changed = true;
5124 }
5125 }
5126 else if (val
5127 && TREE_CODE (val) == ADDR_EXPR)
5128 {
5129 tree ref = TREE_OPERAND (val, 0);
5130 tree tem = maybe_fold_reference (ref, false);
5131 if (tem)
5132 {
5133 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
5134 gimple_debug_bind_set_value (stmt, tem);
5135 changed = true;
5136 }
5137 }
5138 }
5139 break;
5140
5141 case GIMPLE_RETURN:
5142 {
5143 greturn *ret_stmt = as_a<greturn *> (stmt);
5144       tree ret = gimple_return_retval (ret_stmt);
5145
5146 if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
5147 {
5148 tree val = valueize (ret);
5149 if (val && val != ret
5150 && may_propagate_copy (ret, val))
5151 {
5152 gimple_return_set_retval (ret_stmt, val);
5153 changed = true;
5154 }
5155 }
5156 }
5157 break;
5158
5159 default:;
5160 }
5161
5162 stmt = gsi_stmt (*gsi);
5163
5164 /* Fold *& on the lhs. */
5165 if (gimple_has_lhs (stmt))
5166 {
5167 tree lhs = gimple_get_lhs (stmt);
5168 if (lhs && REFERENCE_CLASS_P (lhs))
5169 {
5170 tree new_lhs = maybe_fold_reference (lhs, true);
5171 if (new_lhs)
5172 {
5173 gimple_set_lhs (stmt, new_lhs);
5174 changed = true;
5175 }
5176 }
5177 }
5178
5179 fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
5180 return changed;
5181 }
5182
5183 /* Valueization callback that ends up not following SSA edges.  */
5184
5185 tree
5186 no_follow_ssa_edges (tree)
5187 {
5188 return NULL_TREE;
5189 }
5190
5191 /* Valueization callback that ends up following single-use SSA edges only. */
5192
5193 tree
5194 follow_single_use_edges (tree val)
5195 {
5196 if (TREE_CODE (val) == SSA_NAME
5197 && !has_single_use (val))
5198 return NULL_TREE;
5199 return val;
5200 }
5201
5202 /* Valueization callback that follows all SSA edges. */
5203
5204 tree
5205 follow_all_ssa_edges (tree val)
5206 {
5207 return val;
5208 }
5209
5210 /* Fold the statement pointed to by GSI. In some cases, this function may
5211 replace the whole statement with a new one. Returns true iff folding
5212 makes any changes.
5213 The statement pointed to by GSI should be in valid gimple form but may
5214 be in unfolded state as resulting from for example constant propagation
5215 which can produce *&x = 0. */
5216
5217 bool
5218 fold_stmt (gimple_stmt_iterator *gsi)
5219 {
5220 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
5221 }
5222
5223 bool
5224 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
5225 {
5226 return fold_stmt_1 (gsi, false, valueize);
5227 }
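/* Usage sketch for the entry points above (hypothetical caller):

     gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
     if (fold_stmt (&gsi, follow_single_use_edges))
       update_stmt (gsi_stmt (gsi));

   The valueization callback controls how far gimple_simplify may
   look through SSA definitions while folding.  */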
5228
5229 /* Perform the minimal folding on statement *GSI. Only operations like
5230 *&x created by constant propagation are handled. The statement cannot
5231 be replaced with a new one. Return true if the statement was
5232 changed, false otherwise.
5233 The statement *GSI should be in valid gimple form but may
5234 be in unfolded state as resulting from for example constant propagation
5235 which can produce *&x = 0. */
5236
5237 bool
5238 fold_stmt_inplace (gimple_stmt_iterator *gsi)
5239 {
5240 gimple *stmt = gsi_stmt (*gsi);
5241 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
5242 gcc_assert (gsi_stmt (*gsi) == stmt);
5243 return changed;
5244 }
5245
5246 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
5247 if EXPR is null or we don't know how.
5248 If non-null, the result always has boolean type. */
5249
5250 static tree
5251 canonicalize_bool (tree expr, bool invert)
5252 {
5253 if (!expr)
5254 return NULL_TREE;
5255 else if (invert)
5256 {
5257 if (integer_nonzerop (expr))
5258 return boolean_false_node;
5259 else if (integer_zerop (expr))
5260 return boolean_true_node;
5261 else if (TREE_CODE (expr) == SSA_NAME)
5262 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
5263 build_int_cst (TREE_TYPE (expr), 0));
5264 else if (COMPARISON_CLASS_P (expr))
5265 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
5266 boolean_type_node,
5267 TREE_OPERAND (expr, 0),
5268 TREE_OPERAND (expr, 1));
5269 else
5270 return NULL_TREE;
5271 }
5272 else
5273 {
5274 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5275 return expr;
5276 if (integer_nonzerop (expr))
5277 return boolean_true_node;
5278 else if (integer_zerop (expr))
5279 return boolean_false_node;
5280 else if (TREE_CODE (expr) == SSA_NAME)
5281 return fold_build2 (NE_EXPR, boolean_type_node, expr,
5282 build_int_cst (TREE_TYPE (expr), 0));
5283 else if (COMPARISON_CLASS_P (expr))
5284 return fold_build2 (TREE_CODE (expr),
5285 boolean_type_node,
5286 TREE_OPERAND (expr, 0),
5287 TREE_OPERAND (expr, 1));
5288 else
5289 return NULL_TREE;
5290 }
5291 }
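/* Examples for canonicalize_bool:

     canonicalize_bool (integer_one_node, false) -> boolean_true_node
     canonicalize_bool (name_1, false)           -> name_1 != 0
     canonicalize_bool (a < b, true)             -> a >= b

   the last assuming invert_tree_comparison can invert the code,
   i.e. there are no floating-point NaN concerns.  */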
5292
5293 /* Check to see if a boolean expression EXPR is logically equivalent to the
5294 comparison (OP1 CODE OP2). Check for various identities involving
5295 SSA_NAMEs. */
5296
5297 static bool
5298 same_bool_comparison_p (const_tree expr, enum tree_code code,
5299 const_tree op1, const_tree op2)
5300 {
5301 gimple *s;
5302
5303 /* The obvious case. */
5304 if (TREE_CODE (expr) == code
5305 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
5306 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
5307 return true;
5308
5309 /* Check for comparing (name, name != 0) and the case where expr
5310 is an SSA_NAME with a definition matching the comparison. */
5311 if (TREE_CODE (expr) == SSA_NAME
5312 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5313 {
5314 if (operand_equal_p (expr, op1, 0))
5315 return ((code == NE_EXPR && integer_zerop (op2))
5316 || (code == EQ_EXPR && integer_nonzerop (op2)));
5317 s = SSA_NAME_DEF_STMT (expr);
5318 if (is_gimple_assign (s)
5319 && gimple_assign_rhs_code (s) == code
5320 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
5321 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
5322 return true;
5323 }
5324
5325 /* If op1 is of the form (name != 0) or (name == 0), and the definition
5326 of name is a comparison, recurse. */
5327 if (TREE_CODE (op1) == SSA_NAME
5328 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
5329 {
5330 s = SSA_NAME_DEF_STMT (op1);
5331 if (is_gimple_assign (s)
5332 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
5333 {
5334 enum tree_code c = gimple_assign_rhs_code (s);
5335 if ((c == NE_EXPR && integer_zerop (op2))
5336 || (c == EQ_EXPR && integer_nonzerop (op2)))
5337 return same_bool_comparison_p (expr, c,
5338 gimple_assign_rhs1 (s),
5339 gimple_assign_rhs2 (s));
5340 if ((code == EQ_EXPR && integer_zerop (op2))
5341 || (code == NE_EXPR && integer_nonzerop (op2)))
5342 return same_bool_comparison_p (expr,
5343 invert_tree_comparison (c, false),
5344 gimple_assign_rhs1 (s),
5345 gimple_assign_rhs2 (s));
5346 }
5347 }
5348 return false;
5349 }
5350
5351 /* Check to see if two boolean expressions OP1 and OP2 are logically
5352 equivalent. */
5353
5354 static bool
5355 same_bool_result_p (const_tree op1, const_tree op2)
5356 {
5357 /* Simple cases first. */
5358 if (operand_equal_p (op1, op2, 0))
5359 return true;
5360
5361 /* Check the cases where at least one of the operands is a comparison.
5362 These are a bit smarter than operand_equal_p in that they apply some
5363 identities on SSA_NAMEs. */
5364 if (COMPARISON_CLASS_P (op2)
5365 && same_bool_comparison_p (op1, TREE_CODE (op2),
5366 TREE_OPERAND (op2, 0),
5367 TREE_OPERAND (op2, 1)))
5368 return true;
5369 if (COMPARISON_CLASS_P (op1)
5370 && same_bool_comparison_p (op2, TREE_CODE (op1),
5371 TREE_OPERAND (op1, 0),
5372 TREE_OPERAND (op1, 1)))
5373 return true;
5374
5375 /* Default case. */
5376 return false;
5377 }
5378
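/* For example (illustrative), given the GIMPLE definition
     _1 = a < b;
   same_bool_result_p recognizes _1, (_1 != 0) and (_1 == 1) as all
   being equivalent to the comparison (a < b), by following the SSA
   definition of _1 in same_bool_comparison_p.  */
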
5379 /* Forward declarations for some mutually recursive functions. */
5380
5381 static tree
5382 and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
5383 enum tree_code code2, tree op2a, tree op2b);
5384 static tree
5385 and_var_with_comparison (tree type, tree var, bool invert,
5386 enum tree_code code2, tree op2a, tree op2b);
5387 static tree
5388 and_var_with_comparison_1 (tree type, gimple *stmt,
5389 enum tree_code code2, tree op2a, tree op2b);
5390 static tree
5391 or_comparisons_1 (tree, enum tree_code code1, tree op1a, tree op1b,
5392 enum tree_code code2, tree op2a, tree op2b);
5393 static tree
5394 or_var_with_comparison (tree, tree var, bool invert,
5395 enum tree_code code2, tree op2a, tree op2b);
5396 static tree
5397 or_var_with_comparison_1 (tree, gimple *stmt,
5398 enum tree_code code2, tree op2a, tree op2b);
5399
5400 /* Helper function for and_comparisons_1: try to simplify the AND of the
5401 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5402 If INVERT is true, invert the value of VAR before doing the AND.
5403 Return NULL_TREE if we can't simplify this to a single expression. */
5404
5405 static tree
5406 and_var_with_comparison (tree type, tree var, bool invert,
5407 enum tree_code code2, tree op2a, tree op2b)
5408 {
5409 tree t;
5410 gimple *stmt = SSA_NAME_DEF_STMT (var);
5411
5412 /* We can only deal with variables whose definitions are assignments. */
5413 if (!is_gimple_assign (stmt))
5414 return NULL_TREE;
5415
5416 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5417 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
5418 Then we only have to consider the simpler non-inverted cases. */
5419 if (invert)
5420 t = or_var_with_comparison_1 (type, stmt,
5421 invert_tree_comparison (code2, false),
5422 op2a, op2b);
5423 else
5424 t = and_var_with_comparison_1 (type, stmt, code2, op2a, op2b);
5425 return canonicalize_bool (t, invert);
5426 }
5427
5428 /* Try to simplify the AND of the ssa variable defined by the assignment
5429 STMT with the comparison specified by (OP2A CODE2 OP2B).
5430 Return NULL_TREE if we can't simplify this to a single expression. */
5431
5432 static tree
5433 and_var_with_comparison_1 (tree type, gimple *stmt,
5434 enum tree_code code2, tree op2a, tree op2b)
5435 {
5436 tree var = gimple_assign_lhs (stmt);
5437 tree true_test_var = NULL_TREE;
5438 tree false_test_var = NULL_TREE;
5439 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5440
5441 /* Check for identities like (var AND (var == 0)) => false. */
5442 if (TREE_CODE (op2a) == SSA_NAME
5443 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5444 {
5445 if ((code2 == NE_EXPR && integer_zerop (op2b))
5446 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5447 {
5448 true_test_var = op2a;
5449 if (var == true_test_var)
5450 return var;
5451 }
5452 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5453 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5454 {
5455 false_test_var = op2a;
5456 if (var == false_test_var)
5457 return boolean_false_node;
5458 }
5459 }
5460
5461 /* If the definition is a comparison, recurse on it. */
5462 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5463 {
5464 tree t = and_comparisons_1 (type, innercode,
5465 gimple_assign_rhs1 (stmt),
5466 gimple_assign_rhs2 (stmt),
5467 code2,
5468 op2a,
5469 op2b);
5470 if (t)
5471 return t;
5472 }
5473
5474 /* If the definition is an AND or OR expression, we may be able to
5475 simplify by reassociating. */
5476 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5477 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
5478 {
5479 tree inner1 = gimple_assign_rhs1 (stmt);
5480 tree inner2 = gimple_assign_rhs2 (stmt);
5481 gimple *s;
5482 tree t;
5483 tree partial = NULL_TREE;
5484 bool is_and = (innercode == BIT_AND_EXPR);
5485
5486 /* Check for boolean identities that don't require recursive examination
5487 of inner1/inner2:
5488 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
5489 inner1 AND (inner1 OR inner2) => inner1
5490 !inner1 AND (inner1 AND inner2) => false
5491 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
5492 Likewise for similar cases involving inner2. */
5493 if (inner1 == true_test_var)
5494 return (is_and ? var : inner1);
5495 else if (inner2 == true_test_var)
5496 return (is_and ? var : inner2);
5497 else if (inner1 == false_test_var)
5498 return (is_and
5499 ? boolean_false_node
5500 : and_var_with_comparison (type, inner2, false, code2, op2a,
5501 op2b));
5502 else if (inner2 == false_test_var)
5503 return (is_and
5504 ? boolean_false_node
5505 : and_var_with_comparison (type, inner1, false, code2, op2a,
5506 op2b));
5507
5508 /* Next, redistribute/reassociate the AND across the inner tests.
5509 Compute the first partial result, (inner1 AND (op2a code2 op2b)) */
5510 if (TREE_CODE (inner1) == SSA_NAME
5511 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5512 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5513 && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
5514 gimple_assign_rhs1 (s),
5515 gimple_assign_rhs2 (s),
5516 code2, op2a, op2b)))
5517 {
5518 /* Handle the AND case, where we are reassociating:
5519 (inner1 AND inner2) AND (op2a code2 op2b)
5520 => (t AND inner2)
5521 If the partial result t is a constant, we win. Otherwise
5522 continue on to try reassociating with the other inner test. */
5523 if (is_and)
5524 {
5525 if (integer_onep (t))
5526 return inner2;
5527 else if (integer_zerop (t))
5528 return boolean_false_node;
5529 }
5530
5531 /* Handle the OR case, where we are redistributing:
5532 (inner1 OR inner2) AND (op2a code2 op2b)
5533 => (t OR (inner2 AND (op2a code2 op2b))) */
5534 else if (integer_onep (t))
5535 return boolean_true_node;
5536
5537 /* Save partial result for later. */
5538 partial = t;
5539 }
5540
5541 /* Compute the second partial result, (inner2 AND (op2a code2 op2b)) */
5542 if (TREE_CODE (inner2) == SSA_NAME
5543 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5544 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5545 && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
5546 gimple_assign_rhs1 (s),
5547 gimple_assign_rhs2 (s),
5548 code2, op2a, op2b)))
5549 {
5550 /* Handle the AND case, where we are reassociating:
5551 (inner1 AND inner2) AND (op2a code2 op2b)
5552 => (inner1 AND t) */
5553 if (is_and)
5554 {
5555 if (integer_onep (t))
5556 return inner1;
5557 else if (integer_zerop (t))
5558 return boolean_false_node;
5559 /* If both are the same, we can apply the identity
5560 (x AND x) == x. */
5561 else if (partial && same_bool_result_p (t, partial))
5562 return t;
5563 }
5564
5565 /* Handle the OR case, where we are redistributing:
5566 (inner1 OR inner2) AND (op2a code2 op2b)
5567 => (t OR (inner1 AND (op2a code2 op2b)))
5568 => (t OR partial) */
5569 else
5570 {
5571 if (integer_onep (t))
5572 return boolean_true_node;
5573 else if (partial)
5574 {
5575 /* We already got a simplification for the other
5576 operand to the redistributed OR expression. The
5577 interesting case is when at least one is false.
5578 Or, if both are the same, we can apply the identity
5579 (x OR x) == x. */
5580 if (integer_zerop (partial))
5581 return t;
5582 else if (integer_zerop (t))
5583 return partial;
5584 else if (same_bool_result_p (t, partial))
5585 return t;
5586 }
5587 }
5588 }
5589 }
5590 return NULL_TREE;
5591 }
5592
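/* A worked instance of the reassociation above (illustrative; A and B
   are integer SSA names): with
     v1 = a <= b;
     v2 = a >= b;
     var = v1 & v2;
   ANDed with the comparison (a == b), both partial results
     (a <= b) AND (a == b)  and  (a >= b) AND (a == b)
   fold to a == b, so the (x AND x) == x identity applies and a == b
   is returned.  */
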
5593 /* Try to simplify the AND of two comparisons defined by
5594 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5595 If this can be done without constructing an intermediate value,
5596 return the resulting tree; otherwise NULL_TREE is returned.
5597 This function is deliberately asymmetric as it recurses on SSA_DEFs
5598 in the first comparison but not the second. */
5599
5600 static tree
5601 and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
5602 enum tree_code code2, tree op2a, tree op2b)
5603 {
5604 tree truth_type = truth_type_for (TREE_TYPE (op1a));
5605
5606 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
5607 if (operand_equal_p (op1a, op2a, 0)
5608 && operand_equal_p (op1b, op2b, 0))
5609 {
5610 /* Result will be either NULL_TREE, or a combined comparison. */
5611 tree t = combine_comparisons (UNKNOWN_LOCATION,
5612 TRUTH_ANDIF_EXPR, code1, code2,
5613 truth_type, op1a, op1b);
5614 if (t)
5615 return t;
5616 }
5617
5618 /* Likewise the swapped case of the above. */
5619 if (operand_equal_p (op1a, op2b, 0)
5620 && operand_equal_p (op1b, op2a, 0))
5621 {
5622 /* Result will be either NULL_TREE, or a combined comparison. */
5623 tree t = combine_comparisons (UNKNOWN_LOCATION,
5624 TRUTH_ANDIF_EXPR, code1,
5625 swap_tree_comparison (code2),
5626 truth_type, op1a, op1b);
5627 if (t)
5628 return t;
5629 }
5630
5631 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5632 NAME's definition is a truth value. See if there are any simplifications
5633 that can be done against the NAME's definition. */
5634 if (TREE_CODE (op1a) == SSA_NAME
5635 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5636 && (integer_zerop (op1b) || integer_onep (op1b)))
5637 {
5638 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5639 || (code1 == NE_EXPR && integer_onep (op1b)));
5640 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
5641 switch (gimple_code (stmt))
5642 {
5643 case GIMPLE_ASSIGN:
5644 /* Try to simplify by copy-propagating the definition. */
5645 return and_var_with_comparison (type, op1a, invert, code2, op2a,
5646 op2b);
5647
5648 case GIMPLE_PHI:
5649 /* If every argument to the PHI produces the same result when
5650 ANDed with the second comparison, we win.
5651 Do not do this unless the type is bool since we need a bool
5652 result here anyway. */
5653 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5654 {
5655 tree result = NULL_TREE;
5656 unsigned i;
5657 for (i = 0; i < gimple_phi_num_args (stmt); i++)
5658 {
5659 tree arg = gimple_phi_arg_def (stmt, i);
5660
5661 /* If this PHI has itself as an argument, ignore it.
5662 If all the other args produce the same result,
5663 we're still OK. */
5664 if (arg == gimple_phi_result (stmt))
5665 continue;
5666 else if (TREE_CODE (arg) == INTEGER_CST)
5667 {
5668 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
5669 {
5670 if (!result)
5671 result = boolean_false_node;
5672 else if (!integer_zerop (result))
5673 return NULL_TREE;
5674 }
5675 else if (!result)
5676 result = fold_build2 (code2, boolean_type_node,
5677 op2a, op2b);
5678 else if (!same_bool_comparison_p (result,
5679 code2, op2a, op2b))
5680 return NULL_TREE;
5681 }
5682 else if (TREE_CODE (arg) == SSA_NAME
5683 && !SSA_NAME_IS_DEFAULT_DEF (arg))
5684 {
5685 tree temp;
5686 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
5687 /* In simple cases we can look through PHI nodes,
5688 but we have to be careful with loops.
5689 See PR49073. */
5690 if (! dom_info_available_p (CDI_DOMINATORS)
5691 || gimple_bb (def_stmt) == gimple_bb (stmt)
5692 || dominated_by_p (CDI_DOMINATORS,
5693 gimple_bb (def_stmt),
5694 gimple_bb (stmt)))
5695 return NULL_TREE;
5696 temp = and_var_with_comparison (type, arg, invert, code2,
5697 op2a, op2b);
5698 if (!temp)
5699 return NULL_TREE;
5700 else if (!result)
5701 result = temp;
5702 else if (!same_bool_result_p (result, temp))
5703 return NULL_TREE;
5704 }
5705 else
5706 return NULL_TREE;
5707 }
5708 return result;
5709 }
5710
5711 default:
5712 break;
5713 }
5714 }
5715 return NULL_TREE;
5716 }
5717
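/* For same-operand pairs the combine_comparisons calls above yield,
   for example (illustrative, integer X):
     (X <= 7) AND (X >= 7)  =>  X == 7
     (X <  7) AND (X >  7)  =>  false  */
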
5718 /* Helper function for maybe_fold_and_comparisons and maybe_fold_or_comparisons:
5719 try to simplify the AND/OR of the two comparisons (OP1A CODE1 OP1B)
5720 and (OP2A CODE2 OP2B) via match.pd. Return NULL_TREE if we can't
5721 simplify this to a single expression. As we are going to lower the cost
5722 of building SSA names / gimple stmts significantly, we need to allocate
5723 them on the stack. This will cause the code to be a bit ugly. */
5724
5725 static tree
5726 maybe_fold_comparisons_from_match_pd (tree type, enum tree_code code,
5727 enum tree_code code1,
5728 tree op1a, tree op1b,
5729 enum tree_code code2, tree op2a,
5730 tree op2b)
5731 {
5732 /* Allocate gimple stmt1 on the stack. */
5733 gassign *stmt1
5734 = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
5735 gimple_init (stmt1, GIMPLE_ASSIGN, 3);
5736 gimple_assign_set_rhs_code (stmt1, code1);
5737 gimple_assign_set_rhs1 (stmt1, op1a);
5738 gimple_assign_set_rhs2 (stmt1, op1b);
5739
5740 /* Allocate gimple stmt2 on the stack. */
5741 gassign *stmt2
5742 = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
5743 gimple_init (stmt2, GIMPLE_ASSIGN, 3);
5744 gimple_assign_set_rhs_code (stmt2, code2);
5745 gimple_assign_set_rhs1 (stmt2, op2a);
5746 gimple_assign_set_rhs2 (stmt2, op2b);
5747
5748 /* Allocate the SSA name LHS1 on the stack. */
5749 tree lhs1 = (tree)XALLOCA (tree_ssa_name);
5750 memset (lhs1, 0, sizeof (tree_ssa_name));
5751 TREE_SET_CODE (lhs1, SSA_NAME);
5752 TREE_TYPE (lhs1) = type;
5753 init_ssa_name_imm_use (lhs1);
5754
5755 /* Allocate the SSA name LHS2 on the stack. */
5756 tree lhs2 = (tree)XALLOCA (tree_ssa_name);
5757 memset (lhs2, 0, sizeof (tree_ssa_name));
5758 TREE_SET_CODE (lhs2, SSA_NAME);
5759 TREE_TYPE (lhs2) = type;
5760 init_ssa_name_imm_use (lhs2);
5761
5762 gimple_assign_set_lhs (stmt1, lhs1);
5763 gimple_assign_set_lhs (stmt2, lhs2);
5764
5765 gimple_match_op op (gimple_match_cond::UNCOND, code,
5766 type, gimple_assign_lhs (stmt1),
5767 gimple_assign_lhs (stmt2));
5768 if (op.resimplify (NULL, follow_all_ssa_edges))
5769 {
5770 if (gimple_simplified_result_is_gimple_val (&op))
5771 {
5772 tree res = op.ops[0];
5773 if (res == lhs1)
5774 return build2 (code1, type, op1a, op1b);
5775 else if (res == lhs2)
5776 return build2 (code2, type, op2a, op2b);
5777 else
5778 return res;
5779 }
5780 else if (op.code.is_tree_code ()
5781 && TREE_CODE_CLASS ((tree_code)op.code) == tcc_comparison)
5782 {
5783 tree op0 = op.ops[0];
5784 tree op1 = op.ops[1];
5785 if (op0 == lhs1 || op0 == lhs2 || op1 == lhs1 || op1 == lhs2)
5786 return NULL_TREE; /* not simple */
5787
5788 return build2 ((enum tree_code)op.code, op.type, op0, op1);
5789 }
5790 }
5791
5792 return NULL_TREE;
5793 }
5794
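/* Mechanically (illustrative), for CODE == BIT_AND_EXPR the function
   above builds the temporary on-stack GIMPLE
     lhs1 = op1a code1 op1b;
     lhs2 = op2a code2 op2b;
   and asks match.pd to resimplify lhs1 & lhs2.  If the result is one
   of the two fake SSA names, the corresponding original comparison is
   rebuilt with build2; a fresh comparison is returned only when it no
   longer references lhs1 or lhs2.  */
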
5795 /* Try to simplify the AND of two comparisons, specified by
5796 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5797 If this can be simplified to a single expression (without requiring
5798 introducing more SSA variables to hold intermediate values),
5799 return the resulting tree. Otherwise return NULL_TREE.
5800 If the result expression is non-null, it has boolean type. */
5801
5802 tree
5803 maybe_fold_and_comparisons (tree type,
5804 enum tree_code code1, tree op1a, tree op1b,
5805 enum tree_code code2, tree op2a, tree op2b)
5806 {
5807 if (tree t = and_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b))
5808 return t;
5809
5810 if (tree t = and_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b))
5811 return t;
5812
5813 if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_AND_EXPR, code1,
5814 op1a, op1b, code2, op2a,
5815 op2b))
5816 return t;
5817
5818 return NULL_TREE;
5819 }
5820
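/* Illustrative usage (editor's sketch; X is assumed to be an integer
   SSA name):

     tree seven = build_int_cst (TREE_TYPE (x), 7);
     tree t = maybe_fold_and_comparisons (boolean_type_node,
                                          LE_EXPR, x, seven,
                                          GE_EXPR, x, seven);

   Here and_comparisons_1 combines the same-operand pair via
   combine_comparisons, so T is the tree for x == 7.  */
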
5821 /* Helper function for or_comparisons_1: try to simplify the OR of the
5822 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5823 If INVERT is true, invert the value of VAR before doing the OR.
5824 Return NULL_TREE if we can't simplify this to a single expression. */
5825
5826 static tree
5827 or_var_with_comparison (tree type, tree var, bool invert,
5828 enum tree_code code2, tree op2a, tree op2b)
5829 {
5830 tree t;
5831 gimple *stmt = SSA_NAME_DEF_STMT (var);
5832
5833 /* We can only deal with variables whose definitions are assignments. */
5834 if (!is_gimple_assign (stmt))
5835 return NULL_TREE;
5836
5837 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5838 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
5839 Then we only have to consider the simpler non-inverted cases. */
5840 if (invert)
5841 t = and_var_with_comparison_1 (type, stmt,
5842 invert_tree_comparison (code2, false),
5843 op2a, op2b);
5844 else
5845 t = or_var_with_comparison_1 (type, stmt, code2, op2a, op2b);
5846 return canonicalize_bool (t, invert);
5847 }
5848
5849 /* Try to simplify the OR of the ssa variable defined by the assignment
5850 STMT with the comparison specified by (OP2A CODE2 OP2B).
5851 Return NULL_TREE if we can't simplify this to a single expression. */
5852
5853 static tree
5854 or_var_with_comparison_1 (tree type, gimple *stmt,
5855 enum tree_code code2, tree op2a, tree op2b)
5856 {
5857 tree var = gimple_assign_lhs (stmt);
5858 tree true_test_var = NULL_TREE;
5859 tree false_test_var = NULL_TREE;
5860 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5861
5862 /* Check for identities like (var OR (var == 0)) => true. */
5863 if (TREE_CODE (op2a) == SSA_NAME
5864 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5865 {
5866 if ((code2 == NE_EXPR && integer_zerop (op2b))
5867 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5868 {
5869 true_test_var = op2a;
5870 if (var == true_test_var)
5871 return var;
5872 }
5873 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5874 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5875 {
5876 false_test_var = op2a;
5877 if (var == false_test_var)
5878 return boolean_true_node;
5879 }
5880 }
5881
5882 /* If the definition is a comparison, recurse on it. */
5883 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5884 {
5885 tree t = or_comparisons_1 (type, innercode,
5886 gimple_assign_rhs1 (stmt),
5887 gimple_assign_rhs2 (stmt),
5888 code2,
5889 op2a,
5890 op2b);
5891 if (t)
5892 return t;
5893 }
5894
5895 /* If the definition is an AND or OR expression, we may be able to
5896 simplify by reassociating. */
5897 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5898 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
5899 {
5900 tree inner1 = gimple_assign_rhs1 (stmt);
5901 tree inner2 = gimple_assign_rhs2 (stmt);
5902 gimple *s;
5903 tree t;
5904 tree partial = NULL_TREE;
5905 bool is_or = (innercode == BIT_IOR_EXPR);
5906
5907 /* Check for boolean identities that don't require recursive examination
5908 of inner1/inner2:
5909 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
5910 inner1 OR (inner1 AND inner2) => inner1
5911 !inner1 OR (inner1 OR inner2) => true
5912 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
5913 */
5914 if (inner1 == true_test_var)
5915 return (is_or ? var : inner1);
5916 else if (inner2 == true_test_var)
5917 return (is_or ? var : inner2);
5918 else if (inner1 == false_test_var)
5919 return (is_or
5920 ? boolean_true_node
5921 : or_var_with_comparison (type, inner2, false, code2, op2a,
5922 op2b));
5923 else if (inner2 == false_test_var)
5924 return (is_or
5925 ? boolean_true_node
5926 : or_var_with_comparison (type, inner1, false, code2, op2a,
5927 op2b));
5928
5929 /* Next, redistribute/reassociate the OR across the inner tests.
5930 Compute the first partial result, (inner1 OR (op2a code2 op2b)) */
5931 if (TREE_CODE (inner1) == SSA_NAME
5932 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5933 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5934 && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
5935 gimple_assign_rhs1 (s),
5936 gimple_assign_rhs2 (s),
5937 code2, op2a, op2b)))
5938 {
5939 /* Handle the OR case, where we are reassociating:
5940 (inner1 OR inner2) OR (op2a code2 op2b)
5941 => (t OR inner2)
5942 If the partial result t is a constant, we win. Otherwise
5943 continue on to try reassociating with the other inner test. */
5944 if (is_or)
5945 {
5946 if (integer_onep (t))
5947 return boolean_true_node;
5948 else if (integer_zerop (t))
5949 return inner2;
5950 }
5951
5952 /* Handle the AND case, where we are redistributing:
5953 (inner1 AND inner2) OR (op2a code2 op2b)
5954 => (t AND (inner2 OR (op2a code2 op2b))) */
5955 else if (integer_zerop (t))
5956 return boolean_false_node;
5957
5958 /* Save partial result for later. */
5959 partial = t;
5960 }
5961
5962 /* Compute the second partial result, (inner2 OR (op2a code2 op2b)) */
5963 if (TREE_CODE (inner2) == SSA_NAME
5964 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5965 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5966 && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
5967 gimple_assign_rhs1 (s),
5968 gimple_assign_rhs2 (s),
5969 code2, op2a, op2b)))
5970 {
5971 /* Handle the OR case, where we are reassociating:
5972 (inner1 OR inner2) OR (op2a code2 op2b)
5973 => (inner1 OR t)
5974 => (t OR partial) */
5975 if (is_or)
5976 {
5977 if (integer_zerop (t))
5978 return inner1;
5979 else if (integer_onep (t))
5980 return boolean_true_node;
5981 /* If both are the same, we can apply the identity
5982 (x OR x) == x. */
5983 else if (partial && same_bool_result_p (t, partial))
5984 return t;
5985 }
5986
5987 /* Handle the AND case, where we are redistributing:
5988 (inner1 AND inner2) OR (op2a code2 op2b)
5989 => (t AND (inner1 OR (op2a code2 op2b)))
5990 => (t AND partial) */
5991 else
5992 {
5993 if (integer_zerop (t))
5994 return boolean_false_node;
5995 else if (partial)
5996 {
5997 /* We already got a simplification for the other
5998 operand to the redistributed AND expression. The
5999 interesting case is when at least one is true.
6000 Or, if both are the same, we can apply the identity
6001 (x AND x) == x. */
6002 if (integer_onep (partial))
6003 return t;
6004 else if (integer_onep (t))
6005 return partial;
6006 else if (same_bool_result_p (t, partial))
6007 return t;
6008 }
6009 }
6010 }
6011 }
6012 return NULL_TREE;
6013 }
6014
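/* Dual of the AND case above (illustrative; A is an integer SSA name):
   with
     v1 = a == 7;
     var = v1 | v2;
   ORed with the comparison (a != 7), the first partial result
   (a == 7) OR (a != 7) folds to true, so boolean_true_node is
   returned regardless of v2.  */
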
6015 /* Try to simplify the OR of two comparisons defined by
6016 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
6017 If this can be done without constructing an intermediate value,
6018 return the resulting tree; otherwise NULL_TREE is returned.
6019 This function is deliberately asymmetric as it recurses on SSA_DEFs
6020 in the first comparison but not the second. */
6021
6022 static tree
6023 or_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
6024 enum tree_code code2, tree op2a, tree op2b)
6025 {
6026 tree truth_type = truth_type_for (TREE_TYPE (op1a));
6027
6028 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
6029 if (operand_equal_p (op1a, op2a, 0)
6030 && operand_equal_p (op1b, op2b, 0))
6031 {
6032 /* Result will be either NULL_TREE, or a combined comparison. */
6033 tree t = combine_comparisons (UNKNOWN_LOCATION,
6034 TRUTH_ORIF_EXPR, code1, code2,
6035 truth_type, op1a, op1b);
6036 if (t)
6037 return t;
6038 }
6039
6040 /* Likewise the swapped case of the above. */
6041 if (operand_equal_p (op1a, op2b, 0)
6042 && operand_equal_p (op1b, op2a, 0))
6043 {
6044 /* Result will be either NULL_TREE, or a combined comparison. */
6045 tree t = combine_comparisons (UNKNOWN_LOCATION,
6046 TRUTH_ORIF_EXPR, code1,
6047 swap_tree_comparison (code2),
6048 truth_type, op1a, op1b);
6049 if (t)
6050 return t;
6051 }
6052
6053 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
6054 NAME's definition is a truth value. See if there are any simplifications
6055 that can be done against the NAME's definition. */
6056 if (TREE_CODE (op1a) == SSA_NAME
6057 && (code1 == NE_EXPR || code1 == EQ_EXPR)
6058 && (integer_zerop (op1b) || integer_onep (op1b)))
6059 {
6060 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
6061 || (code1 == NE_EXPR && integer_onep (op1b)));
6062 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
6063 switch (gimple_code (stmt))
6064 {
6065 case GIMPLE_ASSIGN:
6066 /* Try to simplify by copy-propagating the definition. */
6067 return or_var_with_comparison (type, op1a, invert, code2, op2a,
6068 op2b);
6069
6070 case GIMPLE_PHI:
6071 /* If every argument to the PHI produces the same result when
6072 ORed with the second comparison, we win.
6073 Do not do this unless the type is bool since we need a bool
6074 result here anyway. */
6075 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
6076 {
6077 tree result = NULL_TREE;
6078 unsigned i;
6079 for (i = 0; i < gimple_phi_num_args (stmt); i++)
6080 {
6081 tree arg = gimple_phi_arg_def (stmt, i);
6082
6083 /* If this PHI has itself as an argument, ignore it.
6084 If all the other args produce the same result,
6085 we're still OK. */
6086 if (arg == gimple_phi_result (stmt))
6087 continue;
6088 else if (TREE_CODE (arg) == INTEGER_CST)
6089 {
6090 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
6091 {
6092 if (!result)
6093 result = boolean_true_node;
6094 else if (!integer_onep (result))
6095 return NULL_TREE;
6096 }
6097 else if (!result)
6098 result = fold_build2 (code2, boolean_type_node,
6099 op2a, op2b);
6100 else if (!same_bool_comparison_p (result,
6101 code2, op2a, op2b))
6102 return NULL_TREE;
6103 }
6104 else if (TREE_CODE (arg) == SSA_NAME
6105 && !SSA_NAME_IS_DEFAULT_DEF (arg))
6106 {
6107 tree temp;
6108 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6109 /* In simple cases we can look through PHI nodes,
6110 but we have to be careful with loops.
6111 See PR49073. */
6112 if (! dom_info_available_p (CDI_DOMINATORS)
6113 || gimple_bb (def_stmt) == gimple_bb (stmt)
6114 || dominated_by_p (CDI_DOMINATORS,
6115 gimple_bb (def_stmt),
6116 gimple_bb (stmt)))
6117 return NULL_TREE;
6118 temp = or_var_with_comparison (type, arg, invert, code2,
6119 op2a, op2b);
6120 if (!temp)
6121 return NULL_TREE;
6122 else if (!result)
6123 result = temp;
6124 else if (!same_bool_result_p (result, temp))
6125 return NULL_TREE;
6126 }
6127 else
6128 return NULL_TREE;
6129 }
6130 return result;
6131 }
6132
6133 default:
6134 break;
6135 }
6136 }
6137 return NULL_TREE;
6138 }
6139
6140 /* Try to simplify the OR of two comparisons, specified by
6141 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
6142 If this can be simplified to a single expression (without requiring
6143 introducing more SSA variables to hold intermediate values),
6144 return the resulting tree. Otherwise return NULL_TREE.
6145 If the result expression is non-null, it has boolean type. */
6146
6147 tree
6148 maybe_fold_or_comparisons (tree type,
6149 enum tree_code code1, tree op1a, tree op1b,
6150 enum tree_code code2, tree op2a, tree op2b)
6151 {
6152 if (tree t = or_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b))
6153 return t;
6154
6155 if (tree t = or_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b))
6156 return t;
6157
6158 if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_IOR_EXPR, code1,
6159 op1a, op1b, code2, op2a,
6160 op2b))
6161 return t;
6162
6163 return NULL_TREE;
6164 }
6165
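/* Illustrative usage (editor's sketch; X is assumed to be an integer
   SSA name):

     tree seven = build_int_cst (TREE_TYPE (x), 7);
     tree t = maybe_fold_or_comparisons (boolean_type_node,
                                         LT_EXPR, x, seven,
                                         EQ_EXPR, x, seven);

   or_comparisons_1 combines the same-operand pair via
   combine_comparisons, so T is the tree for x <= 7.  */
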
6166 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6167
6168 Returns either NULL_TREE, a simplified but non-constant value,
6169 or a constant.
6170
6171 ??? This should go into a gimple-fold-inline.h file to be eventually
6172 privatized with the single valueize function used in the various TUs
6173 to avoid the indirect function call overhead. */
6174
6175 tree
6176 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
6177 tree (*gvalueize) (tree))
6178 {
6179 gimple_match_op res_op;
6180 /* ??? The SSA propagators do not correctly deal with following SSA use-def
6181 edges if there are intermediate VARYING defs. For this reason
6182 do not follow SSA edges here even though SCCVN can technically
6183 deal with that just fine. */
6184 if (gimple_simplify (stmt, &res_op, NULL, gvalueize, valueize))
6185 {
6186 tree res = NULL_TREE;
6187 if (gimple_simplified_result_is_gimple_val (&res_op))
6188 res = res_op.ops[0];
6189 else if (mprts_hook)
6190 res = mprts_hook (&res_op);
6191 if (res)
6192 {
6193 if (dump_file && dump_flags & TDF_DETAILS)
6194 {
6195 fprintf (dump_file, "Match-and-simplified ");
6196 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
6197 fprintf (dump_file, " to ");
6198 print_generic_expr (dump_file, res);
6199 fprintf (dump_file, "\n");
6200 }
6201 return res;
6202 }
6203 }
6204
6205 location_t loc = gimple_location (stmt);
6206 switch (gimple_code (stmt))
6207 {
6208 case GIMPLE_ASSIGN:
6209 {
6210 enum tree_code subcode = gimple_assign_rhs_code (stmt);
6211
6212 switch (get_gimple_rhs_class (subcode))
6213 {
6214 case GIMPLE_SINGLE_RHS:
6215 {
6216 tree rhs = gimple_assign_rhs1 (stmt);
6217 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
6218
6219 if (TREE_CODE (rhs) == SSA_NAME)
6220 {
6221 /* If the RHS is an SSA_NAME, return its known constant value,
6222 if any. */
6223 return (*valueize) (rhs);
6224 }
6225 /* Handle propagating invariant addresses into address
6226 operations. */
6227 else if (TREE_CODE (rhs) == ADDR_EXPR
6228 && !is_gimple_min_invariant (rhs))
6229 {
6230 poly_int64 offset = 0;
6231 tree base;
6232 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
6233 &offset,
6234 valueize);
6235 if (base
6236 && (CONSTANT_CLASS_P (base)
6237 || decl_address_invariant_p (base)))
6238 return build_invariant_address (TREE_TYPE (rhs),
6239 base, offset);
6240 }
6241 else if (TREE_CODE (rhs) == CONSTRUCTOR
6242 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
6243 && known_eq (CONSTRUCTOR_NELTS (rhs),
6244 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
6245 {
6246 unsigned i, nelts;
6247 tree val;
6248
6249 nelts = CONSTRUCTOR_NELTS (rhs);
6250 tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
6251 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
6252 {
6253 val = (*valueize) (val);
6254 if (TREE_CODE (val) == INTEGER_CST
6255 || TREE_CODE (val) == REAL_CST
6256 || TREE_CODE (val) == FIXED_CST)
6257 vec.quick_push (val);
6258 else
6259 return NULL_TREE;
6260 }
6261
6262 return vec.build ();
6263 }
6264 if (subcode == OBJ_TYPE_REF)
6265 {
6266 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
6267 /* If callee is constant, we can fold away the wrapper. */
6268 if (is_gimple_min_invariant (val))
6269 return val;
6270 }
6271
6272 if (kind == tcc_reference)
6273 {
6274 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
6275 || TREE_CODE (rhs) == REALPART_EXPR
6276 || TREE_CODE (rhs) == IMAGPART_EXPR)
6277 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6278 {
6279 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6280 return fold_unary_loc (EXPR_LOCATION (rhs),
6281 TREE_CODE (rhs),
6282 TREE_TYPE (rhs), val);
6283 }
6284 else if (TREE_CODE (rhs) == BIT_FIELD_REF
6285 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6286 {
6287 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6288 return fold_ternary_loc (EXPR_LOCATION (rhs),
6289 TREE_CODE (rhs),
6290 TREE_TYPE (rhs), val,
6291 TREE_OPERAND (rhs, 1),
6292 TREE_OPERAND (rhs, 2));
6293 }
6294 else if (TREE_CODE (rhs) == MEM_REF
6295 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6296 {
6297 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6298 if (TREE_CODE (val) == ADDR_EXPR
6299 && is_gimple_min_invariant (val))
6300 {
6301 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
6302 unshare_expr (val),
6303 TREE_OPERAND (rhs, 1));
6304 if (tem)
6305 rhs = tem;
6306 }
6307 }
6308 return fold_const_aggregate_ref_1 (rhs, valueize);
6309 }
6310 else if (kind == tcc_declaration)
6311 return get_symbol_constant_value (rhs);
6312 return rhs;
6313 }
6314
6315 case GIMPLE_UNARY_RHS:
6316 return NULL_TREE;
6317
6318 case GIMPLE_BINARY_RHS:
6319 /* Translate &x + CST into an invariant form suitable for
6320 further propagation. */
6321 if (subcode == POINTER_PLUS_EXPR)
6322 {
6323 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6324 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6325 if (TREE_CODE (op0) == ADDR_EXPR
6326 && TREE_CODE (op1) == INTEGER_CST)
6327 {
6328 tree off = fold_convert (ptr_type_node, op1);
6329 return build_fold_addr_expr_loc
6330 (loc,
6331 fold_build2 (MEM_REF,
6332 TREE_TYPE (TREE_TYPE (op0)),
6333 unshare_expr (op0), off));
6334 }
6335 }
6336 /* Canonicalize bool != 0 and bool == 0 appearing after
6337 valueization. While gimple_simplify handles this,
6338 it can get confused by the ~X == 1 -> X == 0 transform,
6339 which we can't reduce to an SSA name or a constant
6340 (and we have no way to tell gimple_simplify to not
6341 consider those transforms in the first place). */
6342 else if (subcode == EQ_EXPR
6343 || subcode == NE_EXPR)
6344 {
6345 tree lhs = gimple_assign_lhs (stmt);
6346 tree op0 = gimple_assign_rhs1 (stmt);
6347 if (useless_type_conversion_p (TREE_TYPE (lhs),
6348 TREE_TYPE (op0)))
6349 {
6350 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6351 op0 = (*valueize) (op0);
6352 if (TREE_CODE (op0) == INTEGER_CST)
6353 std::swap (op0, op1);
6354 if (TREE_CODE (op1) == INTEGER_CST
6355 && ((subcode == NE_EXPR && integer_zerop (op1))
6356 || (subcode == EQ_EXPR && integer_onep (op1))))
6357 return op0;
6358 }
6359 }
6360 return NULL_TREE;
6361
6362 case GIMPLE_TERNARY_RHS:
6363 {
6364 /* Handle ternary operators that can appear in GIMPLE form. */
6365 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6366 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6367 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
6368 return fold_ternary_loc (loc, subcode,
6369 gimple_expr_type (stmt), op0, op1, op2);
6370 }
6371
6372 default:
6373 gcc_unreachable ();
6374 }
6375 }
6376
6377 case GIMPLE_CALL:
6378 {
6379 tree fn;
6380 gcall *call_stmt = as_a <gcall *> (stmt);
6381
6382 if (gimple_call_internal_p (stmt))
6383 {
6384 enum tree_code subcode = ERROR_MARK;
6385 switch (gimple_call_internal_fn (stmt))
6386 {
6387 case IFN_UBSAN_CHECK_ADD:
6388 subcode = PLUS_EXPR;
6389 break;
6390 case IFN_UBSAN_CHECK_SUB:
6391 subcode = MINUS_EXPR;
6392 break;
6393 case IFN_UBSAN_CHECK_MUL:
6394 subcode = MULT_EXPR;
6395 break;
6396 case IFN_BUILTIN_EXPECT:
6397 {
6398 tree arg0 = gimple_call_arg (stmt, 0);
6399 tree op0 = (*valueize) (arg0);
6400 if (TREE_CODE (op0) == INTEGER_CST)
6401 return op0;
6402 return NULL_TREE;
6403 }
6404 default:
6405 return NULL_TREE;
6406 }
6407 tree arg0 = gimple_call_arg (stmt, 0);
6408 tree arg1 = gimple_call_arg (stmt, 1);
6409 tree op0 = (*valueize) (arg0);
6410 tree op1 = (*valueize) (arg1);
6411
6412 if (TREE_CODE (op0) != INTEGER_CST
6413 || TREE_CODE (op1) != INTEGER_CST)
6414 {
6415 switch (subcode)
6416 {
6417 case MULT_EXPR:
6418 /* x * 0 = 0 * x = 0 without overflow. */
6419 if (integer_zerop (op0) || integer_zerop (op1))
6420 return build_zero_cst (TREE_TYPE (arg0));
6421 break;
6422 case MINUS_EXPR:
6423 /* y - y = 0 without overflow. */
6424 if (operand_equal_p (op0, op1, 0))
6425 return build_zero_cst (TREE_TYPE (arg0));
6426 break;
6427 default:
6428 break;
6429 }
6430 }
6431 tree res
6432 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
6433 if (res
6434 && TREE_CODE (res) == INTEGER_CST
6435 && !TREE_OVERFLOW (res))
6436 return res;
6437 return NULL_TREE;
6438 }
6439
6440 fn = (*valueize) (gimple_call_fn (stmt));
6441 if (TREE_CODE (fn) == ADDR_EXPR
6442 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
6443 && fndecl_built_in_p (TREE_OPERAND (fn, 0))
6444 && gimple_builtin_call_types_compatible_p (stmt,
6445 TREE_OPERAND (fn, 0)))
6446 {
6447 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
6448 tree retval;
6449 unsigned i;
6450 for (i = 0; i < gimple_call_num_args (stmt); ++i)
6451 args[i] = (*valueize) (gimple_call_arg (stmt, i));
6452 retval = fold_builtin_call_array (loc,
6453 gimple_call_return_type (call_stmt),
6454 fn, gimple_call_num_args (stmt), args);
6455 if (retval)
6456 {
6457 /* fold_call_expr wraps the result inside a NOP_EXPR. */
6458 STRIP_NOPS (retval);
6459 retval = fold_convert (gimple_call_return_type (call_stmt),
6460 retval);
6461 }
6462 return retval;
6463 }
6464 return NULL_TREE;
6465 }
6466
6467 default:
6468 return NULL_TREE;
6469 }
6470 }
6471
6472 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6473 Returns NULL_TREE if folding to a constant is not possible, otherwise
6474 returns a constant according to is_gimple_min_invariant. */
6475
6476 tree
6477 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
6478 {
6479 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
6480 if (res && is_gimple_min_invariant (res))
6481 return res;
6482 return NULL_TREE;
6483 }
6484
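/* Illustrative usage (editor's sketch): a pass that tracks constant
   lattice values for SSA names can fold statements against them with
   a simple callback; my_lookup_constant below is a hypothetical
   placeholder for the pass's own table:

     static tree
     my_valueize (tree name)
     {
       if (tree val = my_lookup_constant (name))
         return val;
       return name;
     }

     ...
     tree cst = gimple_fold_stmt_to_constant (stmt, my_valueize);  */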
6485
6486 /* The following set of functions are supposed to fold references using
6487 their constant initializers. */
6488
6489 /* See if we can find a constructor defining the value of BASE.
6490 When we reach the constructor at a constant offset (such as when
6491 BASE is array[40] and we know the constructor of array), then
6492 BIT_OFFSET is adjusted accordingly.
6493
6494 As a special case, return error_mark_node when constructor
6495 is not explicitly available, but it is known to be zero
6496 such as 'static const int a;'. */
6497 static tree
6498 get_base_constructor (tree base, poly_int64_pod *bit_offset,
6499 tree (*valueize)(tree))
6500 {
6501 poly_int64 bit_offset2, size, max_size;
6502 bool reverse;
6503
6504 if (TREE_CODE (base) == MEM_REF)
6505 {
6506 poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
6507 if (!boff.to_shwi (bit_offset))
6508 return NULL_TREE;
6509
6510 if (valueize
6511 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
6512 base = valueize (TREE_OPERAND (base, 0));
6513 if (!base || TREE_CODE (base) != ADDR_EXPR)
6514 return NULL_TREE;
6515 base = TREE_OPERAND (base, 0);
6516 }
6517 else if (valueize
6518 && TREE_CODE (base) == SSA_NAME)
6519 base = valueize (base);
6520
6521 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
6522 DECL_INITIAL. If BASE is a nested reference into another
6523 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
6524 the inner reference. */
6525 switch (TREE_CODE (base))
6526 {
6527 case VAR_DECL:
6528 case CONST_DECL:
6529 {
6530 tree init = ctor_for_folding (base);
6531
6532 /* Our semantics are the exact opposite of ctor_for_folding's:
6533 NULL means unknown, while error_mark_node means 0. */
6534 if (init == error_mark_node)
6535 return NULL_TREE;
6536 if (!init)
6537 return error_mark_node;
6538 return init;
6539 }
6540
6541 case VIEW_CONVERT_EXPR:
6542 return get_base_constructor (TREE_OPERAND (base, 0),
6543 bit_offset, valueize);
6544
6545 case ARRAY_REF:
6546 case COMPONENT_REF:
6547 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
6548 &reverse);
6549 if (!known_size_p (max_size) || maybe_ne (size, max_size))
6550 return NULL_TREE;
6551 *bit_offset += bit_offset2;
6552 return get_base_constructor (base, bit_offset, valueize);
6553
6554 case CONSTRUCTOR:
6555 return base;
6556
6557 default:
6558 if (CONSTANT_CLASS_P (base))
6559 return base;
6560
6561 return NULL_TREE;
6562 }
6563 }
6564
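/* For example (illustrative):
     static const int a[2] = { 1, 2 };
     static const int b;
   For the base of a reference to a[1] this returns the CONSTRUCTOR
   { 1, 2 }; for b it returns error_mark_node, meaning the value is
   known to be zero even though no constructor is available.  */
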
6565 /* CTOR is CONSTRUCTOR of an array type. Fold a reference of SIZE bits
6566 to the memory at bit OFFSET. When non-null, TYPE is the expected
6567 type of the reference; otherwise the type of the referenced element
6568 is used instead. When SIZE is zero, attempt to fold a reference to
6569 the entire element which OFFSET refers to. Increment *SUBOFF by
6570 the bit offset of the accessed element. */
6571
6572 static tree
6573 fold_array_ctor_reference (tree type, tree ctor,
6574 unsigned HOST_WIDE_INT offset,
6575 unsigned HOST_WIDE_INT size,
6576 tree from_decl,
6577 unsigned HOST_WIDE_INT *suboff)
6578 {
6579 offset_int low_bound;
6580 offset_int elt_size;
6581 offset_int access_index;
6582 tree domain_type = NULL_TREE;
6583 HOST_WIDE_INT inner_offset;
6584
6585 /* Compute low bound and elt size. */
6586 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
6587 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
6588 if (domain_type && TYPE_MIN_VALUE (domain_type))
6589 {
6590 /* Static constructors for variably sized objects make no sense. */
6591 if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
6592 return NULL_TREE;
6593 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
6594 }
6595 else
6596 low_bound = 0;
6597 /* Static constructors for variably sized objects make no sense. */
6598 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
6599 return NULL_TREE;
6600 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
6601
6602 /* When TYPE is non-null, verify that it specifies a constant-sized
6603 access of a multiple of the array element size. Avoid division
6604 by zero below when ELT_SIZE is zero, such as with the result of
6605 an initializer for a zero-length array or an empty struct. */
6606 if (elt_size == 0
6607 || (type
6608 && (!TYPE_SIZE_UNIT (type)
6609 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST)))
6610 return NULL_TREE;
6611
6612 /* Compute the array index we look for. */
6613 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
6614 elt_size);
6615 access_index += low_bound;
6616
6617 /* And offset within the access. */
6618 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
6619
6620 if (size > elt_size.to_uhwi () * BITS_PER_UNIT)
6621 {
6622 /* native_encode_expr constraints. */
6623 if (size > MAX_BITSIZE_MODE_ANY_MODE
6624 || size % BITS_PER_UNIT != 0
6625 || inner_offset % BITS_PER_UNIT != 0)
6626 return NULL_TREE;
6627
6628 unsigned ctor_idx;
6629 tree val = get_array_ctor_element_at_index (ctor, access_index,
6630 &ctor_idx);
6631 if (!val && ctor_idx >= CONSTRUCTOR_NELTS (ctor))
6632 return build_zero_cst (type);
6633
6634 /* native-encode adjacent ctor elements. */
6635 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
6636 unsigned bufoff = 0;
6637 offset_int index = 0;
6638 offset_int max_index = access_index;
6639 constructor_elt *elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
6640 if (!val)
6641 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
6642 else if (!CONSTANT_CLASS_P (val))
6643 return NULL_TREE;
6644 if (!elt->index)
6645 ;
6646 else if (TREE_CODE (elt->index) == RANGE_EXPR)
6647 {
6648 index = wi::to_offset (TREE_OPERAND (elt->index, 0));
6649 max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
6650 }
6651 else
6652 index = max_index = wi::to_offset (elt->index);
6653 index = wi::umax (index, access_index);
6654 do
6655 {
6656 int len = native_encode_expr (val, buf + bufoff,
6657 elt_size.to_uhwi (),
6658 inner_offset / BITS_PER_UNIT);
6659 if (len != elt_size - inner_offset / BITS_PER_UNIT)
6660 return NULL_TREE;
6661 inner_offset = 0;
6662 bufoff += len;
6663
6664 access_index += 1;
6665 if (wi::cmpu (access_index, index) == 0)
6666 val = elt->value;
6667 else if (wi::cmpu (access_index, max_index) > 0)
6668 {
6669 ctor_idx++;
6670 if (ctor_idx >= CONSTRUCTOR_NELTS (ctor))
6671 {
6672 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
6673 ++max_index;
6674 }
6675 else
6676 {
6677 elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
6678 index = 0;
6679 max_index = access_index;
6680 if (!elt->index)
6681 ;
6682 else if (TREE_CODE (elt->index) == RANGE_EXPR)
6683 {
6684 index = wi::to_offset (TREE_OPERAND (elt->index, 0));
6685 max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
6686 }
6687 else
6688 index = max_index = wi::to_offset (elt->index);
6689 index = wi::umax (index, access_index);
6690 if (wi::cmpu (access_index, index) == 0)
6691 val = elt->value;
6692 else
6693 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
6694 }
6695 }
6696 }
6697 while (bufoff < size / BITS_PER_UNIT);
6698 *suboff += size;
6699 return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
6700 }
6701
6702 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
6703 {
6704 if (!size && TREE_CODE (val) != CONSTRUCTOR)
6705 {
6706 /* For the final reference to the entire accessed element
6707 (SIZE is zero), reset INNER_OFFSET, disregard TYPE (which
6708 may be null) in favor of the type of the element, and set
6709 SIZE to the size of the accessed element. */
6710 inner_offset = 0;
6711 type = TREE_TYPE (val);
6712 size = elt_size.to_uhwi () * BITS_PER_UNIT;
6713 }
6714
6715 *suboff += (access_index * elt_size * BITS_PER_UNIT).to_uhwi ();
6716 return fold_ctor_reference (type, val, inner_offset, size, from_decl,
6717 suboff);
6718 }
6719
6720 /* Memory not explicitly mentioned in constructor is 0 (or
6721 the reference is out of range). */
6722 return type ? build_zero_cst (type) : NULL_TREE;
6723 }
6724
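/* For example (illustrative), with
     static const int a[4] = { 10, 20, 30, 40 };
   and 32-bit int, a 32-bit read at bit offset 64 computes
   access_index 2 and folds to the INTEGER_CST 30, while a read of an
   element beyond the explicit initializers (e.g. a trailing
   zero-initialized element of a partially initialized array) folds
   to zero.  */
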
6725 /* CTOR is CONSTRUCTOR of an aggregate or vector. Fold a reference
6726 of SIZE bits to the memory at bit OFFSET. When non-null, TYPE
6727 is the expected type of the reference; otherwise the type of
6728 the referenced member is used instead. When SIZE is zero,
6729 attempt to fold a reference to the entire member which OFFSET
6730 refers to. Increment *SUBOFF by the bit offset
6731 of the accessed member. */
6732
6733 static tree
6734 fold_nonarray_ctor_reference (tree type, tree ctor,
6735 unsigned HOST_WIDE_INT offset,
6736 unsigned HOST_WIDE_INT size,
6737 tree from_decl,
6738 unsigned HOST_WIDE_INT *suboff)
6739 {
6740 unsigned HOST_WIDE_INT cnt;
6741 tree cfield, cval;
6742
6743 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
6744 cval)
6745 {
6746 tree byte_offset = DECL_FIELD_OFFSET (cfield);
6747 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
6748 tree field_size = DECL_SIZE (cfield);
6749
6750 if (!field_size)
6751 {
6752 /* Determine the size of the flexible array member from
6753 the size of the initializer provided for it. */
6754 field_size = TYPE_SIZE (TREE_TYPE (cval));
6755 }
6756
6757 /* Variable sized objects in static constructors make no sense,
6758 but field_size can be NULL for flexible array members. */
6759 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
6760 && TREE_CODE (byte_offset) == INTEGER_CST
6761 && (field_size != NULL_TREE
6762 ? TREE_CODE (field_size) == INTEGER_CST
6763 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
6764
6765 /* Compute bit offset of the field. */
6766 offset_int bitoffset
6767 = (wi::to_offset (field_offset)
6768 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
6769 /* Compute bit offset where the field ends. */
6770 offset_int bitoffset_end;
6771 if (field_size != NULL_TREE)
6772 bitoffset_end = bitoffset + wi::to_offset (field_size);
6773 else
6774 bitoffset_end = 0;
6775
6776 /* Compute the bit offset of the end of the desired access.
6777 As a special case, if the size of the desired access is
6778 zero, assume the access is to the entire field (and let
6779 the caller make any necessary adjustments using the field's
6780 bit offset recorded in *SUBOFF). */
6781 offset_int access_end = offset_int (offset);
6782 if (size)
6783 access_end += size;
6784 else
6785 access_end = bitoffset_end;
6786
6787 /* Is there any overlap between the desired access at
6788 [OFFSET, OFFSET+SIZE) and the offset of the field within
6789 the object at [BITOFFSET, BITOFFSET_END)? */
6790 if (wi::cmps (access_end, bitoffset) > 0
6791 && (field_size == NULL_TREE
6792 || wi::lts_p (offset, bitoffset_end)))
6793 {
6794 *suboff += bitoffset.to_uhwi ();
6795
6796 if (!size && TREE_CODE (cval) != CONSTRUCTOR)
6797 {
6798 /* For the final reference to the entire accessed member
6799 (SIZE is zero), reset OFFSET, disregard TYPE (which may
6800 be null) in favor of the type of the member, and set
6801 SIZE to the size of the accessed member. */
6802 offset = bitoffset.to_uhwi ();
6803 type = TREE_TYPE (cval);
6804 size = (bitoffset_end - bitoffset).to_uhwi ();
6805 }
6806
6807 /* We do have overlap. Now see if the field is large enough
6808 to cover the access. Give up for accesses that extend
6809 beyond the end of the object or that span multiple fields. */
6810 if (wi::cmps (access_end, bitoffset_end) > 0)
6811 return NULL_TREE;
6812 if (offset < bitoffset)
6813 return NULL_TREE;
6814
6815 offset_int inner_offset = offset_int (offset) - bitoffset;
6816 return fold_ctor_reference (type, cval,
6817 inner_offset.to_uhwi (), size,
6818 from_decl, suboff);
6819 }
6820 }
6821
6822 if (!type)
6823 return NULL_TREE;
6824
6825 return build_zero_cst (type);
6826 }
6827
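/* For example (illustrative), with
     struct S { int i; short s; };
     static const struct S x = { 42, 7 };
   a 32-bit read at bit offset 0 overlaps the field I exactly and
   folds to the INTEGER_CST 42, whereas a 64-bit read at offset 0
   spans both fields and is rejected with NULL_TREE.  */
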
6828 /* CTOR is a value initializing memory. Fold a reference of TYPE and
6829 bit size POLY_SIZE to the memory at bit POLY_OFFSET. When POLY_SIZE
6830 is zero, attempt to fold a reference to the entire subobject
6831 which POLY_OFFSET refers to. This is used when folding accesses to
6832 string members of aggregates. When non-null, set *SUBOFF to
6833 the bit offset of the accessed subobject. */
6834
6835 tree
6836 fold_ctor_reference (tree type, tree ctor, const poly_uint64 &poly_offset,
6837 const poly_uint64 &poly_size, tree from_decl,
6838 unsigned HOST_WIDE_INT *suboff /* = NULL */)
6839 {
6840 tree ret;
6841
6842 /* We found the field with exact match. */
6843 if (type
6844 && useless_type_conversion_p (type, TREE_TYPE (ctor))
6845 && known_eq (poly_offset, 0U))
6846 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
6847
6848 /* The remaining optimizations need a constant size and offset. */
6849 unsigned HOST_WIDE_INT size, offset;
6850 if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
6851 return NULL_TREE;
6852
6853 /* We are at the end of walk, see if we can view convert the
6854 result. */
6855 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
6856 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
6857 && !compare_tree_int (TYPE_SIZE (type), size)
6858 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
6859 {
6860 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
6861 if (ret)
6862 {
6863 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
6864 if (ret)
6865 STRIP_USELESS_TYPE_CONVERSION (ret);
6866 }
6867 return ret;
6868 }
6869 /* For constants and byte-aligned/sized reads try to go through
6870 native_encode/interpret. */
6871 if (CONSTANT_CLASS_P (ctor)
6872 && BITS_PER_UNIT == 8
6873 && offset % BITS_PER_UNIT == 0
6874 && size % BITS_PER_UNIT == 0
6875 && size <= MAX_BITSIZE_MODE_ANY_MODE)
6876 {
6877 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
6878 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
6879 offset / BITS_PER_UNIT);
6880 if (len > 0)
6881 return native_interpret_expr (type, buf, len);
6882 }
6883 if (TREE_CODE (ctor) == CONSTRUCTOR)
6884 {
6885 unsigned HOST_WIDE_INT dummy = 0;
6886 if (!suboff)
6887 suboff = &dummy;
6888
6889 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
6890 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
6891 return fold_array_ctor_reference (type, ctor, offset, size,
6892 from_decl, suboff);
6893
6894 return fold_nonarray_ctor_reference (type, ctor, offset, size,
6895 from_decl, suboff);
6896 }
6897
6898 return NULL_TREE;
6899 }
6900
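/* For example (illustrative), reading one byte at byte offset 1 of
     static const int x = 0x11223344;
   takes the native_encode_expr/native_interpret_expr path above: the
   constant is encoded into a byte buffer in target byte order and the
   requested byte-aligned slice is reinterpreted as the requested
   type.  */
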
6901 /* Return the tree representing the element referenced by T if T is an
6902 ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
6903 names using VALUEIZE. Return NULL_TREE otherwise. */
6904
6905 tree
6906 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
6907 {
6908 tree ctor, idx, base;
6909 poly_int64 offset, size, max_size;
6910 tree tem;
6911 bool reverse;
6912
6913 if (TREE_THIS_VOLATILE (t))
6914 return NULL_TREE;
6915
6916 if (DECL_P (t))
6917 return get_symbol_constant_value (t);
6918
6919 tem = fold_read_from_constant_string (t);
6920 if (tem)
6921 return tem;
6922
6923 switch (TREE_CODE (t))
6924 {
6925 case ARRAY_REF:
6926 case ARRAY_RANGE_REF:
6927 /* Constant indexes are handled well by get_base_constructor.
6928 Only special-case variable offsets.
6929 FIXME: This code can't handle nested references with variable indexes
6930 (they will be handled only by iteration of ccp). Perhaps we can bring
6931 get_ref_base_and_extent here and make it use a valueize callback. */
6932 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
6933 && valueize
6934 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
6935 && poly_int_tree_p (idx))
6936 {
6937 tree low_bound, unit_size;
6938
6939 /* If the resulting bit-offset is constant, track it. */
6940 if ((low_bound = array_ref_low_bound (t),
6941 poly_int_tree_p (low_bound))
6942 && (unit_size = array_ref_element_size (t),
6943 tree_fits_uhwi_p (unit_size)))
6944 {
6945 poly_offset_int woffset
6946 = wi::sext (wi::to_poly_offset (idx)
6947 - wi::to_poly_offset (low_bound),
6948 TYPE_PRECISION (TREE_TYPE (idx)));
6949 woffset *= tree_to_uhwi (unit_size);
6950 woffset *= BITS_PER_UNIT;
6951 if (woffset.to_shwi (&offset))
6952 {
6953 base = TREE_OPERAND (t, 0);
6954 ctor = get_base_constructor (base, &offset, valueize);
6955 /* Empty constructor. Always fold to 0. */
6956 if (ctor == error_mark_node)
6957 return build_zero_cst (TREE_TYPE (t));
6958 /* Out of bound array access. Value is undefined,
6959 but don't fold. */
6960 if (maybe_lt (offset, 0))
6961 return NULL_TREE;
6962 /* We cannot determine ctor. */
6963 if (!ctor)
6964 return NULL_TREE;
6965 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
6966 tree_to_uhwi (unit_size)
6967 * BITS_PER_UNIT,
6968 base);
6969 }
6970 }
6971 }
6972 /* Fallthru. */
6973
6974 case COMPONENT_REF:
6975 case BIT_FIELD_REF:
6976 case TARGET_MEM_REF:
6977 case MEM_REF:
6978 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
6979 ctor = get_base_constructor (base, &offset, valueize);
6980
6981 /* Empty constructor. Always fold to 0. */
6982 if (ctor == error_mark_node)
6983 return build_zero_cst (TREE_TYPE (t));
6984 /* We do not know precise address. */
6985 if (!known_size_p (max_size) || maybe_ne (max_size, size))
6986 return NULL_TREE;
6987 /* We cannot determine ctor. */
6988 if (!ctor)
6989 return NULL_TREE;
6990
6991 /* Out of bound array access. Value is undefined, but don't fold. */
6992 if (maybe_lt (offset, 0))
6993 return NULL_TREE;
6994
6995 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
6996 base);
6997
6998 case REALPART_EXPR:
6999 case IMAGPART_EXPR:
7000 {
7001 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
7002 if (c && TREE_CODE (c) == COMPLEX_CST)
7003 return fold_build1_loc (EXPR_LOCATION (t),
7004 TREE_CODE (t), TREE_TYPE (t), c);
7005 break;
7006 }
7007
7008 default:
7009 break;
7010 }
7011
7012 return NULL_TREE;
7013 }
7014
7015 tree
7016 fold_const_aggregate_ref (tree t)
7017 {
7018 return fold_const_aggregate_ref_1 (t, NULL);
7019 }
7020
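/* For example (illustrative), with
     static const int a[2] = { 1, 2 };
   fold_const_aggregate_ref on the tree for a[1] returns the
   INTEGER_CST 2, and REALPART_EXPR of a constant complex variable
   folds to its real component.  */
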
7021 /* Look up the virtual method with index TOKEN in a virtual table V
7022 at OFFSET.
7023 If CAN_REFER is non-NULL, set it to false if the method
7024 is not referable or if the virtual table is ill-formed (such as rewritten
7025 by a non-C++-produced symbol); otherwise just return NULL in that case. */
7026
7027 tree
7028 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
7029 tree v,
7030 unsigned HOST_WIDE_INT offset,
7031 bool *can_refer)
7032 {
7033 tree vtable = v, init, fn;
7034 unsigned HOST_WIDE_INT size;
7035 unsigned HOST_WIDE_INT elt_size, access_index;
7036 tree domain_type;
7037
7038 if (can_refer)
7039 *can_refer = true;
7040
7041 /* First of all, double-check that we have a virtual table. */
7042 if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
7043 {
7044 /* Pass down that we lost track of the target. */
7045 if (can_refer)
7046 *can_refer = false;
7047 return NULL_TREE;
7048 }
7049
7050 init = ctor_for_folding (v);
7051
7052 /* The virtual tables should always be born with constructors,
7053 and we should always assume that they are available for
7054 folding. At the moment we do not stream them in all cases,
7055 but it should never happen that the ctor seems unreachable. */
7056 gcc_assert (init);
7057 if (init == error_mark_node)
7058 {
7059 /* Pass down that we lost track of the target. */
7060 if (can_refer)
7061 *can_refer = false;
7062 return NULL_TREE;
7063 }
7064 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
7065 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
7066 offset *= BITS_PER_UNIT;
7067 offset += token * size;
7068
7069 /* Lookup the value in the constructor that is assumed to be array.
7070 This is equivalent to
7071 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
7072 offset, size, NULL);
7073 but in a constant time. We expect that frontend produced a simple
7074 array without indexed initializers. */
7075
7076 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
7077 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
7078 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
7079 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
7080
7081 access_index = offset / BITS_PER_UNIT / elt_size;
7082 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
7083
7084 /* The C++ FE can now produce indexed fields, and we check if the indexes
7085 match. */
7086 if (access_index < CONSTRUCTOR_NELTS (init))
7087 {
7088 fn = CONSTRUCTOR_ELT (init, access_index)->value;
7089 tree idx = CONSTRUCTOR_ELT (init, access_index)->index;
7090 gcc_checking_assert (!idx || tree_to_uhwi (idx) == access_index);
7091 STRIP_NOPS (fn);
7092 }
7093 else
7094 fn = NULL;
7095
7096 /* For type inconsistent program we may end up looking up virtual method
7097 in virtual table that does not contain TOKEN entries. We may overrun
7098 the virtual table and pick up a constant or RTTI info pointer.
7099 In any case the call is undefined. */
7100 if (!fn
7101 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
7102 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
7103 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
7104 else
7105 {
7106 fn = TREE_OPERAND (fn, 0);
7107
7108 /* When cgraph node is missing and function is not public, we cannot
7109 devirtualize. This can happen in WHOPR when the actual method
7110 ends up in other partition, because we found devirtualization
7111 possibility too late. */
7112 if (!can_refer_decl_in_current_unit_p (fn, vtable))
7113 {
7114 if (can_refer)
7115 {
7116 *can_refer = false;
7117 return fn;
7118 }
7119 return NULL_TREE;
7120 }
7121 }
7122
7123 /* Make sure we create a cgraph node for functions we'll reference.
7124 They can be non-existent if the reference comes from an entry
7125 of an external vtable for example. */
7126 cgraph_node::get_create (fn);
7127
7128 return fn;
7129 }
7130
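/* A worked example of the index arithmetic above (the numbers are
   illustrative): on a 64-bit target each vtable slot is a pointer, so
   SIZE is 64 bits and ELT_SIZE is 8 bytes.  With OFFSET 16 (bytes into
   the vtable) and TOKEN 3, the bit offset becomes
   16 * 8 + 3 * 64 = 320, giving ACCESS_INDEX = 320 / 8 / 8 = 5,
   i.e. the sixth constructor entry.  */
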
/* Return a declaration of a function which an OBJ_TYPE_REF references.  TOKEN
   is the integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).
   If CAN_REFER is non-NULL, set it to false when the method is not
   referable or when the virtual table is ill-formed (such as one
   rewritten by a non-C++-produced symbol); otherwise just return
   NULL_TREE in that case.  */
tree
gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
				  bool *can_refer)
{
  unsigned HOST_WIDE_INT offset;
  tree v;

  v = BINFO_VTABLE (known_binfo);
  /* If there is no virtual method table, leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;

  if (!vtable_pointer_value_to_vtable (v, &v, &offset))
    {
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
}

/* Given a pointer value T, return a simplified version of an
   indirection through T, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may differ from the type
   pointed to, as long as it is still compatible from the langhooks
   point of view.  */
tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype)
      || TYPE_REF_CAN_ALIAS_ALL (ptype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
	return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (TREE_CODE (min_val) == INTEGER_CST)
	    return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
	  unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
	  tree part_width = TYPE_SIZE (type);
	  unsigned HOST_WIDE_INT part_widthi
	    = tree_to_shwi (part_width) / BITS_PER_UNIT;
	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	  tree index = bitsize_int (indexi);
	  if (known_lt (offset / part_widthi,
			TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
	    return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
				part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
	{
	  tree size = TYPE_SIZE_UNIT (type);
	  if (tree_int_cst_equal (size, off))
	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
	}

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    wide_int_to_tree (ptype, wi::to_wide (off)));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}

/* Return true if CODE is an operation that, when applied to signed
   integer types, has undefined behavior on overflow and can instead be
   carried out in the corresponding unsigned type.  */

bool
arith_code_with_undefined_signed_overflow (tree_code code)
{
  switch (code)
    {
    case ABS_EXPR:
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case NEGATE_EXPR:
    case POINTER_PLUS_EXPR:
      return true;
    default:
      return false;
    }
}

/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple *stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  if (gimple_assign_rhs_code (stmt) == ABS_EXPR)
    gimple_assign_set_rhs_code (stmt, ABSU_EXPR);
  else
    for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
      {
	tree op = gimple_op (stmt, i);
	op = gimple_convert (&stmts, type, op);
	gimple_set_op (stmt, i, op);
      }
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
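
/* For instance (an illustrative sketch; the SSA names are hypothetical),
   with signed overflow undefined,

     a_2 = b_1 + 7;

   is rewritten into

     _3 = (unsigned int) b_1;
     _4 = _3 + 7;
     a_2 = (int) _4;

   so the addition itself can no longer invoke undefined behavior.  */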

/* The valueization hook we use for the gimple_build API simplification.
   This makes us match fold_buildN behavior by only combining with
   statements in the sequence(s) we are currently building.  */

static tree
gimple_build_valueize (tree op)
{
  if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
    return op;
  return NULL_TREE;
}

/* Build the expression CODE OP0 of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum tree_code code, tree type, tree op0)
{
  tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
  if (!res)
    {
      res = create_tmp_reg_or_ssa_name (type);
      gimple *stmt;
      if (code == REALPART_EXPR
	  || code == IMAGPART_EXPR
	  || code == VIEW_CONVERT_EXPR)
	stmt = gimple_build_assign (res, code, build1 (code, type, op0));
      else
	stmt = gimple_build_assign (res, code, op0);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}

/* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum tree_code code, tree type, tree op0, tree op1)
{
  tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
  if (!res)
    {
      res = create_tmp_reg_or_ssa_name (type);
      gimple *stmt = gimple_build_assign (res, code, op0, op1);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}

/* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum tree_code code, tree type, tree op0, tree op1, tree op2)
{
  tree res = gimple_simplify (code, type, op0, op1, op2,
			      seq, gimple_build_valueize);
  if (!res)
    {
      res = create_tmp_reg_or_ssa_name (type);
      gimple *stmt;
      if (code == BIT_FIELD_REF)
	stmt = gimple_build_assign (res, code,
				    build3 (code, type, op0, op1, op2));
      else
	stmt = gimple_build_assign (res, code, op0, op1, op2);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}

/* Build the call FN (ARG0) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
	      tree type, tree arg0)
{
  tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
  if (!res)
    {
      gcall *stmt;
      if (internal_fn_p (fn))
	stmt = gimple_build_call_internal (as_internal_fn (fn), 1, arg0);
      else
	{
	  tree decl = builtin_decl_implicit (as_builtin_fn (fn));
	  stmt = gimple_build_call (decl, 1, arg0);
	}
      if (!VOID_TYPE_P (type))
	{
	  res = create_tmp_reg_or_ssa_name (type);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}

/* Build the call FN (ARG0, ARG1) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
	      tree type, tree arg0, tree arg1)
{
  tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
  if (!res)
    {
      gcall *stmt;
      if (internal_fn_p (fn))
	stmt = gimple_build_call_internal (as_internal_fn (fn), 2, arg0, arg1);
      else
	{
	  tree decl = builtin_decl_implicit (as_builtin_fn (fn));
	  stmt = gimple_build_call (decl, 2, arg0, arg1);
	}
      if (!VOID_TYPE_P (type))
	{
	  res = create_tmp_reg_or_ssa_name (type);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}

/* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
	      tree type, tree arg0, tree arg1, tree arg2)
{
  tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
			      seq, gimple_build_valueize);
  if (!res)
    {
      gcall *stmt;
      if (internal_fn_p (fn))
	stmt = gimple_build_call_internal (as_internal_fn (fn),
					   3, arg0, arg1, arg2);
      else
	{
	  tree decl = builtin_decl_implicit (as_builtin_fn (fn));
	  stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
	}
      if (!VOID_TYPE_P (type))
	{
	  res = create_tmp_reg_or_ssa_name (type);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}

/* Build the conversion (TYPE) OP with a result of type TYPE
   with location LOC if such a conversion is necessary in GIMPLE,
   simplifying it first.
   Returns the built expression value and appends
   statements possibly defining it to SEQ.  */

tree
gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
{
  if (useless_type_conversion_p (type, TREE_TYPE (op)))
    return op;
  return gimple_build (seq, loc, NOP_EXPR, type, op);
}

/* Build the conversion (ptrofftype) OP with a result of a type
   compatible with ptrofftype with location LOC if such a conversion
   is necessary in GIMPLE, simplifying it first.
   Returns the built expression value and appends
   statements possibly defining it to SEQ.  */

tree
gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
{
  if (ptrofftype_p (TREE_TYPE (op)))
    return op;
  return gimple_convert (seq, loc, sizetype, op);
}
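
/* A minimal usage sketch of the gimple_build/gimple_convert API above
   (illustrative only; the function and variable names are hypothetical
   and the snippet is not part of GCC).  It emits "(T)a + (T)b" into a
   statement sequence, letting gimple_simplify fold on the fly.  */
#if 0
static tree
example_emit_sum (gimple_seq *seq, location_t loc, tree type,
		  tree a, tree b)
{
  /* No statement is emitted when a conversion is useless.  */
  tree a1 = gimple_convert (seq, loc, type, a);
  tree b1 = gimple_convert (seq, loc, type, b);
  /* Tries gimple_simplify first; otherwise appends a new assignment
     defining a fresh SSA name to SEQ.  */
  return gimple_build (seq, loc, PLUS_EXPR, type, a1, b1);
}
#endif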

/* Build a vector of type TYPE in which each element has the value OP.
   Return a gimple value for the result, appending any new statements
   to SEQ.  */

tree
gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
			      tree op)
{
  if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
      && !CONSTANT_CLASS_P (op))
    return gimple_build (seq, loc, VEC_DUPLICATE_EXPR, type, op);

  tree res, vec = build_vector_from_val (type, op);
  if (is_gimple_val (vec))
    return vec;
  if (gimple_in_ssa_p (cfun))
    res = make_ssa_name (type);
  else
    res = create_tmp_reg (type);
  gimple *stmt = gimple_build_assign (res, vec);
  gimple_set_location (stmt, loc);
  gimple_seq_add_stmt_without_update (seq, stmt);
  return res;
}
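
/* For example (illustrative): splatting the SSA name x_1 into a
   variable-length vector type yields a VEC_DUPLICATE_EXPR, while for a
   fixed-length V4SI type the result is either the VECTOR_CST
   { 3, 3, 3, 3 } when OP is the constant 3, or an assignment from the
   CONSTRUCTOR { x_1, x_1, x_1, x_1 } otherwise.  */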

/* Build a vector from BUILDER, handling the case in which some elements
   are non-constant.  Return a gimple value for the result, appending any
   new instructions to SEQ.

   BUILDER must not have a stepped encoding on entry.  This is because
   the function is not geared up to handle the arithmetic that would
   be needed in the variable case, and any code building a vector that
   is known to be constant should use BUILDER->build () directly.  */

tree
gimple_build_vector (gimple_seq *seq, location_t loc,
		     tree_vector_builder *builder)
{
  gcc_assert (builder->nelts_per_pattern () <= 2);
  unsigned int encoded_nelts = builder->encoded_nelts ();
  for (unsigned int i = 0; i < encoded_nelts; ++i)
    if (!TREE_CONSTANT ((*builder)[i]))
      {
	tree type = builder->type ();
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
	vec<constructor_elt, va_gc> *v;
	vec_alloc (v, nelts);
	for (i = 0; i < nelts; ++i)
	  CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));

	tree res;
	if (gimple_in_ssa_p (cfun))
	  res = make_ssa_name (type);
	else
	  res = create_tmp_reg (type);
	gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
	gimple_set_location (stmt, loc);
	gimple_seq_add_stmt_without_update (seq, stmt);
	return res;
      }
  return builder->build ();
}
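
/* For example (illustrative): if BUILDER encodes { x_1, 0, 0, 0 } for a
   4-element vector, the non-constant element x_1 forces the CONSTRUCTOR
   path above, whereas an all-constant encoding such as { 1, 2, 1, 2 }
   is returned directly as a VECTOR_CST via builder->build ().  */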

/* Return true if the result of assignment STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				   int depth)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
					     gimple_expr_type (stmt),
					     gimple_assign_rhs1 (stmt),
					     strict_overflow_p, depth);
    case GIMPLE_BINARY_RHS:
      return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
					      gimple_expr_type (stmt),
					      gimple_assign_rhs1 (stmt),
					      gimple_assign_rhs2 (stmt),
					      strict_overflow_p, depth);
    case GIMPLE_TERNARY_RHS:
      return false;
    case GIMPLE_SINGLE_RHS:
      return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
					      strict_overflow_p, depth);
    case GIMPLE_INVALID_RHS:
      break;
    }
  gcc_unreachable ();
}

/* Return true if the return value of call STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				 int depth)
{
  tree arg0 = gimple_call_num_args (stmt) > 0
    ? gimple_call_arg (stmt, 0) : NULL_TREE;
  tree arg1 = gimple_call_num_args (stmt) > 1
    ? gimple_call_arg (stmt, 1) : NULL_TREE;

  return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
					gimple_call_combined_fn (stmt),
					arg0,
					arg1,
					strict_overflow_p, depth);
}

/* Return true if the result of PHI STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				int depth)
{
  for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
    {
      tree arg = gimple_phi_arg_def (stmt, i);
      if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
	return false;
    }
  return true;
}

/* Return true if STMT is known to compute a non-negative value.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

bool
gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				 int depth)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
						depth);
    case GIMPLE_CALL:
      return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
					      depth);
    case GIMPLE_PHI:
      return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
					     depth);
    default:
      return false;
    }
}
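
/* For example (illustrative, SSA names hypothetical): for

     y_2 = ABS_EXPR <x_1>;

   with X of signed integer type, the result is reported as
   non-negative only under the assumption that signed overflow is
   undefined (ABS of INT_MIN would overflow), so *STRICT_OVERFLOW_P is
   set to true in that case.  */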

/* Return true if the floating-point value computed by assignment STMT
   is known to have an integer value.  We also allow +Inf, -Inf and NaN
   to be considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
					  gimple_assign_rhs1 (stmt), depth);
    case GIMPLE_BINARY_RHS:
      return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
					   gimple_assign_rhs1 (stmt),
					   gimple_assign_rhs2 (stmt), depth);
    case GIMPLE_TERNARY_RHS:
      return false;
    case GIMPLE_SINGLE_RHS:
      return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
    case GIMPLE_INVALID_RHS:
      break;
    }
  gcc_unreachable ();
}

/* Return true if the floating-point value computed by call STMT is known
   to have an integer value.  We also allow +Inf, -Inf and NaN to be
   considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_call_integer_valued_real_p (gimple *stmt, int depth)
{
  tree arg0 = (gimple_call_num_args (stmt) > 0
	       ? gimple_call_arg (stmt, 0)
	       : NULL_TREE);
  tree arg1 = (gimple_call_num_args (stmt) > 1
	       ? gimple_call_arg (stmt, 1)
	       : NULL_TREE);
  return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
				     arg0, arg1, depth);
}

/* Return true if the floating-point result of phi STMT is known to have
   an integer value.  We also allow +Inf, -Inf and NaN to be considered
   integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
{
  for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
    {
      tree arg = gimple_phi_arg_def (stmt, i);
      if (!integer_valued_real_single_p (arg, depth + 1))
	return false;
    }
  return true;
}

/* Return true if the floating-point value computed by STMT is known
   to have an integer value.  We also allow +Inf, -Inf and NaN to be
   considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

bool
gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      return gimple_assign_integer_valued_real_p (stmt, depth);
    case GIMPLE_CALL:
      return gimple_call_integer_valued_real_p (stmt, depth);
    case GIMPLE_PHI:
      return gimple_phi_integer_valued_real_p (stmt, depth);
    default:
      return false;
    }
}
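
/* For example (illustrative, SSA names hypothetical): the result of

     _1 = __builtin_trunc (x_2);

   is always integer-valued (or Inf/NaN), so
   gimple_stmt_integer_valued_real_p returns true for it, whereas a
   plain addition of two arbitrary doubles yields false.  */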