/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2019 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "gimple-ssa-warn-restrict.h"
#include "fold-const.h"
#include "stmt.h"
#include "expr.h"
#include "stor-layout.h"
#include "dumpfile.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-object-size.h"
#include "tree-ssa.h"
#include "tree-ssa-propagate.h"
#include "ipa-utils.h"
#include "tree-ssa-address.h"
#include "langhooks.h"
#include "gimplify-me.h"
#include "dbgcnt.h"
#include "builtins.h"
#include "tree-eh.h"
#include "gimple-match.h"
#include "gomp-constants.h"
#include "optabs-query.h"
#include "omp-general.h"
#include "tree-cfg.h"
#include "fold-const-call.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "calls.h"
#include "tree-vector-builder.h"
#include "tree-ssa-strlen.h"

enum strlen_range_kind {
  /* Compute the exact constant string length.  */
  SRK_STRLEN,
  /* Compute the maximum constant string length.  */
  SRK_STRLENMAX,
  /* Compute a range of string lengths bounded by object sizes.  When
     the length of a string cannot be determined, consider as the upper
     bound the size of the enclosing object the string may be a member
     or element of.  Also determine the size of the largest character
     array the string may refer to.  */
  SRK_LENRANGE,
  /* Determine the integer value of the argument (not string length).  */
  SRK_INT_VALUE
};

static bool
get_range_strlen (tree, bitmap *, strlen_range_kind, c_strlen_data *, unsigned);

/* Return true when DECL can be referenced from the current unit.
   FROM_DECL (if non-null) specifies the constructor of the variable
   DECL was taken from.  We can get declarations that are not possible
   to reference for various reasons:

     1) When analyzing C++ virtual tables.
        C++ virtual tables do have known constructors even
        when they are keyed to another compilation unit.
        Those tables can contain pointers to methods and vars
        in other units.  Those methods have both STATIC and EXTERNAL
        set.
     2) In WHOPR mode devirtualization might lead to a reference
        to a method that was partitioned elsewhere.
        In this case we have a static VAR_DECL or FUNCTION_DECL
        that has no corresponding callgraph/varpool node
        declaring the body.
     3) COMDAT functions referred to by external vtables that
        we devirtualize only during the final compilation stage.
        At this time we have already decided that we will not output
        the function body and thus we can't reference the symbol
        directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || !VAR_OR_FUNCTION_DECL_P (decl))
    return true;

  /* Static objects can be referred to only if they are defined and not
     optimized out yet.  */
  if (!TREE_PUBLIC (decl))
    {
      if (DECL_EXTERNAL (decl))
        return false;
      /* Before we start optimizing unreachable code we can be sure all
         static objects are defined.  */
      if (symtab->function_flags_ready)
        return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
        return false;
      node = dyn_cast <cgraph_node *> (snode);
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from the initializer of
     an external var or a var that has been optimized out.  */
  if (!from_decl
      || !VAR_P (from_decl)
      || (!DECL_EXTERNAL (from_decl)
          && (vnode = varpool_node::get (from_decl)) != NULL
          && vnode->definition)
      || (flag_ltrans
          && (vnode = varpool_node::get (from_decl)) != NULL
          && vnode->in_other_partition))
    return true;
  /* We are folding a reference from an external vtable.  The vtable may
     refer to a symbol keyed to another compilation unit.  The other
     compilation unit may be in a separate DSO and the symbol may be
     hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When a function is public, we can always introduce a new reference.
     The exception is COMDAT functions, where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have a COMDAT.  We are going to check if we still have a definition
     or if the definition is going to be output in another partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when the corresponding vtable is output.
     This is however not possible - the ABI specifies that COMDATs are output
     in units where they are used, and when the other unit was compiled with
     LTO it is possible that the vtable was kept public while the function
     itself was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
          && (!snode->in_other_partition
              || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->global.inlined_to;
}

/* Create a temporary for TYPE for a statement STMT.  If the current function
   is in SSA form, an SSA name is created.  Otherwise a temporary register
   is made.  */

tree
create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
{
  if (gimple_in_ssa_p (cfun))
    return make_ssa_name (type, stmt);
  else
    return create_tmp_reg (type);
}

/* CVAL is a value taken from the DECL_INITIAL of a variable.  Try to
   transform it into an acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies the variable whose constructor
   contains CVAL.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  if (CONSTANT_CLASS_P (cval))
    return cval;

  tree orig_cval = cval;
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
        cval = build1_loc (EXPR_LOCATION (cval),
                           ADDR_EXPR, TREE_TYPE (ptr),
                           fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
                                        ptr,
                                        fold_convert (ptr_type_node,
                                                      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
        {
          base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
          if (base)
            TREE_OPERAND (cval, 0) = base;
        }
      else
        base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
        return NULL_TREE;

      if (VAR_OR_FUNCTION_DECL_P (base)
          && !can_refer_decl_in_current_unit_p (base, from_decl))
        return NULL_TREE;
      if (TREE_TYPE (base) == error_mark_node)
        return NULL_TREE;
      if (VAR_P (base))
        TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
        {
          /* Make sure we create a cgraph node for functions we'll reference.
             They can be non-existent if the reference comes from an entry
             of an external vtable for example.  */
          cgraph_node::get_create (base);
        }
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
        cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
        cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  /* In CONSTRUCTORs we may see unfolded constants like (int (*) ()) 0.  */
  if (TREE_CODE (cval) == INTEGER_CST)
    {
      if (TREE_OVERFLOW_P (cval))
        cval = drop_tree_overflow (cval);
      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
        cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  return orig_cval;
}
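
/* Illustrative example (added for exposition; grounded in the code
   above): a global initializer such as

     static int a[4];
     int *p = &a[0] + 2;     // folded to POINTER_PLUS_EXPR <&a, 8>

   is rewritten by the POINTER_PLUS_EXPR case into an ADDR_EXPR of a
   MEM_REF, roughly &MEM[(void *) &a + 8], a form that
   is_gimple_min_invariant accepts.  */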

/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  tree val = ctor_for_folding (sym);
  if (val != error_mark_node)
    {
      if (val)
        {
          val = canonicalize_constructor_val (unshare_expr (val), sym);
          if (val && is_gimple_min_invariant (val))
            return val;
          else
            return NULL_TREE;
        }
      /* Variables declared 'const' without an initializer
         have zero as the initializer if they may not be
         overridden at link or run time.  */
      if (!val
          && is_gimple_reg_type (TREE_TYPE (sym)))
        return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}
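
/* Illustrative example: for

     static const int forty_two = 42;

   get_symbol_constant_value returns the INTEGER_CST 42; for a 'const'
   variable of gimple register type with no initializer that cannot be
   overridden at link or run time, it returns a zero constant of the
   variable's type, per the code above.  */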


/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree result;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
                           TREE_CODE (expr),
                           TREE_TYPE (expr),
                           TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
           && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
                             TREE_CODE (expr),
                             TREE_TYPE (expr),
                             TREE_OPERAND (expr, 0),
                             TREE_OPERAND (expr, 1),
                             TREE_OPERAND (expr, 2));

  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  return NULL_TREE;
}
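
/* Illustrative examples of the folds above: __real__ of a COMPLEX_CST
   folds to its constant real part via fold_unary_loc, and a
   BIT_FIELD_REF of a VECTOR_CST folds to the selected element(s) via
   fold_ternary_loc.  */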


/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        if (TREE_CLOBBER_P (rhs))
          return NULL_TREE;

        if (REFERENCE_CLASS_P (rhs))
          return maybe_fold_reference (rhs, false);

        else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
          {
            tree val = OBJ_TYPE_REF_EXPR (rhs);
            if (is_gimple_min_invariant (val))
              return val;
            else if (flag_devirtualize && virtual_method_call_p (rhs))
              {
                bool final;
                vec <cgraph_node *> targets
                  = possible_polymorphic_call_targets (rhs, stmt, &final);
                if (final && targets.length () <= 1 && dbg_cnt (devirt))
                  {
                    if (dump_enabled_p ())
                      {
                        dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
                                         "resolving virtual function address "
                                         "reference to function %s\n",
                                         targets.length () == 1
                                         ? targets[0]->name ()
                                         : "NULL");
                      }
                    if (targets.length () == 1)
                      {
                        val = fold_convert (TREE_TYPE (val),
                                            build_fold_addr_expr_loc
                                              (loc, targets[0]->decl));
                        STRIP_USELESS_TYPE_CONVERSION (val);
                      }
                    else
                      /* We cannot use __builtin_unreachable here because it
                         cannot have its address taken.  */
                      val = build_int_cst (TREE_TYPE (val), 0);
                    return val;
                  }
              }
          }

        else if (TREE_CODE (rhs) == ADDR_EXPR)
          {
            tree ref = TREE_OPERAND (rhs, 0);
            tree tem = maybe_fold_reference (ref, true);
            if (tem
                && TREE_CODE (tem) == MEM_REF
                && integer_zerop (TREE_OPERAND (tem, 1)))
              result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
            else if (tem)
              result = fold_convert (TREE_TYPE (rhs),
                                     build_fold_addr_expr_loc (loc, tem));
            else if (TREE_CODE (ref) == MEM_REF
                     && integer_zerop (TREE_OPERAND (ref, 1)))
              result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));

            if (result)
              {
                /* Strip away useless type conversions.  Both the
                   NON_LVALUE_EXPR that may have been added by fold, and
                   "useless" type conversions that might now be apparent
                   due to propagation.  */
                STRIP_USELESS_TYPE_CONVERSION (result);

                if (result != rhs && valid_gimple_rhs_p (result))
                  return result;
              }
          }

        else if (TREE_CODE (rhs) == CONSTRUCTOR
                 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
          {
            /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
            unsigned i;
            tree val;

            FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
              if (! CONSTANT_CLASS_P (val))
                return NULL_TREE;

            return build_vector_from_ctor (TREE_TYPE (rhs),
                                           CONSTRUCTOR_ELTS (rhs));
          }

        else if (DECL_P (rhs))
          return get_symbol_constant_value (rhs);
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
                                 TREE_TYPE (gimple_assign_lhs (stmt)),
                                 gimple_assign_rhs1 (stmt),
                                 gimple_assign_rhs2 (stmt),
                                 gimple_assign_rhs3 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
            return result;
        }
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
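
/* Illustrative example: in the GIMPLE_SINGLE_RHS case above, an
   assignment along the lines of

     p_1 = &MEM[(char *) q_2 + 0];

   has its rhs folded to plain q_2 (converted to the type of p_1 if
   necessary) by the ADDR_EXPR handling.  */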


/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts' location and virtual operands.
   If the statement has an lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
           && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
          || (is_gimple_call (new_stmt)
              && (gimple_call_flags (new_stmt)
                  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
        {
          tree vdef;
          if (!laststore)
            vdef = gimple_vdef (stmt);
          else
            vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
          gimple_set_vdef (new_stmt, vdef);
          if (vdef && TREE_CODE (vdef) == SSA_NAME)
            SSA_NAME_DEF_STMT (vdef) = new_stmt;
          laststore = new_stmt;
        }
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with the exact
         SSA name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
        gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
        reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store, release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
          && TREE_CODE (vdef) == SSA_NAME)
        {
          unlink_stmt_vdef (stmt);
          release_ssa_name (vdef);
        }
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
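
/* Illustrative sketch of the virtual operand update above: when a
   store STMT with operands VUSE <.MEM_1> / VDEF <.MEM_2> is replaced
   by two stores S1 and S2, the result is

     S1: VUSE <.MEM_1>, VDEF <.MEM_3>   (.MEM_3 is a fresh SSA name)
     S2: VUSE <.MEM_3>, VDEF <.MEM_2>

   so the first VUSE and the last VDEF of the sequence match those of
   the replaced statement.  */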

/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
         which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
        {
          pop_gimplify_context (NULL);
          if (gimple_in_ssa_p (cfun))
            {
              unlink_stmt_vdef (stmt);
              release_defs (stmt);
            }
          gsi_replace (si_p, gimple_build_nop (), false);
          return;
        }
    }
  else
    {
      tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
                                       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  gsi_replace_with_seq_vops (si_p, stmts);
}


/* Replace the call at *GSI with the gimple value VAL.  */

void
replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  gimple *repl;
  if (lhs)
    {
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
        val = fold_convert (TREE_TYPE (lhs), val);
      repl = gimple_build_assign (lhs, val);
    }
  else
    repl = gimple_build_nop ();
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      unlink_stmt_vdef (stmt);
      release_ssa_name (vdef);
    }
  gsi_replace (gsi, repl, false);
}

/* Replace the call at *GSI with the new call REPL and fold that
   again.  */

static void
replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
{
  gimple *stmt = gsi_stmt (*gsi);
  gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
  gimple_set_location (repl, gimple_location (stmt));
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
    {
      gimple_set_vdef (repl, gimple_vdef (stmt));
      SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
    }
  if (gimple_vuse (stmt))
    gimple_set_vuse (repl, gimple_vuse (stmt));
  gsi_replace (gsi, repl, false);
  fold_stmt (gsi);
}

/* Return true if VAR is a VAR_DECL or a component thereof.  */

static bool
var_decl_component_p (tree var)
{
  tree inner = var;
  while (handled_component_p (inner))
    inner = TREE_OPERAND (inner, 0);
  return (DECL_P (inner)
          || (TREE_CODE (inner) == MEM_REF
              && TREE_CODE (TREE_OPERAND (inner, 0)) == ADDR_EXPR));
}

/* Return TRUE if the SIZE argument, representing the size of an
   object, is in a range of values of which exactly zero is valid.  */

static bool
size_must_be_zero_p (tree size)
{
  if (integer_zerop (size))
    return true;

  if (TREE_CODE (size) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (size)))
    return false;

  tree type = TREE_TYPE (size);
  int prec = TYPE_PRECISION (type);

  /* Compute the value of SSIZE_MAX, the largest positive value that
     can be stored in ssize_t, the signed counterpart of size_t.  */
  wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
  value_range_base valid_range (VR_RANGE,
                                build_int_cst (type, 0),
                                wide_int_to_tree (type, ssize_max));
  value_range_base vr;
  get_range_info (size, vr);
  vr.intersect (&valid_range);
  return vr.zero_p ();
}
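
/* Illustrative example: with a 64-bit size_t the valid sizes are
   [0, SSIZE_MAX] = [0, 0x7fffffffffffffff].  For

     size_t n = (size_t) m;   // m is a signed int known to be <= 0

   range info gives N as either zero or at least 2^63, so intersecting
   with the valid range leaves exactly [0, 0] and the function returns
   true.  */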

/* Fold function call to builtin mem{{,p}cpy,move}.  Try to detect and
   diagnose (otherwise undefined) overlapping copies without preventing
   folding.  When folded, GCC guarantees that overlapping memcpy has
   the same semantics as memmove.  A call to the library memcpy need not
   provide the same guarantee.  Return false if no simplification can
   be made.  */

static bool
gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
                               tree dest, tree src, enum built_in_function code)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree len = gimple_call_arg (stmt, 2);
  tree destvar, srcvar;
  location_t loc = gimple_location (stmt);

  /* If the LEN parameter is a constant zero or in a range where
     the only valid value is zero, return DEST.  */
  if (size_must_be_zero_p (len))
    {
      gimple *repl;
      if (gimple_call_lhs (stmt))
        repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
      else
        repl = gimple_build_nop ();
      tree vdef = gimple_vdef (stmt);
      if (vdef && TREE_CODE (vdef) == SSA_NAME)
        {
          unlink_stmt_vdef (stmt);
          release_ssa_name (vdef);
        }
      gsi_replace (gsi, repl, false);
      return true;
    }

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    {
      /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
         It's safe and may even be emitted by GCC itself (see bug
         32667).  */
      unlink_stmt_vdef (stmt);
      if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
        release_ssa_name (gimple_vdef (stmt));
      if (!lhs)
        {
          gsi_replace (gsi, gimple_build_nop (), false);
          return true;
        }
      goto done;
    }
  else
    {
      tree srctype, desttype;
      unsigned int src_align, dest_align;
      tree off0;
      const char *tmp_str;
      unsigned HOST_WIDE_INT tmp_len;

      /* Build accesses at offset zero with a ref-all character type.  */
      off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
                                                         ptr_mode, true), 0);

      /* If we can perform the copy efficiently by first doing all loads
         and then all stores, inline it that way.  Currently "efficiently"
         means that we can load all the memory into a single integer
         register, which is what MOVE_MAX gives us.  */
      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (tree_fits_uhwi_p (len)
          && compare_tree_int (len, MOVE_MAX) <= 0
          /* ??? Don't transform copies from strings with known length;
             this confuses tree-ssa-strlen.c.  This doesn't handle
             the case in gcc.dg/strlenopt-8.c which is XFAILed for that
             reason.  */
          && !c_strlen (src, 2)
          && !((tmp_str = c_getstr (src, &tmp_len)) != NULL
               && memchr (tmp_str, 0, tmp_len) == NULL))
        {
          unsigned ilen = tree_to_uhwi (len);
          if (pow2p_hwi (ilen))
            {
              /* Detect out-of-bounds accesses without issuing warnings.
                 Avoid folding out-of-bounds copies but, to avoid false
                 positives for unreachable code, defer the warning until
                 after DCE has worked its magic.
                 -Wrestrict is still diagnosed.  */
              if (int warning = check_bounds_or_overlap (as_a <gcall *> (stmt),
                                                         dest, src, len, len,
                                                         false, false))
                if (warning != OPT_Wrestrict)
                  return false;

              scalar_int_mode mode;
              tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
              if (type
                  && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
                  && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
                  /* If the destination pointer is not aligned we must be able
                     to emit an unaligned store.  */
                  && (dest_align >= GET_MODE_ALIGNMENT (mode)
                      || !targetm.slow_unaligned_access (mode, dest_align)
                      || (optab_handler (movmisalign_optab, mode)
                          != CODE_FOR_nothing)))
                {
                  tree srctype = type;
                  tree desttype = type;
                  if (src_align < GET_MODE_ALIGNMENT (mode))
                    srctype = build_aligned_type (type, src_align);
                  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
                  tree tem = fold_const_aggregate_ref (srcmem);
                  if (tem)
                    srcmem = tem;
                  else if (src_align < GET_MODE_ALIGNMENT (mode)
                           && targetm.slow_unaligned_access (mode, src_align)
                           && (optab_handler (movmisalign_optab, mode)
                               == CODE_FOR_nothing))
                    srcmem = NULL_TREE;
                  if (srcmem)
                    {
                      gimple *new_stmt;
                      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
                        {
                          new_stmt = gimple_build_assign (NULL_TREE, srcmem);
                          srcmem
                            = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
                                                          new_stmt);
                          gimple_assign_set_lhs (new_stmt, srcmem);
                          gimple_set_vuse (new_stmt, gimple_vuse (stmt));
                          gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
                        }
                      if (dest_align < GET_MODE_ALIGNMENT (mode))
                        desttype = build_aligned_type (type, dest_align);
                      new_stmt
                        = gimple_build_assign (fold_build2 (MEM_REF, desttype,
                                                            dest, off0),
                                               srcmem);
                      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
                      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
                      if (gimple_vdef (new_stmt)
                          && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
                        SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
                      if (!lhs)
                        {
                          gsi_replace (gsi, new_stmt, false);
                          return true;
                        }
                      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
                      goto done;
                    }
                }
            }
        }

      if (code == BUILT_IN_MEMMOVE)
        {
          /* Both DEST and SRC must be pointer types.
             ??? This is what the old code did.  Is testing for pointer
             types really mandatory?

             If either SRC is readonly or length is 1, we can use memcpy.  */
          if (!dest_align || !src_align)
            return false;
          if (readonly_data_expr (src)
              || (tree_fits_uhwi_p (len)
                  && (MIN (src_align, dest_align) / BITS_PER_UNIT
                      >= tree_to_uhwi (len))))
            {
              tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
              if (!fn)
                return false;
              gimple_call_set_fndecl (stmt, fn);
              gimple_call_set_arg (stmt, 0, dest);
              gimple_call_set_arg (stmt, 1, src);
              fold_stmt (gsi);
              return true;
            }

          /* If *src and *dest can't overlap, optimize into memcpy as well.  */
          if (TREE_CODE (src) == ADDR_EXPR
              && TREE_CODE (dest) == ADDR_EXPR)
            {
              tree src_base, dest_base, fn;
              poly_int64 src_offset = 0, dest_offset = 0;
              poly_uint64 maxsize;

              srcvar = TREE_OPERAND (src, 0);
              src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
              if (src_base == NULL)
                src_base = srcvar;
              destvar = TREE_OPERAND (dest, 0);
              dest_base = get_addr_base_and_unit_offset (destvar,
                                                         &dest_offset);
              if (dest_base == NULL)
                dest_base = destvar;
              if (!poly_int_tree_p (len, &maxsize))
                maxsize = -1;
              if (SSA_VAR_P (src_base)
                  && SSA_VAR_P (dest_base))
                {
                  if (operand_equal_p (src_base, dest_base, 0)
                      && ranges_maybe_overlap_p (src_offset, maxsize,
                                                 dest_offset, maxsize))
                    return false;
                }
              else if (TREE_CODE (src_base) == MEM_REF
                       && TREE_CODE (dest_base) == MEM_REF)
                {
                  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
                                         TREE_OPERAND (dest_base, 0), 0))
                    return false;
                  poly_offset_int full_src_offset
                    = mem_ref_offset (src_base) + src_offset;
                  poly_offset_int full_dest_offset
                    = mem_ref_offset (dest_base) + dest_offset;
                  if (ranges_maybe_overlap_p (full_src_offset, maxsize,
                                              full_dest_offset, maxsize))
                    return false;
                }
              else
                return false;

              fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
              if (!fn)
                return false;
              gimple_call_set_fndecl (stmt, fn);
              gimple_call_set_arg (stmt, 0, dest);
              gimple_call_set_arg (stmt, 1, src);
              fold_stmt (gsi);
              return true;
            }

          /* If the destination and source do not alias, optimize into
             memcpy as well.  */
          if ((is_gimple_min_invariant (dest)
               || TREE_CODE (dest) == SSA_NAME)
              && (is_gimple_min_invariant (src)
                  || TREE_CODE (src) == SSA_NAME))
            {
              ao_ref destr, srcr;
              ao_ref_init_from_ptr_and_size (&destr, dest, len);
              ao_ref_init_from_ptr_and_size (&srcr, src, len);
              if (!refs_may_alias_p_1 (&destr, &srcr, false))
                {
                  tree fn;
                  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
                  if (!fn)
                    return false;
                  gimple_call_set_fndecl (stmt, fn);
                  gimple_call_set_arg (stmt, 0, dest);
                  gimple_call_set_arg (stmt, 1, src);
                  fold_stmt (gsi);
                  return true;
                }
            }

          return false;
        }

      if (!tree_fits_shwi_p (len))
        return false;
      if (!POINTER_TYPE_P (TREE_TYPE (src))
          || !POINTER_TYPE_P (TREE_TYPE (dest)))
        return false;
      /* In the following, try to find a type that is most natural to be
         used for the memcpy source and destination and that allows
         the most optimization when memcpy is turned into a plain assignment
         using that type.  In theory we could always use a char[len] type
         but that only gains us that the destination and source possibly
         no longer will have their address taken.  */
      srctype = TREE_TYPE (TREE_TYPE (src));
      if (TREE_CODE (srctype) == ARRAY_TYPE
          && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
        srctype = TREE_TYPE (srctype);
      desttype = TREE_TYPE (TREE_TYPE (dest));
      if (TREE_CODE (desttype) == ARRAY_TYPE
          && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
        desttype = TREE_TYPE (desttype);
      if (TREE_ADDRESSABLE (srctype)
          || TREE_ADDRESSABLE (desttype))
        return false;

      /* Make sure we are not copying using a floating-point mode or
         a type whose size possibly does not match its precision.  */
      if (FLOAT_MODE_P (TYPE_MODE (desttype))
          || TREE_CODE (desttype) == BOOLEAN_TYPE
          || TREE_CODE (desttype) == ENUMERAL_TYPE)
        desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
      if (FLOAT_MODE_P (TYPE_MODE (srctype))
          || TREE_CODE (srctype) == BOOLEAN_TYPE
          || TREE_CODE (srctype) == ENUMERAL_TYPE)
        srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
      if (!srctype)
        srctype = desttype;
      if (!desttype)
        desttype = srctype;
      if (!srctype)
        return false;

      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (dest_align < TYPE_ALIGN (desttype)
          || src_align < TYPE_ALIGN (srctype))
        return false;

      destvar = NULL_TREE;
      if (TREE_CODE (dest) == ADDR_EXPR
          && var_decl_component_p (TREE_OPERAND (dest, 0))
          && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
        destvar = fold_build2 (MEM_REF, desttype, dest, off0);

      srcvar = NULL_TREE;
      if (TREE_CODE (src) == ADDR_EXPR
          && var_decl_component_p (TREE_OPERAND (src, 0))
          && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
        {
          if (!destvar
              || src_align >= TYPE_ALIGN (desttype))
            srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
                                  src, off0);
          else if (!STRICT_ALIGNMENT)
            {
              srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
                                            src_align);
              srcvar = fold_build2 (MEM_REF, srctype, src, off0);
            }
        }

      if (srcvar == NULL_TREE && destvar == NULL_TREE)
        return false;

      if (srcvar == NULL_TREE)
        {
          if (src_align >= TYPE_ALIGN (desttype))
            srcvar = fold_build2 (MEM_REF, desttype, src, off0);
          else
            {
              if (STRICT_ALIGNMENT)
                return false;
              srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
                                            src_align);
              srcvar = fold_build2 (MEM_REF, srctype, src, off0);
            }
        }
      else if (destvar == NULL_TREE)
        {
          if (dest_align >= TYPE_ALIGN (srctype))
            destvar = fold_build2 (MEM_REF, srctype, dest, off0);
          else
            {
              if (STRICT_ALIGNMENT)
                return false;
              desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
                                             dest_align);
              destvar = fold_build2 (MEM_REF, desttype, dest, off0);
            }
        }

      /* Same as above, detect out-of-bounds accesses without issuing
         warnings.  Avoid folding out-of-bounds copies but, to avoid
         false positives for unreachable code, defer the warning until
         after DCE has worked its magic.
         -Wrestrict is still diagnosed.  */
      if (int warning = check_bounds_or_overlap (as_a <gcall *> (stmt),
                                                 dest, src, len, len,
                                                 false, false))
        if (warning != OPT_Wrestrict)
          return false;

      gimple *new_stmt;
      if (is_gimple_reg_type (TREE_TYPE (srcvar)))
        {
          tree tem = fold_const_aggregate_ref (srcvar);
          if (tem)
            srcvar = tem;
          if (! is_gimple_min_invariant (srcvar))
            {
              new_stmt = gimple_build_assign (NULL_TREE, srcvar);
              srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
                                                   new_stmt);
              gimple_assign_set_lhs (new_stmt, srcvar);
              gimple_set_vuse (new_stmt, gimple_vuse (stmt));
              gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
            }
          new_stmt = gimple_build_assign (destvar, srcvar);
          goto set_vop_and_replace;
        }

      /* We get an aggregate copy.  Use an unsigned char[] type to
         perform the copying to preserve padding and to avoid any issues
         with TREE_ADDRESSABLE types or float modes behavior on copying.  */
      desttype = build_array_type_nelts (unsigned_char_type_node,
                                         tree_to_uhwi (len));
      srctype = desttype;
      if (src_align > TYPE_ALIGN (srctype))
        srctype = build_aligned_type (srctype, src_align);
      if (dest_align > TYPE_ALIGN (desttype))
        desttype = build_aligned_type (desttype, dest_align);
      new_stmt
        = gimple_build_assign (fold_build2 (MEM_REF, desttype, dest, off0),
                               fold_build2 (MEM_REF, srctype, src, off0));
set_vop_and_replace:
      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
      if (gimple_vdef (new_stmt)
          && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
        SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
      if (!lhs)
        {
          gsi_replace (gsi, new_stmt, false);
          return true;
        }
      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
    }

done:
  gimple_seq stmts = NULL;
  if (code == BUILT_IN_MEMCPY || code == BUILT_IN_MEMMOVE)
    len = NULL_TREE;
  else if (code == BUILT_IN_MEMPCPY)
    {
      len = gimple_convert_to_ptrofftype (&stmts, loc, len);
      dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
                           TREE_TYPE (dest), dest, len);
    }
  else
    gcc_unreachable ();

  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gimple *repl = gimple_build_assign (lhs, dest);
  gsi_replace (gsi, repl, false);
  return true;
}
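
/* Illustrative example of the folding above: with suitably aligned
   4-byte operands,

     memcpy (d, s, 4);

   becomes a single load/store pair, roughly

     _1 = MEM[(unsigned int *) s];
     MEM[(unsigned int *) d] = _1;

   and a mempcpy whose result is used additionally assigns d + 4 to
   the lhs.  */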

/* Transform a call to built-in bcmp (a, b, len) at *GSI into one
   to built-in memcmp (a, b, len).  */

static bool
gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);

  if (!fn)
    return false;

  /* Transform bcmp (a, b, len) into memcmp (a, b, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree a = gimple_call_arg (stmt, 0);
  tree b = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  gimple *repl = gimple_build_call (fn, 3, a, b, len);
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}

/* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
   to built-in memmove (dest, src, len).  */

static bool
gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);

  if (!fn)
    return false;

  /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
     it's equivalent to memmove (not memcpy).  Transform bcopy (src, dest,
     len) into memmove (dest, src, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree src = gimple_call_arg (stmt, 0);
  tree dest = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}

/* Transform a call to built-in bzero (dest, len) at *GSI into one
   to built-in memset (dest, 0, len).  */

static bool
gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);

  if (!fn)
    return false;

  /* Transform bzero (dest, len) into memset (dest, 0, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree len = gimple_call_arg (stmt, 1);

  gimple_seq seq = NULL;
  gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
  gimple_seq_add_stmt_without_update (&seq, repl);
  gsi_replace_with_seq_vops (gsi, seq);
  fold_stmt (gsi);

  return true;
}

/* Fold function call to builtin memset or bzero at *GSI setting the
   memory of size LEN to the value C.  Return whether a simplification
   was made.  */

static bool
gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree etype;
  unsigned HOST_WIDE_INT length, cval;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
      return true;
    }

  if (! tree_fits_uhwi_p (len))
    return false;

  if (TREE_CODE (c) != INTEGER_CST)
    return false;

  tree dest = gimple_call_arg (stmt, 0);
  tree var = dest;
  if (TREE_CODE (var) != ADDR_EXPR)
    return false;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return false;

  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  if (!INTEGRAL_TYPE_P (etype)
      && !POINTER_TYPE_P (etype))
    return false;

  if (! var_decl_component_p (var))
    return false;

  length = tree_to_uhwi (len);
  if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
      || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
    return false;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return false;

  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
        return false;

      cval = TREE_INT_CST_LOW (c);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }
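
  /* CVAL now holds the byte C replicated across the whole word, e.g.
     0xab becomes 0xabababababababab for an 8-byte store.  The final
     step is written as (cval << 31) << 1 rather than cval << 32 so the
     shift count stays valid even for a 32-bit unsigned HOST_WIDE_INT.  */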

  var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
  gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
  gimple_set_vuse (store, gimple_vuse (stmt));
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      gimple_set_vdef (store, gimple_vdef (stmt));
      SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
    }
  gsi_insert_before (gsi, store, GSI_SAME_STMT);
  if (gimple_call_lhs (stmt))
    {
      gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
      gsi_replace (gsi, asgn, false);
    }
  else
    {
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (gsi);
      gsi_remove (&gsi2, true);
    }

  return true;
}

/* Helper of get_range_strlen for ARG that is not an SSA_NAME.  */

static bool
get_range_strlen_tree (tree arg, bitmap *visited, strlen_range_kind rkind,
                       c_strlen_data *pdata, unsigned eltsize)
{
  gcc_assert (TREE_CODE (arg) != SSA_NAME);

  /* The length computed by this invocation of the function.  */
  tree val = NULL_TREE;

  /* True if VAL is an optimistic (tight) bound determined from
     the size of the character array in which the string may be
     stored.  In that case, the computed VAL is used to set
     PDATA->MAXBOUND.  */
  bool tight_bound = false;

  /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
    {
      tree op = TREE_OPERAND (arg, 0);
      if (integer_zerop (TREE_OPERAND (op, 1)))
        {
          tree aop0 = TREE_OPERAND (op, 0);
          if (TREE_CODE (aop0) == INDIRECT_REF
              && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
            return get_range_strlen (TREE_OPERAND (aop0, 0), visited, rkind,
                                     pdata, eltsize);
        }
      else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF
               && rkind == SRK_LENRANGE)
        {
          /* Fail if an array is the last member of a struct object
             since it could be treated as a (fake) flexible array
             member.  */
          tree idx = TREE_OPERAND (op, 1);

          arg = TREE_OPERAND (op, 0);
          tree optype = TREE_TYPE (arg);
          if (tree dom = TYPE_DOMAIN (optype))
            if (tree bound = TYPE_MAX_VALUE (dom))
              if (TREE_CODE (bound) == INTEGER_CST
                  && TREE_CODE (idx) == INTEGER_CST
                  && tree_int_cst_lt (bound, idx))
                return false;
        }
    }

  if (rkind == SRK_INT_VALUE)
    {
      /* We are computing the maximum value (not string length).  */
      val = arg;
      if (TREE_CODE (val) != INTEGER_CST
          || tree_int_cst_sgn (val) < 0)
        return false;
    }
  else
    {
      c_strlen_data lendata = { };
      val = c_strlen (arg, 1, &lendata, eltsize);

      if (!val && lendata.decl)
        {
          /* ARG refers to an unterminated const character array
             LENDATA.DECL; use the minimum length determined for it.  */
          val = lendata.minlen;
          pdata->decl = lendata.decl;
        }
    }

  if (!val && rkind == SRK_LENRANGE)
    {
      if (TREE_CODE (arg) == ADDR_EXPR)
        return get_range_strlen (TREE_OPERAND (arg, 0), visited, rkind,
                                 pdata, eltsize);

      if (TREE_CODE (arg) == ARRAY_REF)
        {
          tree optype = TREE_TYPE (TREE_OPERAND (arg, 0));

          /* Determine the "innermost" array type.  */
          while (TREE_CODE (optype) == ARRAY_TYPE
                 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
            optype = TREE_TYPE (optype);

          /* Avoid arrays of pointers.  */
          tree eltype = TREE_TYPE (optype);
          if (TREE_CODE (optype) != ARRAY_TYPE
              || !INTEGRAL_TYPE_P (eltype))
            return false;

          /* Fail when the array bound is unknown or zero.  */
          val = TYPE_SIZE_UNIT (optype);
          if (!val || integer_zerop (val))
            return false;

          val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
                             integer_one_node);

          /* Set the minimum size to zero since the string in
             the array could have zero length.  */
          pdata->minlen = ssize_int (0);

          tight_bound = true;
        }
      else if (TREE_CODE (arg) == COMPONENT_REF
               && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
                   == ARRAY_TYPE))
        {
          /* Use the type of the member array to determine the upper
             bound on the length of the array.  This may be overly
             optimistic if the array itself isn't NUL-terminated and
             the caller relies on the subsequent member to contain
             the NUL but that would only be considered valid if
             the array were the last member of a struct.  */

          tree fld = TREE_OPERAND (arg, 1);

          tree optype = TREE_TYPE (fld);

          /* Determine the "innermost" array type.  */
          while (TREE_CODE (optype) == ARRAY_TYPE
                 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
            optype = TREE_TYPE (optype);

          /* Fail when the array bound is unknown or zero.  */
          val = TYPE_SIZE_UNIT (optype);
          if (!val || integer_zerop (val))
            return false;
          val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
                             integer_one_node);

          /* Set the minimum size to zero since the string in
             the array could have zero length.  */
          pdata->minlen = ssize_int (0);

          /* The array size determined above is an optimistic bound
             on the length.  If the array isn't nul-terminated the
             length computed by the library function would be greater.
             Even though using strlen to cross the subobject boundary
             is undefined, avoid drawing conclusions from the member
             type about the length here.  */
          tight_bound = true;
        }
      else if (VAR_P (arg))
        {
          /* Avoid handling pointers to arrays.  GCC might misuse
             a pointer to an array of one bound to point to an array
             object of a greater bound.  */
          tree argtype = TREE_TYPE (arg);
          if (TREE_CODE (argtype) == ARRAY_TYPE)
            {
              val = TYPE_SIZE_UNIT (argtype);
              if (!val
                  || TREE_CODE (val) != INTEGER_CST
                  || integer_zerop (val))
                return false;
              val = wide_int_to_tree (TREE_TYPE (val),
                                      wi::sub (wi::to_wide (val), 1));

              /* Set the minimum size to zero since the string in
                 the array could have zero length.  */
              pdata->minlen = ssize_int (0);
            }
        }
    }

  if (!val)
    return false;

  /* Adjust the lower bound on the string length as necessary.  */
  if (!pdata->minlen
      || (rkind != SRK_STRLEN
          && TREE_CODE (pdata->minlen) == INTEGER_CST
          && TREE_CODE (val) == INTEGER_CST
          && tree_int_cst_lt (val, pdata->minlen)))
    pdata->minlen = val;

  if (pdata->maxbound)
    {
      /* Adjust the tighter (more optimistic) string length bound
         if necessary and proceed to adjust the more conservative
         bound.  */
      if (TREE_CODE (val) == INTEGER_CST)
        {
          if (TREE_CODE (pdata->maxbound) == INTEGER_CST)
            {
              if (tree_int_cst_lt (pdata->maxbound, val))
                pdata->maxbound = val;
            }
          else
            pdata->maxbound = build_all_ones_cst (size_type_node);
        }
      else
        pdata->maxbound = val;
    }
  else
    pdata->maxbound = val;

  if (tight_bound)
    {
      /* VAL computed above represents an optimistically tight bound
         on the length of the string based on the referenced object's
         or subobject's type.  Determine the conservative upper bound
         based on the enclosing object's size if possible.  */
      if (rkind == SRK_LENRANGE)
        {
          poly_int64 offset;
          tree base = get_addr_base_and_unit_offset (arg, &offset);
          if (!base)
            {
              /* When the call above fails due to a non-constant offset
                 assume the offset is zero and use the size of the whole
                 enclosing object instead.  */
              base = get_base_address (arg);
              offset = 0;
            }
          /* If the base object is a pointer no upper bound on the length
             can be determined.  Otherwise the maximum length is equal to
             the size of the enclosing object minus the offset of
             the referenced subobject minus 1 (for the terminating nul).  */
          tree type = TREE_TYPE (base);
          if (TREE_CODE (type) == POINTER_TYPE
              || !VAR_P (base) || !(val = DECL_SIZE_UNIT (base)))
            val = build_all_ones_cst (size_type_node);
          else
            {
              val = DECL_SIZE_UNIT (base);
              val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
                                 size_int (offset + 1));
            }
        }
      else
        return false;
    }

  if (pdata->maxlen)
    {
      /* Adjust the more conservative bound if possible/necessary
         and fail otherwise.  */
      if (rkind != SRK_STRLEN)
        {
          if (TREE_CODE (pdata->maxlen) != INTEGER_CST
              || TREE_CODE (val) != INTEGER_CST)
            return false;

          if (tree_int_cst_lt (pdata->maxlen, val))
            pdata->maxlen = val;
          return true;
        }
      else if (simple_cst_equal (val, pdata->maxlen) != 1)
        {
          /* Fail if the length of this ARG is different from that
             previously determined from another ARG.  */
          return false;
        }
    }

  pdata->maxlen = val;
  return rkind == SRK_LENRANGE || !integer_all_onesp (val);
}
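
/* Illustrative example: for

     struct A { char a[7]; int i; } v;

   an SRK_LENRANGE query on the member array v.a takes the
   COMPONENT_REF branch above: MAXBOUND is set to the tight bound
   sizeof (v.a) - 1 == 6 and MINLEN to 0, and because the bound is
   tight, MAXLEN is then widened to sizeof (v) - 1, since a strlen
   crossing into the next member cannot be ruled out here.  */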

/* For an ARG referencing one or more strings, try to obtain the range
   of their lengths, or the size of the largest array ARG refers to if
   the range of lengths cannot be determined, and store all in *PDATA.
   For an integer ARG (when RKIND == SRK_INT_VALUE), try to determine
   the maximum constant value.
   If ARG is an SSA_NAME, follow its use-def chains.  When RKIND ==
   SRK_STRLEN, then if PDATA->MAXLEN is not equal to the determined
   length or if we are unable to determine the length, return false.
   VISITED is a bitmap of visited variables.
   RKIND determines the kind of value or range to obtain (see
   strlen_range_kind).
   Set PDATA->DECL if ARG refers to an unterminated constant array.
   On input, set ELTSIZE to 1 for normal single byte character strings,
   and either 2 or 4 for wide character strings (the size of wchar_t).
   Return true if *PDATA was successfully populated and false otherwise.  */

static bool
get_range_strlen (tree arg, bitmap *visited,
                  strlen_range_kind rkind,
                  c_strlen_data *pdata, unsigned eltsize)
{
  if (TREE_CODE (arg) != SSA_NAME)
    return get_range_strlen_tree (arg, visited, rkind, pdata, eltsize);

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  tree var = arg;
  gimple *def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
         constant length or come from another SSA_NAME with a constant
         length.  */
      if (gimple_assign_single_p (def_stmt)
          || gimple_assign_unary_nop_p (def_stmt))
        {
          tree rhs = gimple_assign_rhs1 (def_stmt);
          return get_range_strlen (rhs, visited, rkind, pdata, eltsize);
        }
      else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
        {
          tree ops[2] = { gimple_assign_rhs2 (def_stmt),
                          gimple_assign_rhs3 (def_stmt) };

          for (unsigned int i = 0; i < 2; i++)
            if (!get_range_strlen (ops[i], visited, rkind, pdata, eltsize))
              {
                if (rkind != SRK_LENRANGE)
                  return false;
                /* Set the upper bound to the maximum to prevent
                   it from being adjusted in the next iteration but
                   leave MINLEN and the more conservative MAXBOUND
                   determined so far alone (or leave them null if
                   they haven't been set yet).  That the MINLEN is
                   in fact zero can be determined from MAXLEN being
                   unbounded but the discovered minimum is used for
                   diagnostics.  */
                pdata->maxlen = build_all_ones_cst (size_type_node);
              }
          return true;
        }
      return false;

    case GIMPLE_PHI:
      /* Unless RKIND == SRK_LENRANGE, all arguments of the PHI node
         must have a constant length.  */
      for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
        {
          tree arg = gimple_phi_arg (def_stmt, i)->def;

          /* If this PHI has itself as an argument, we cannot
             determine the string length of this argument.  However,
             if we can find a constant string length for the other
             PHI args then we can still be sure that this is a
             constant string length.  So be optimistic and just
             continue with the next argument.  */
          if (arg == gimple_phi_result (def_stmt))
            continue;

          if (!get_range_strlen (arg, visited, rkind, pdata, eltsize))
            {
              if (rkind != SRK_LENRANGE)
                return false;
              /* Set the upper bound to the maximum to prevent
                 it from being adjusted in the next iteration but
                 leave MINLEN and the more conservative MAXBOUND
                 determined so far alone (or leave them null if
                 they haven't been set yet).  That the MINLEN is
                 in fact zero can be determined from MAXLEN being
                 unbounded but the discovered minimum is used for
                 diagnostics.  */
              pdata->maxlen = build_all_ones_cst (size_type_node);
            }
        }
      return true;

    default:
      return false;
    }
}

/* Try to obtain the range of the lengths of the string(s) referenced
   by ARG, or the size of the largest array ARG refers to if the range
   of lengths cannot be determined, and store all in *PDATA.  ELTSIZE
   is the expected size of the string element in bytes: 1 for char and
   some power of 2 for wide characters.
   Return true if the range [PDATA->MINLEN, PDATA->MAXLEN] is suitable
   for optimization.  Returning false means that a nonzero PDATA->MINLEN
   doesn't reflect the true lower bound of the range when PDATA->MAXLEN
   is -1 (in that case, the actual range is indeterminate, i.e.,
   [0, PTRDIFF_MAX - 2]).  */

bool
get_range_strlen (tree arg, c_strlen_data *pdata, unsigned eltsize)
{
  bitmap visited = NULL;

  if (!get_range_strlen (arg, &visited, SRK_LENRANGE, pdata, eltsize))
    {
      /* On failure extend the length range to an impossible maximum
         (a valid MAXLEN must be less than PTRDIFF_MAX - 1).  Other
         members can stay unchanged regardless.  */
      pdata->minlen = ssize_int (0);
      pdata->maxlen = build_all_ones_cst (size_type_node);
    }
  else if (!pdata->minlen)
    pdata->minlen = ssize_int (0);

  /* Unless it's null, leave the more conservative MAXBOUND unchanged.  */
  if (!pdata->maxbound)
    pdata->maxbound = pdata->maxlen;

  if (visited)
    BITMAP_FREE (visited);

  return !integer_all_onesp (pdata->maxlen);
}
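
/* Illustrative use of the entry point above:

     c_strlen_data lendata = { };
     if (get_range_strlen (arg, &lendata, 1))
       ... [lendata.minlen, lendata.maxlen] bounds the string length ...
     else
       ... the range is indeterminate; maxlen has been set to all-ones ...  */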

/* Return the maximum value for ARG given RKIND (see strlen_range_kind).
   For ARG of pointer types, NONSTR indicates if the caller is prepared
   to handle unterminated strings.  For integer ARG and when RKIND ==
   SRK_INT_VALUE, NONSTR must be null.

   If an unterminated array is discovered and our caller handles
   unterminated arrays, then bubble up the offending DECL and
   return the maximum size.  Otherwise return NULL.  */

static tree
get_maxval_strlen (tree arg, strlen_range_kind rkind, tree *nonstr = NULL)
{
  /* A non-null NONSTR is meaningless when determining the maximum
     value of an integer ARG.  */
  gcc_assert (rkind != SRK_INT_VALUE || nonstr == NULL);
  /* ARG must have an integral type when RKIND says so.  */
  gcc_assert (rkind != SRK_INT_VALUE || INTEGRAL_TYPE_P (TREE_TYPE (arg)));

  bitmap visited = NULL;

  /* Reset LENDATA.MAXLEN if the call fails or when LENDATA.MAXLEN
     is unbounded.  */
  c_strlen_data lendata = { };
  if (!get_range_strlen (arg, &visited, rkind, &lendata, /* eltsize = */ 1))
    lendata.maxlen = NULL_TREE;
  else if (lendata.maxlen && integer_all_onesp (lendata.maxlen))
    lendata.maxlen = NULL_TREE;

  if (visited)
    BITMAP_FREE (visited);

  if (nonstr)
    {
      /* For callers prepared to handle unterminated arrays set
         *NONSTR to point to the declaration of the array and return
         the maximum length/size.  */
      *nonstr = lendata.decl;
      return lendata.maxlen;
    }

  /* Fail if the constant array isn't nul-terminated.  */
  return lendata.decl ? NULL_TREE : lendata.maxlen;
}
1755
1756
1757 /* Fold a call to the strcpy builtin with arguments DEST and SRC.
1758 Replace the call with an equivalent memcpy when the length of the
1759 source is known. Return false if no simplification can be made. */
1760
1761 static bool
1762 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
1763 tree dest, tree src)
1764 {
1765 gimple *stmt = gsi_stmt (*gsi);
1766 location_t loc = gimple_location (stmt);
1767 tree fn;
1768
1769 /* If SRC and DEST are the same (and not volatile), return DEST. */
1770 if (operand_equal_p (src, dest, 0))
1771 {
1772 /* Issue -Wrestrict unless the pointers are null (those do
1773 not point to objects and so do not indicate an overlap;
1774 such calls could be the result of sanitization and jump
1775 threading). */
1776 if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
1777 {
1778 tree func = gimple_call_fndecl (stmt);
1779
1780 warning_at (loc, OPT_Wrestrict,
1781 "%qD source argument is the same as destination",
1782 func);
1783 }
1784
1785 replace_call_with_value (gsi, dest);
1786 return true;
1787 }
1788
1789 if (optimize_function_for_size_p (cfun))
1790 return false;
1791
1792 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1793 if (!fn)
1794 return false;
1795
1796 /* Set to non-null if ARG refers to an unterminated array. */
1797 tree nonstr = NULL;
1798 tree len = get_maxval_strlen (src, SRK_STRLEN, &nonstr);
1799
1800 if (nonstr)
1801 {
1802 /* Avoid folding calls with unterminated arrays. */
1803 if (!gimple_no_warning_p (stmt))
1804 warn_string_no_nul (loc, "strcpy", src, nonstr);
1805 gimple_set_no_warning (stmt, true);
1806 return false;
1807 }
1808
1809 if (!len)
1810 return false;
1811
1812 len = fold_convert_loc (loc, size_type_node, len);
1813 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1814 len = force_gimple_operand_gsi (gsi, len, true,
1815 NULL_TREE, true, GSI_SAME_STMT);
1816 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1817 replace_call_with_call_and_fold (gsi, repl);
1818 return true;
1819 }
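
/* Editorial illustration (not part of the original sources): for a
   source of known length the fold above turns

     strcpy (d, "abc");

   into the equivalent of

     memcpy (d, "abc", 4);

   where 4 is strlen ("abc") + 1, so the terminating nul is copied
   too.  D is a hypothetical destination with sufficient room.  */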
1820
1821 /* Fold a call to the strncpy builtin with arguments DEST, SRC, and LEN.
1822 LEN is the number of bytes to copy, not the length of the source.
1823 Return false if no simplification can be made. */
1824
1825 static bool
1826 gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1827 tree dest, tree src, tree len)
1828 {
1829 gimple *stmt = gsi_stmt (*gsi);
1830 location_t loc = gimple_location (stmt);
1831 bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;
1832
1833 /* If the LEN parameter is zero, return DEST. */
1834 if (integer_zerop (len))
1835 {
1836 /* Avoid warning if the destination refers to an array/pointer
1837 decorated with attribute nonstring. */
1838 if (!nonstring)
1839 {
1840 tree fndecl = gimple_call_fndecl (stmt);
1841
1842 /* Warn about the lack of nul termination: the result is not
1843 a (nul-terminated) string. */
1844 tree slen = get_maxval_strlen (src, SRK_STRLEN);
1845 if (slen && !integer_zerop (slen))
1846 warning_at (loc, OPT_Wstringop_truncation,
1847 "%G%qD destination unchanged after copying no bytes "
1848 "from a string of length %E",
1849 stmt, fndecl, slen);
1850 else
1851 warning_at (loc, OPT_Wstringop_truncation,
1852 "%G%qD destination unchanged after copying no bytes",
1853 stmt, fndecl);
1854 }
1855
1856 replace_call_with_value (gsi, dest);
1857 return true;
1858 }
1859
1860 /* We can't compare slen with len as constants below if len is not a
1861 constant. */
1862 if (TREE_CODE (len) != INTEGER_CST)
1863 return false;
1864
1865 /* Now, we must be passed a constant src ptr parameter. */
1866 tree slen = get_maxval_strlen (src, SRK_STRLEN);
1867 if (!slen || TREE_CODE (slen) != INTEGER_CST)
1868 return false;
1869
1870 /* The size of the source string including the terminating nul. */
1871 tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1872
1873 /* We do not support simplification of this case, though we do
1874 support it when expanding trees into RTL. */
1875 /* FIXME: generate a call to __builtin_memset. */
1876 if (tree_int_cst_lt (ssize, len))
1877 return false;
1878
1879 /* Diagnose truncation that leaves the copy unterminated. */
1880 maybe_diag_stxncpy_trunc (*gsi, src, len);
1881
1882 /* OK transform into builtin memcpy. */
1883 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1884 if (!fn)
1885 return false;
1886
1887 len = fold_convert_loc (loc, size_type_node, len);
1888 len = force_gimple_operand_gsi (gsi, len, true,
1889 NULL_TREE, true, GSI_SAME_STMT);
1890 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1891 replace_call_with_call_and_fold (gsi, repl);
1892
1893 return true;
1894 }
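
/* Editorial illustration (not part of the original sources), with D
   a hypothetical destination:

     strncpy (d, "ab", 3);   =>   memcpy (d, "ab", 3);

   because the bound covers the source including its nul.  A larger
   bound such as strncpy (d, "ab", 8) is left alone here, since the
   trailing bytes would also have to be zeroed.  */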
1895
1896 /* Fold function call to builtin strchr or strrchr.
1897 If both arguments are constant, evaluate and fold the result,
1898 otherwise simplify str(r)chr (str, 0) into str + strlen (str).
1899 In general strlen is significantly faster than strchr
1900 due to being a simpler operation. */
1901 static bool
1902 gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
1903 {
1904 gimple *stmt = gsi_stmt (*gsi);
1905 tree str = gimple_call_arg (stmt, 0);
1906 tree c = gimple_call_arg (stmt, 1);
1907 location_t loc = gimple_location (stmt);
1908 const char *p;
1909 char ch;
1910
1911 if (!gimple_call_lhs (stmt))
1912 return false;
1913
1914 if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
1915 {
1916 const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);
1917
1918 if (p1 == NULL)
1919 {
1920 replace_call_with_value (gsi, integer_zero_node);
1921 return true;
1922 }
1923
1924 tree len = build_int_cst (size_type_node, p1 - p);
1925 gimple_seq stmts = NULL;
1926 gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1927 POINTER_PLUS_EXPR, str, len);
1928 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1929 gsi_replace_with_seq_vops (gsi, stmts);
1930 return true;
1931 }
1932
1933 if (!integer_zerop (c))
1934 return false;
1935
1936 /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size. */
1937 if (is_strrchr && optimize_function_for_size_p (cfun))
1938 {
1939 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1940
1941 if (strchr_fn)
1942 {
1943 gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
1944 replace_call_with_call_and_fold (gsi, repl);
1945 return true;
1946 }
1947
1948 return false;
1949 }
1950
1951 tree len;
1952 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1953
1954 if (!strlen_fn)
1955 return false;
1956
1957 /* Create newstr = strlen (str). */
1958 gimple_seq stmts = NULL;
1959 gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
1960 gimple_set_location (new_stmt, loc);
1961 len = create_tmp_reg_or_ssa_name (size_type_node);
1962 gimple_call_set_lhs (new_stmt, len);
1963 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1964
1965 /* Create (str p+ strlen (str)). */
1966 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1967 POINTER_PLUS_EXPR, str, len);
1968 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1969 gsi_replace_with_seq_vops (gsi, stmts);
1970 /* gsi now points at the assignment to the lhs, get a
1971 stmt iterator to the strlen.
1972 ??? We can't use gsi_for_stmt as that doesn't work when the
1973 CFG isn't built yet. */
1974 gimple_stmt_iterator gsi2 = *gsi;
1975 gsi_prev (&gsi2);
1976 fold_stmt (&gsi2);
1977 return true;
1978 }
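
/* Editorial illustration (not part of the original sources), with S
   and P hypothetical:

     p = strchr ("hello", 'l');   =>   p = "hello" + 2;
     p = strchr (s, 0);           =>   p = s + strlen (s);

   The same applies to strrchr (s, 0); when optimizing for size it is
   instead reduced to strchr (s, 0).  */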
1979
1980 /* Fold function call to builtin strstr.
1981 If both arguments are constant, evaluate and fold the result,
1982 additionally fold strstr (x, "") into x and strstr (x, "c")
1983 into strchr (x, 'c'). */
1984 static bool
1985 gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
1986 {
1987 gimple *stmt = gsi_stmt (*gsi);
1988 tree haystack = gimple_call_arg (stmt, 0);
1989 tree needle = gimple_call_arg (stmt, 1);
1990 const char *p, *q;
1991
1992 if (!gimple_call_lhs (stmt))
1993 return false;
1994
1995 q = c_getstr (needle);
1996 if (q == NULL)
1997 return false;
1998
1999 if ((p = c_getstr (haystack)))
2000 {
2001 const char *r = strstr (p, q);
2002
2003 if (r == NULL)
2004 {
2005 replace_call_with_value (gsi, integer_zero_node);
2006 return true;
2007 }
2008
2009 tree len = build_int_cst (size_type_node, r - p);
2010 gimple_seq stmts = NULL;
2011 gimple *new_stmt
2012 = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
2013 haystack, len);
2014 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2015 gsi_replace_with_seq_vops (gsi, stmts);
2016 return true;
2017 }
2018
2019 /* For strstr (x, "") return x. */
2020 if (q[0] == '\0')
2021 {
2022 replace_call_with_value (gsi, haystack);
2023 return true;
2024 }
2025
2026 /* Transform strstr (x, "c") into strchr (x, 'c'). */
2027 if (q[1] == '\0')
2028 {
2029 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
2030 if (strchr_fn)
2031 {
2032 tree c = build_int_cst (integer_type_node, q[0]);
2033 gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
2034 replace_call_with_call_and_fold (gsi, repl);
2035 return true;
2036 }
2037 }
2038
2039 return false;
2040 }
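
/* Editorial illustration (not part of the original sources), with S
   a hypothetical string pointer:

     strstr ("abcd", "cd")   =>   "abcd" + 2
     strstr (s, "")          =>   s
     strstr (s, "c")         =>   strchr (s, 'c')  */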
2041
2042 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
2043 to the call.
2044
2045 Return false if no simplification was possible, otherwise replace
2046 the call in place and return true.
2047
2048 When SRC is the empty string the call simply returns DST.
2049
2050 Otherwise, when the length of SRC is a known constant and the
2051 block is being optimized for speed, the call is split into a
2052 strlen of DST followed by a memcpy of SRC (including its
2053 terminating nul) to DST + strlen (DST), so that the copy can
2054 later be expanded by pieces.
2055
2056 The strlen and memcpy are emitted as separate gimple statements
2057 and the original call's LHS, if any, is replaced by an
2058 assignment from DST. */
2059
2060 static bool
2061 gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
2062 {
2063 gimple *stmt = gsi_stmt (*gsi);
2064 location_t loc = gimple_location (stmt);
2065
2066 const char *p = c_getstr (src);
2067
2068 /* If the string length is zero, return the dst parameter. */
2069 if (p && *p == '\0')
2070 {
2071 replace_call_with_value (gsi, dst);
2072 return true;
2073 }
2074
2075 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
2076 return false;
2077
2078 /* See if we can store by pieces into (dst + strlen(dst)). */
2079 tree newdst;
2080 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
2081 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2082
2083 if (!strlen_fn || !memcpy_fn)
2084 return false;
2085
2086 /* If the length of the source string isn't computable, don't
2087 split strcat into strlen and memcpy. */
2088 tree len = get_maxval_strlen (src, SRK_STRLEN);
2089 if (! len)
2090 return false;
2091
2092 /* Create strlen (dst). */
2093 gimple_seq stmts = NULL, stmts2;
2094 gimple *repl = gimple_build_call (strlen_fn, 1, dst);
2095 gimple_set_location (repl, loc);
2096 newdst = create_tmp_reg_or_ssa_name (size_type_node);
2097 gimple_call_set_lhs (repl, newdst);
2098 gimple_seq_add_stmt_without_update (&stmts, repl);
2099
2100 /* Create (dst p+ strlen (dst)). */
2101 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
2102 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
2103 gimple_seq_add_seq_without_update (&stmts, stmts2);
2104
2105 len = fold_convert_loc (loc, size_type_node, len);
2106 len = size_binop_loc (loc, PLUS_EXPR, len,
2107 build_int_cst (size_type_node, 1));
2108 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
2109 gimple_seq_add_seq_without_update (&stmts, stmts2);
2110
2111 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
2112 gimple_seq_add_stmt_without_update (&stmts, repl);
2113 if (gimple_call_lhs (stmt))
2114 {
2115 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
2116 gimple_seq_add_stmt_without_update (&stmts, repl);
2117 gsi_replace_with_seq_vops (gsi, stmts);
2118 /* gsi now points at the assignment to the lhs, get a
2119 stmt iterator to the memcpy call.
2120 ??? We can't use gsi_for_stmt as that doesn't work when the
2121 CFG isn't built yet. */
2122 gimple_stmt_iterator gsi2 = *gsi;
2123 gsi_prev (&gsi2);
2124 fold_stmt (&gsi2);
2125 }
2126 else
2127 {
2128 gsi_replace_with_seq_vops (gsi, stmts);
2129 fold_stmt (gsi);
2130 }
2131 return true;
2132 }
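
/* Editorial illustration (not part of the original sources): for

     strcat (d, "xy");

   the fold described above emits the equivalent of

     tmp = strlen (d);
     memcpy (d + tmp, "xy", 3);

   copying the two characters plus the terminating nul.  D and TMP
   are hypothetical.  */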
2133
2134 /* Fold a call to the __strcat_chk builtin. DEST, SRC, and SIZE
2135 are the arguments to the call. */
2136
2137 static bool
2138 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
2139 {
2140 gimple *stmt = gsi_stmt (*gsi);
2141 tree dest = gimple_call_arg (stmt, 0);
2142 tree src = gimple_call_arg (stmt, 1);
2143 tree size = gimple_call_arg (stmt, 2);
2144 tree fn;
2145 const char *p;
2146
2147
2148 p = c_getstr (src);
2149 /* If the SRC parameter is "", return DEST. */
2150 if (p && *p == '\0')
2151 {
2152 replace_call_with_value (gsi, dest);
2153 return true;
2154 }
2155
2156 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
2157 return false;
2158
2159 /* If __builtin_strcat_chk is used, assume strcat is available. */
2160 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
2161 if (!fn)
2162 return false;
2163
2164 gimple *repl = gimple_build_call (fn, 2, dest, src);
2165 replace_call_with_call_and_fold (gsi, repl);
2166 return true;
2167 }
2168
2169 /* Simplify a call to the strncat builtin. */
2170
2171 static bool
2172 gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
2173 {
2174 gimple *stmt = gsi_stmt (*gsi);
2175 tree dst = gimple_call_arg (stmt, 0);
2176 tree src = gimple_call_arg (stmt, 1);
2177 tree len = gimple_call_arg (stmt, 2);
2178
2179 const char *p = c_getstr (src);
2180
2181 /* If the requested length is zero, or the src parameter string
2182 length is zero, return the dst parameter. */
2183 if (integer_zerop (len) || (p && *p == '\0'))
2184 {
2185 replace_call_with_value (gsi, dst);
2186 return true;
2187 }
2188
2189 if (TREE_CODE (len) != INTEGER_CST || !p)
2190 return false;
2191
2192 unsigned srclen = strlen (p);
2193
2194 int cmpsrc = compare_tree_int (len, srclen);
2195
2196 /* Return early if the requested len is less than the string length.
2197 Warnings will be issued elsewhere later. */
2198 if (cmpsrc < 0)
2199 return false;
2200
2201 unsigned HOST_WIDE_INT dstsize;
2202
2203 bool nowarn = gimple_no_warning_p (stmt);
2204
2205 if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
2206 {
2207 int cmpdst = compare_tree_int (len, dstsize);
2208
2209 if (cmpdst >= 0)
2210 {
2211 tree fndecl = gimple_call_fndecl (stmt);
2212
2213 /* Strncat copies (at most) LEN bytes and always appends
2214 the terminating NUL so the specified bound should never
2215 be equal to (or greater than) the size of the destination.
2216 If it is, the copy could overflow. */
2217 location_t loc = gimple_location (stmt);
2218 nowarn = warning_at (loc, OPT_Wstringop_overflow_,
2219 cmpdst == 0
2220 ? G_("%G%qD specified bound %E equals "
2221 "destination size")
2222 : G_("%G%qD specified bound %E exceeds "
2223 "destination size %wu"),
2224 stmt, fndecl, len, dstsize);
2225 if (nowarn)
2226 gimple_set_no_warning (stmt, true);
2227 }
2228 }
2229
2230 if (!nowarn && cmpsrc == 0)
2231 {
2232 tree fndecl = gimple_call_fndecl (stmt);
2233 location_t loc = gimple_location (stmt);
2234
2235 /* To avoid possible overflow the specified bound should also
2236 not be equal to the length of the source, even when the size
2237 of the destination is unknown (it's not an uncommon mistake
2238 to specify as the bound to strncat the length of the source). */
2239 if (warning_at (loc, OPT_Wstringop_overflow_,
2240 "%G%qD specified bound %E equals source length",
2241 stmt, fndecl, len))
2242 gimple_set_no_warning (stmt, true);
2243 }
2244
2245 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
2246
2247 /* If the replacement _DECL isn't initialized, don't do the
2248 transformation. */
2249 if (!fn)
2250 return false;
2251
2252 /* Otherwise, emit a call to strcat. */
2253 gcall *repl = gimple_build_call (fn, 2, dst, src);
2254 replace_call_with_call_and_fold (gsi, repl);
2255 return true;
2256 }
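
/* Editorial illustration (not part of the original sources): when the
   bound is known to be larger than the source length, e.g.

     strncat (d, "ab", 16);

   the call behaves exactly like strcat (d, "ab") and is replaced by
   it.  The warnings above fire first when the bound instead equals
   the source length or reaches the destination size.  D is
   hypothetical.  */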
2257
2258 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2259 LEN, and SIZE. */
2260
2261 static bool
2262 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
2263 {
2264 gimple *stmt = gsi_stmt (*gsi);
2265 tree dest = gimple_call_arg (stmt, 0);
2266 tree src = gimple_call_arg (stmt, 1);
2267 tree len = gimple_call_arg (stmt, 2);
2268 tree size = gimple_call_arg (stmt, 3);
2269 tree fn;
2270 const char *p;
2271
2272 p = c_getstr (src);
2273 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2274 if ((p && *p == '\0')
2275 || integer_zerop (len))
2276 {
2277 replace_call_with_value (gsi, dest);
2278 return true;
2279 }
2280
2281 if (! tree_fits_uhwi_p (size))
2282 return false;
2283
2284 if (! integer_all_onesp (size))
2285 {
2286 tree src_len = c_strlen (src, 1);
2287 if (src_len
2288 && tree_fits_uhwi_p (src_len)
2289 && tree_fits_uhwi_p (len)
2290 && ! tree_int_cst_lt (len, src_len))
2291 {
2292 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2293 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
2294 if (!fn)
2295 return false;
2296
2297 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2298 replace_call_with_call_and_fold (gsi, repl);
2299 return true;
2300 }
2301 return false;
2302 }
2303
2304 /* If __builtin_strncat_chk is used, assume strncat is available. */
2305 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2306 if (!fn)
2307 return false;
2308
2309 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2310 replace_call_with_call_and_fold (gsi, repl);
2311 return true;
2312 }
2313
2314 /* Build and append gimple statements to STMTS that load the first
2315 character of the memory location identified by STR. LOC is the
2316 location of the statement. */
2317
2318 static tree
2319 gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2320 {
2321 tree var;
2322
2323 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2324 tree cst_uchar_ptr_node
2325 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2326 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2327
2328 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2329 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2330 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2331
2332 gimple_assign_set_lhs (stmt, var);
2333 gimple_seq_add_stmt_without_update (stmts, stmt);
2334
2335 return var;
2336 }
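
/* Editorial illustration (not part of the original sources): the
   helper above appends the gimple equivalent of

     unsigned char var = *(const unsigned char *) str;

   and returns VAR for use by the string-comparison folds below.  */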
2337
2338 /* Fold a call to the str{n}{case}cmp builtin pointed to by the GSI
2339 iterator. The particular built-in is determined from the call's FNDECL. */
2340
2341 static bool
2342 gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
2343 {
2344 gimple *stmt = gsi_stmt (*gsi);
2345 tree callee = gimple_call_fndecl (stmt);
2346 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2347
2348 tree type = integer_type_node;
2349 tree str1 = gimple_call_arg (stmt, 0);
2350 tree str2 = gimple_call_arg (stmt, 1);
2351 tree lhs = gimple_call_lhs (stmt);
2352 HOST_WIDE_INT length = -1;
2353
2354 /* Handle strncmp and strncasecmp functions. */
2355 if (gimple_call_num_args (stmt) == 3)
2356 {
2357 tree len = gimple_call_arg (stmt, 2);
2358 if (tree_fits_uhwi_p (len))
2359 length = tree_to_uhwi (len);
2360 }
2361
2362 /* If the LEN parameter is zero, return zero. */
2363 if (length == 0)
2364 {
2365 replace_call_with_value (gsi, integer_zero_node);
2366 return true;
2367 }
2368
2369 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
2370 if (operand_equal_p (str1, str2, 0))
2371 {
2372 replace_call_with_value (gsi, integer_zero_node);
2373 return true;
2374 }
2375
2376 const char *p1 = c_getstr (str1);
2377 const char *p2 = c_getstr (str2);
2378
2379 /* For known strings, return an immediate value. */
2380 if (p1 && p2)
2381 {
2382 int r = 0;
2383 bool known_result = false;
2384
2385 switch (fcode)
2386 {
2387 case BUILT_IN_STRCMP:
2388 case BUILT_IN_STRCMP_EQ:
2389 {
2390 r = strcmp (p1, p2);
2391 known_result = true;
2392 break;
2393 }
2394 case BUILT_IN_STRNCMP:
2395 case BUILT_IN_STRNCMP_EQ:
2396 {
2397 if (length == -1)
2398 break;
2399 r = strncmp (p1, p2, length);
2400 known_result = true;
2401 break;
2402 }
2403 /* The only handleable situation is where the strings are equal
2404 (result 0), which is already handled by the operand_equal_p case. */
2405 case BUILT_IN_STRCASECMP:
2406 break;
2407 case BUILT_IN_STRNCASECMP:
2408 {
2409 if (length == -1)
2410 break;
2411 r = strncmp (p1, p2, length);
2412 if (r == 0)
2413 known_result = true;
2414 break;
2415 }
2416 default:
2417 gcc_unreachable ();
2418 }
2419
2420 if (known_result)
2421 {
2422 replace_call_with_value (gsi, build_cmp_result (type, r));
2423 return true;
2424 }
2425 }
2426
2427 bool nonzero_length = length >= 1
2428 || fcode == BUILT_IN_STRCMP
2429 || fcode == BUILT_IN_STRCMP_EQ
2430 || fcode == BUILT_IN_STRCASECMP;
2431
2432 location_t loc = gimple_location (stmt);
2433
2434 /* If the second arg is "", return *(const unsigned char*)arg1. */
2435 if (p2 && *p2 == '\0' && nonzero_length)
2436 {
2437 gimple_seq stmts = NULL;
2438 tree var = gimple_load_first_char (loc, str1, &stmts);
2439 if (lhs)
2440 {
2441 stmt = gimple_build_assign (lhs, NOP_EXPR, var);
2442 gimple_seq_add_stmt_without_update (&stmts, stmt);
2443 }
2444
2445 gsi_replace_with_seq_vops (gsi, stmts);
2446 return true;
2447 }
2448
2449 /* If the first arg is "", return -*(const unsigned char*)arg2. */
2450 if (p1 && *p1 == '\0' && nonzero_length)
2451 {
2452 gimple_seq stmts = NULL;
2453 tree var = gimple_load_first_char (loc, str2, &stmts);
2454
2455 if (lhs)
2456 {
2457 tree c = create_tmp_reg_or_ssa_name (integer_type_node);
2458 stmt = gimple_build_assign (c, NOP_EXPR, var);
2459 gimple_seq_add_stmt_without_update (&stmts, stmt);
2460
2461 stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
2462 gimple_seq_add_stmt_without_update (&stmts, stmt);
2463 }
2464
2465 gsi_replace_with_seq_vops (gsi, stmts);
2466 return true;
2467 }
2468
2469 /* If len parameter is one, return an expression corresponding to
2470 (*(const unsigned char*)arg2 - *(const unsigned char*)arg1). */
2471 if (fcode == BUILT_IN_STRNCMP && length == 1)
2472 {
2473 gimple_seq stmts = NULL;
2474 tree temp1 = gimple_load_first_char (loc, str1, &stmts);
2475 tree temp2 = gimple_load_first_char (loc, str2, &stmts);
2476
2477 if (lhs)
2478 {
2479 tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
2480 gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
2481 gimple_seq_add_stmt_without_update (&stmts, convert1);
2482
2483 tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
2484 gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
2485 gimple_seq_add_stmt_without_update (&stmts, convert2);
2486
2487 stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
2488 gimple_seq_add_stmt_without_update (&stmts, stmt);
2489 }
2490
2491 gsi_replace_with_seq_vops (gsi, stmts);
2492 return true;
2493 }
2494
2495 /* If LENGTH is larger than the length of one constant string,
2496 replace strncmp with the corresponding strcmp. */
2497 if (fcode == BUILT_IN_STRNCMP
2498 && length > 0
2499 && ((p2 && (size_t) length > strlen (p2))
2500 || (p1 && (size_t) length > strlen (p1))))
2501 {
2502 tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
2503 if (!fn)
2504 return false;
2505 gimple *repl = gimple_build_call (fn, 2, str1, str2);
2506 replace_call_with_call_and_fold (gsi, repl);
2507 return true;
2508 }
2509
2510 return false;
2511 }
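
/* Editorial illustration (not part of the original sources), with A,
   B and S hypothetical pointers:

     strcmp (s, s)           =>   0
     strcmp (s, "")          =>   *(const unsigned char *) s
     strncmp (a, b, 1)       =>   *(const unsigned char *) a
                                  - *(const unsigned char *) b
     strncmp (a, "ab", 5)    =>   strcmp (a, "ab")

   Fully constant operands are evaluated outright instead.  */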
2512
2513 /* Fold a call to the memchr pointed by GSI iterator. */
2514
2515 static bool
2516 gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
2517 {
2518 gimple *stmt = gsi_stmt (*gsi);
2519 tree lhs = gimple_call_lhs (stmt);
2520 tree arg1 = gimple_call_arg (stmt, 0);
2521 tree arg2 = gimple_call_arg (stmt, 1);
2522 tree len = gimple_call_arg (stmt, 2);
2523
2524 /* If the LEN parameter is zero, return zero. */
2525 if (integer_zerop (len))
2526 {
2527 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2528 return true;
2529 }
2530
2531 char c;
2532 if (TREE_CODE (arg2) != INTEGER_CST
2533 || !tree_fits_uhwi_p (len)
2534 || !target_char_cst_p (arg2, &c))
2535 return false;
2536
2537 unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
2538 unsigned HOST_WIDE_INT string_length;
2539 const char *p1 = c_getstr (arg1, &string_length);
2540
2541 if (p1)
2542 {
2543 const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
2544 if (r == NULL)
2545 {
2546 if (length <= string_length)
2547 {
2548 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2549 return true;
2550 }
2551 }
2552 else
2553 {
2554 unsigned HOST_WIDE_INT offset = r - p1;
2555 gimple_seq stmts = NULL;
2556 if (lhs != NULL_TREE)
2557 {
2558 tree offset_cst = build_int_cst (TREE_TYPE (len), offset);
2559 gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
2560 arg1, offset_cst);
2561 gimple_seq_add_stmt_without_update (&stmts, stmt);
2562 }
2563 else
2564 gimple_seq_add_stmt_without_update (&stmts,
2565 gimple_build_nop ());
2566
2567 gsi_replace_with_seq_vops (gsi, stmts);
2568 return true;
2569 }
2570 }
2571
2572 return false;
2573 }
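
/* Editorial illustration (not part of the original sources), with S
   and C hypothetical:

     memchr (s, c, 0)           =>   (void *) 0
     memchr ("hello", 'l', 5)   =>   "hello" + 2
     memchr ("ab", 'z', 2)      =>   (void *) 0

   The last fold applies only because the bound does not reach past
   the known contents of the string.  */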
2574
2575 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
2576 to the call. UNLOCKED is true if this is actually a call to
2577 fputs_unlocked. The transformation is only done when the return
2578 value of the call is ignored and the length of the string argument
2579 can be determined. Return false if no simplification
2580 was possible. */
2581
2582 static bool
2583 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
2584 tree arg0, tree arg1,
2585 bool unlocked)
2586 {
2587 gimple *stmt = gsi_stmt (*gsi);
2588
2589 /* If we're using an unlocked function, assume the other unlocked
2590 functions exist explicitly. */
2591 tree const fn_fputc = (unlocked
2592 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
2593 : builtin_decl_implicit (BUILT_IN_FPUTC));
2594 tree const fn_fwrite = (unlocked
2595 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
2596 : builtin_decl_implicit (BUILT_IN_FWRITE));
2597
2598 /* If the return value is used, don't do the transformation. */
2599 if (gimple_call_lhs (stmt))
2600 return false;
2601
2602 /* Get the length of the string passed to fputs. If the length
2603 can't be determined, punt. */
2604 tree len = get_maxval_strlen (arg0, SRK_STRLEN);
2605 if (!len
2606 || TREE_CODE (len) != INTEGER_CST)
2607 return false;
2608
2609 switch (compare_tree_int (len, 1))
2610 {
2611 case -1: /* length is 0, delete the call entirely. */
2612 replace_call_with_value (gsi, integer_zero_node);
2613 return true;
2614
2615 case 0: /* length is 1, call fputc. */
2616 {
2617 const char *p = c_getstr (arg0);
2618 if (p != NULL)
2619 {
2620 if (!fn_fputc)
2621 return false;
2622
2623 gimple *repl = gimple_build_call (fn_fputc, 2,
2624 build_int_cst
2625 (integer_type_node, p[0]), arg1);
2626 replace_call_with_call_and_fold (gsi, repl);
2627 return true;
2628 }
2629 }
2630 /* FALLTHROUGH */
2631 case 1: /* length is greater than 1, call fwrite. */
2632 {
2633 /* If optimizing for size keep fputs. */
2634 if (optimize_function_for_size_p (cfun))
2635 return false;
2636 /* New argument list transforming fputs(string, stream) to
2637 fwrite(string, 1, len, stream). */
2638 if (!fn_fwrite)
2639 return false;
2640
2641 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
2642 size_one_node, len, arg1);
2643 replace_call_with_call_and_fold (gsi, repl);
2644 return true;
2645 }
2646 default:
2647 gcc_unreachable ();
2648 }
2649 return false;
2650 }
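
/* Editorial illustration (not part of the original sources), with F
   a hypothetical FILE pointer and the call's result unused:

     fputs ("", f)      =>   (call removed)
     fputs ("a", f)     =>   fputc ('a', f)
     fputs ("abc", f)   =>   fwrite ("abc", 1, 3, f)

   The fwrite form is skipped when optimizing for size.  */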
2651
2652 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
2653 DEST, SRC, LEN, and SIZE are the arguments to the call.
2654 FCODE is the BUILT_IN_* code of the builtin. The call is replaced
2655 with the corresponding unchecked function when SIZE is known to be
2656 at least as large as the (maximum) LEN. */
2657
2658 static bool
2659 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
2660 tree dest, tree src, tree len, tree size,
2661 enum built_in_function fcode)
2662 {
2663 gimple *stmt = gsi_stmt (*gsi);
2664 location_t loc = gimple_location (stmt);
2665 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2666 tree fn;
2667
2668 /* If SRC and DEST are the same (and not volatile), return DEST
2669 (resp. DEST+LEN for __mempcpy_chk). */
2670 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
2671 {
2672 if (fcode != BUILT_IN_MEMPCPY_CHK)
2673 {
2674 replace_call_with_value (gsi, dest);
2675 return true;
2676 }
2677 else
2678 {
2679 gimple_seq stmts = NULL;
2680 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
2681 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
2682 TREE_TYPE (dest), dest, len);
2683 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2684 replace_call_with_value (gsi, temp);
2685 return true;
2686 }
2687 }
2688
2689 if (! tree_fits_uhwi_p (size))
2690 return false;
2691
2692 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
2693 if (! integer_all_onesp (size))
2694 {
2695 if (! tree_fits_uhwi_p (len))
2696 {
2697 /* If LEN is not constant, try MAXLEN too.
2698 For MAXLEN only allow optimizing into non-_ocs function
2699 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2700 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2701 {
2702 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
2703 {
2704 /* (void) __mempcpy_chk () can be optimized into
2705 (void) __memcpy_chk (). */
2706 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2707 if (!fn)
2708 return false;
2709
2710 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2711 replace_call_with_call_and_fold (gsi, repl);
2712 return true;
2713 }
2714 return false;
2715 }
2716 }
2717 else
2718 maxlen = len;
2719
2720 if (tree_int_cst_lt (size, maxlen))
2721 return false;
2722 }
2723
2724 fn = NULL_TREE;
2725 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
2726 mem{cpy,pcpy,move,set} is available. */
2727 switch (fcode)
2728 {
2729 case BUILT_IN_MEMCPY_CHK:
2730 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
2731 break;
2732 case BUILT_IN_MEMPCPY_CHK:
2733 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
2734 break;
2735 case BUILT_IN_MEMMOVE_CHK:
2736 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
2737 break;
2738 case BUILT_IN_MEMSET_CHK:
2739 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
2740 break;
2741 default:
2742 break;
2743 }
2744
2745 if (!fn)
2746 return false;
2747
2748 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2749 replace_call_with_call_and_fold (gsi, repl);
2750 return true;
2751 }
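
/* Editorial illustration (not part of the original sources), with D
   and S hypothetical:

     __builtin___memcpy_chk (d, s, 16, 32)   =>   memcpy (d, s, 16)

   because the object size 32 is known to cover the 16 copied bytes;
   with an unknown or too-small object size the checking call is
   kept.  */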
2752
2753 /* Fold a call to the __st[rp]cpy_chk builtin.
2754 DEST, SRC, and SIZE are the arguments to the call.
2755 FCODE is the BUILT_IN_* code of the builtin. The call is replaced
2756 with the corresponding unchecked function when SIZE is known to
2757 exceed the maximum length of the SRC string. */
2758
2759 static bool
2760 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
2761 tree dest,
2762 tree src, tree size,
2763 enum built_in_function fcode)
2764 {
2765 gimple *stmt = gsi_stmt (*gsi);
2766 location_t loc = gimple_location (stmt);
2767 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2768 tree len, fn;
2769
2770 /* If SRC and DEST are the same (and not volatile), return DEST. */
2771 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
2772 {
2773 /* Issue -Wrestrict unless the pointers are null (those do
2774 not point to objects and so do not indicate an overlap;
2775 such calls could be the result of sanitization and jump
2776 threading). */
2777 if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
2778 {
2779 tree func = gimple_call_fndecl (stmt);
2780
2781 warning_at (loc, OPT_Wrestrict,
2782 "%qD source argument is the same as destination",
2783 func);
2784 }
2785
2786 replace_call_with_value (gsi, dest);
2787 return true;
2788 }
2789
2790 if (! tree_fits_uhwi_p (size))
2791 return false;
2792
2793 tree maxlen = get_maxval_strlen (src, SRK_STRLENMAX);
2794 if (! integer_all_onesp (size))
2795 {
2796 len = c_strlen (src, 1);
2797 if (! len || ! tree_fits_uhwi_p (len))
2798 {
2799 /* If LEN is not constant, try MAXLEN too.
2800 For MAXLEN only allow optimizing into non-_ocs function
2801 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2802 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2803 {
2804 if (fcode == BUILT_IN_STPCPY_CHK)
2805 {
2806 if (! ignore)
2807 return false;
2808
2809 /* If return value of __stpcpy_chk is ignored,
2810 optimize into __strcpy_chk. */
2811 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
2812 if (!fn)
2813 return false;
2814
2815 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2816 replace_call_with_call_and_fold (gsi, repl);
2817 return true;
2818 }
2819
2820 if (! len || TREE_SIDE_EFFECTS (len))
2821 return false;
2822
2823 /* If c_strlen returned something, but not a constant,
2824 transform __strcpy_chk into __memcpy_chk. */
2825 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2826 if (!fn)
2827 return false;
2828
2829 gimple_seq stmts = NULL;
2830 len = force_gimple_operand (len, &stmts, true, NULL_TREE);
2831 len = gimple_convert (&stmts, loc, size_type_node, len);
2832 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
2833 build_int_cst (size_type_node, 1));
2834 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2835 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2836 replace_call_with_call_and_fold (gsi, repl);
2837 return true;
2838 }
2839 }
2840 else
2841 maxlen = len;
2842
2843 if (! tree_int_cst_lt (maxlen, size))
2844 return false;
2845 }
2846
2847 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
2848 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
2849 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
2850 if (!fn)
2851 return false;
2852
2853 gimple *repl = gimple_build_call (fn, 2, dest, src);
2854 replace_call_with_call_and_fold (gsi, repl);
2855 return true;
2856 }
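
/* Editorial illustration (not part of the original sources): when the
   source is known to fit, the checked call degenerates to the plain
   one, e.g.

     __builtin___strcpy_chk (d, "abc", 8)   =>   strcpy (d, "abc")

   since strlen ("abc") < 8.  D is hypothetical.  */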
2857
2858 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
2859 are the arguments to the call. The call is replaced with the
2860 corresponding unchecked function when SIZE is known to be at least
2861 as large as the (maximum) LEN. FCODE is the BUILT_IN_* code of the builtin. */
2862
2863 static bool
2864 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
2865 tree dest, tree src,
2866 tree len, tree size,
2867 enum built_in_function fcode)
2868 {
2869 gimple *stmt = gsi_stmt (*gsi);
2870 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2871 tree fn;
2872
2873 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
2874 {
2875 /* If return value of __stpncpy_chk is ignored,
2876 optimize into __strncpy_chk. */
2877 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
2878 if (fn)
2879 {
2880 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2881 replace_call_with_call_and_fold (gsi, repl);
2882 return true;
2883 }
2884 }
2885
2886 if (! tree_fits_uhwi_p (size))
2887 return false;
2888
2889 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
2890 if (! integer_all_onesp (size))
2891 {
2892 if (! tree_fits_uhwi_p (len))
2893 {
2894 /* If LEN is not constant, try MAXLEN too.
2895 For MAXLEN only allow optimizing into non-_ocs function
2896 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2897 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2898 return false;
2899 }
2900 else
2901 maxlen = len;
2902
2903 if (tree_int_cst_lt (size, maxlen))
2904 return false;
2905 }
2906
2907 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2908 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
2909 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
2910 if (!fn)
2911 return false;
2912
2913 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2914 replace_call_with_call_and_fold (gsi, repl);
2915 return true;
2916 }
2917
2918 /* Fold a call to the stpcpy builtin with arguments DEST and SRC.
2919 Return false if no simplification can be made. */
2920
2921 static bool
2922 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
2923 {
2924 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2925 location_t loc = gimple_location (stmt);
2926 tree dest = gimple_call_arg (stmt, 0);
2927 tree src = gimple_call_arg (stmt, 1);
2928 tree fn, lenp1;
2929
2930 /* If the result is unused, replace stpcpy with strcpy. */
2931 if (gimple_call_lhs (stmt) == NULL_TREE)
2932 {
2933 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2934 if (!fn)
2935 return false;
2936 gimple_call_set_fndecl (stmt, fn);
2937 fold_stmt (gsi);
2938 return true;
2939 }
2940
2941 /* Set to non-null if ARG refers to an unterminated array. */
2942 c_strlen_data data = { };
2943 tree len = c_strlen (src, 1, &data, 1);
2944 if (!len
2945 || TREE_CODE (len) != INTEGER_CST)
2946 {
2947 data.decl = unterminated_array (src);
2948 if (!data.decl)
2949 return false;
2950 }
2951
2952 if (data.decl)
2953 {
2954 /* Avoid folding calls with unterminated arrays. */
2955 if (!gimple_no_warning_p (stmt))
2956 warn_string_no_nul (loc, "stpcpy", src, data.decl);
2957 gimple_set_no_warning (stmt, true);
2958 return false;
2959 }
2960
2961 if (optimize_function_for_size_p (cfun)
2962 /* If length is zero it's small enough. */
2963 && !integer_zerop (len))
2964 return false;
2965
2966 /* If the source has a known length replace stpcpy with memcpy. */
2967 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2968 if (!fn)
2969 return false;
2970
2971 gimple_seq stmts = NULL;
2972 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
2973 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
2974 tem, build_int_cst (size_type_node, 1));
2975 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2976 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
2977 gimple_set_vuse (repl, gimple_vuse (stmt));
2978 gimple_set_vdef (repl, gimple_vdef (stmt));
2979 if (gimple_vdef (repl)
2980 && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
2981 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
2982 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
2983 /* Replace the result with dest + len. */
2984 stmts = NULL;
2985 tem = gimple_convert (&stmts, loc, sizetype, len);
2986 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2987 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
2988 POINTER_PLUS_EXPR, dest, tem);
2989 gsi_replace (gsi, ret, false);
2990 /* Finally fold the memcpy call. */
2991 gimple_stmt_iterator gsi2 = *gsi;
2992 gsi_prev (&gsi2);
2993 fold_stmt (&gsi2);
2994 return true;
2995 }
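
/* Editorial illustration (not part of the original sources): for a
   source of known length, e.g.

     q = stpcpy (d, "abc");

   the fold above emits the equivalent of

     memcpy (d, "abc", 4);
     q = d + 3;

   i.e. the result points at the copied nul.  D and Q are
   hypothetical.  */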
2996
2997 /* Fold a call to the __{,v}snprintf_chk builtin pointed to by GSI.
2998 Return false if a normal call should be emitted rather than
2999 transforming the call in place. FCODE is either BUILT_IN_SNPRINTF_CHK
3000 or BUILT_IN_VSNPRINTF_CHK. The call is converted to {,v}snprintf
3001 when the (maximum) bound is known not to exceed SIZE. */
3002
3003 static bool
3004 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
3005 enum built_in_function fcode)
3006 {
3007 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3008 tree dest, size, len, fn, fmt, flag;
3009 const char *fmt_str;
3010
3011 /* Verify the required arguments in the original call. */
3012 if (gimple_call_num_args (stmt) < 5)
3013 return false;
3014
3015 dest = gimple_call_arg (stmt, 0);
3016 len = gimple_call_arg (stmt, 1);
3017 flag = gimple_call_arg (stmt, 2);
3018 size = gimple_call_arg (stmt, 3);
3019 fmt = gimple_call_arg (stmt, 4);
3020
3021 if (! tree_fits_uhwi_p (size))
3022 return false;
3023
3024 if (! integer_all_onesp (size))
3025 {
3026 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
3027 if (! tree_fits_uhwi_p (len))
3028 {
3029 /* If LEN is not constant, try MAXLEN too.
3030 For MAXLEN only allow optimizing into non-_ocs function
3031 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
3032 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
3033 return false;
3034 }
3035 else
3036 maxlen = len;
3037
3038 if (tree_int_cst_lt (size, maxlen))
3039 return false;
3040 }
3041
3042 if (!init_target_chars ())
3043 return false;
3044
3045 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
3046 or if format doesn't contain % chars or is "%s". */
3047 if (! integer_zerop (flag))
3048 {
3049 fmt_str = c_getstr (fmt);
3050 if (fmt_str == NULL)
3051 return false;
3052 if (strchr (fmt_str, target_percent) != NULL
3053 && strcmp (fmt_str, target_percent_s))
3054 return false;
3055 }
3056
3057 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
3058 available. */
3059 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
3060 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
3061 if (!fn)
3062 return false;
3063
3064 /* Replace the called function and the first 5 arguments by 3, retaining
3065 the trailing varargs. */
3066 gimple_call_set_fndecl (stmt, fn);
3067 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
3068 gimple_call_set_arg (stmt, 0, dest);
3069 gimple_call_set_arg (stmt, 1, len);
3070 gimple_call_set_arg (stmt, 2, fmt);
3071 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
3072 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
3073 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
3074 fold_stmt (gsi);
3075 return true;
3076 }
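
/* Editorial illustration (not part of the original sources), with D
   and S hypothetical:

     __builtin___snprintf_chk (d, 8, 0, 16, "%s", s)
       =>   snprintf (d, 8, "%s", s)

   because the bound 8 is known not to exceed the object size 16.  */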
3077
3078 /* Fold a call to the __{,v}sprintf_chk builtin pointed to by GSI.
3079 Return false if a normal call should be emitted rather than
3080 transforming the call in place. FCODE is either BUILT_IN_SPRINTF_CHK
3081 or BUILT_IN_VSPRINTF_CHK. */
3082
3083 static bool
3084 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
3085 enum built_in_function fcode)
3086 {
3087 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3088 tree dest, size, len, fn, fmt, flag;
3089 const char *fmt_str;
3090 unsigned nargs = gimple_call_num_args (stmt);
3091
3092 /* Verify the required arguments in the original call. */
3093 if (nargs < 4)
3094 return false;
3095 dest = gimple_call_arg (stmt, 0);
3096 flag = gimple_call_arg (stmt, 1);
3097 size = gimple_call_arg (stmt, 2);
3098 fmt = gimple_call_arg (stmt, 3);
3099
3100 if (! tree_fits_uhwi_p (size))
3101 return false;
3102
3103 len = NULL_TREE;
3104
3105 if (!init_target_chars ())
3106 return false;
3107
3108 /* Check whether the format is a literal string constant. */
3109 fmt_str = c_getstr (fmt);
3110 if (fmt_str != NULL)
3111 {
3112 /* If the format doesn't contain % args or %%, we know the size. */
3113 if (strchr (fmt_str, target_percent) == 0)
3114 {
3115 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
3116 len = build_int_cstu (size_type_node, strlen (fmt_str));
3117 }
3118 /* If the format is "%s" and first ... argument is a string literal,
3119 we know the size too. */
3120 else if (fcode == BUILT_IN_SPRINTF_CHK
3121 && strcmp (fmt_str, target_percent_s) == 0)
3122 {
3123 tree arg;
3124
3125 if (nargs == 5)
3126 {
3127 arg = gimple_call_arg (stmt, 4);
3128 if (POINTER_TYPE_P (TREE_TYPE (arg)))
3129 {
3130 len = c_strlen (arg, 1);
3131 if (! len || ! tree_fits_uhwi_p (len))
3132 len = NULL_TREE;
3133 }
3134 }
3135 }
3136 }
3137
3138 if (! integer_all_onesp (size))
3139 {
3140 if (! len || ! tree_int_cst_lt (len, size))
3141 return false;
3142 }
3143
3144 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
3145 or if format doesn't contain % chars or is "%s". */
3146 if (! integer_zerop (flag))
3147 {
3148 if (fmt_str == NULL)
3149 return false;
3150 if (strchr (fmt_str, target_percent) != NULL
3151 && strcmp (fmt_str, target_percent_s))
3152 return false;
3153 }
3154
3155 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
3156 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
3157 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
3158 if (!fn)
3159 return false;
3160
3161 /* Replace the called function and the first 4 arguments by 2, retaining
3162 the trailing varargs. */
3163 gimple_call_set_fndecl (stmt, fn);
3164 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
3165 gimple_call_set_arg (stmt, 0, dest);
3166 gimple_call_set_arg (stmt, 1, fmt);
3167 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
3168 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
3169 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
3170 fold_stmt (gsi);
3171 return true;
3172 }
3173
3174 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
3175 ORIG may be null if this is a 2-argument call. We don't attempt to
3176 simplify calls with more than 3 arguments.
3177
3178 Return true if simplification was possible, otherwise false. */
3179
3180 bool
3181 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
3182 {
3183 gimple *stmt = gsi_stmt (*gsi);
3184 tree dest = gimple_call_arg (stmt, 0);
3185 tree fmt = gimple_call_arg (stmt, 1);
3186 tree orig = NULL_TREE;
3187 const char *fmt_str = NULL;
3188
3189 /* Verify the required arguments in the original call. We deal with two
3190 types of sprintf() calls: 'sprintf (str, fmt)' and
3191 'sprintf (dest, "%s", orig)'. */
3192 if (gimple_call_num_args (stmt) > 3)
3193 return false;
3194
3195 if (gimple_call_num_args (stmt) == 3)
3196 orig = gimple_call_arg (stmt, 2);
3197
3198 /* Check whether the format is a literal string constant. */
3199 fmt_str = c_getstr (fmt);
3200 if (fmt_str == NULL)
3201 return false;
3202
3203 if (!init_target_chars ())
3204 return false;
3205
3206 /* If the format doesn't contain % args or %%, use strcpy. */
3207 if (strchr (fmt_str, target_percent) == NULL)
3208 {
3209 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3210
3211 if (!fn)
3212 return false;
3213
3214 /* Don't optimize sprintf (buf, "abc", ptr++). */
3215 if (orig)
3216 return false;
3217
3218 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
3219 'format' is known to contain no % formats. */
3220 gimple_seq stmts = NULL;
3221 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3222
3223 /* Propagate the NO_WARNING bit to avoid issuing the same
3224 warning more than once. */
3225 if (gimple_no_warning_p (stmt))
3226 gimple_set_no_warning (repl, true);
3227
3228 gimple_seq_add_stmt_without_update (&stmts, repl);
3229 if (tree lhs = gimple_call_lhs (stmt))
3230 {
3231 repl = gimple_build_assign (lhs, build_int_cst (TREE_TYPE (lhs),
3232 strlen (fmt_str)));
3233 gimple_seq_add_stmt_without_update (&stmts, repl);
3234 gsi_replace_with_seq_vops (gsi, stmts);
3235 /* gsi now points at the assignment to the lhs, get a
3236 stmt iterator to the strcpy call.
3237 ??? We can't use gsi_for_stmt as that doesn't work when the
3238 CFG isn't built yet. */
3239 gimple_stmt_iterator gsi2 = *gsi;
3240 gsi_prev (&gsi2);
3241 fold_stmt (&gsi2);
3242 }
3243 else
3244 {
3245 gsi_replace_with_seq_vops (gsi, stmts);
3246 fold_stmt (gsi);
3247 }
3248 return true;
3249 }
3250
3251 /* If the format is "%s", use strcpy; a used result needs a known length. */
3252 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3253 {
3254 tree fn;
3255 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3256
3257 if (!fn)
3258 return false;
3259
3260 /* Don't crash on sprintf (str1, "%s"). */
3261 if (!orig)
3262 return false;
3263
3264 tree orig_len = NULL_TREE;
3265 if (gimple_call_lhs (stmt))
3266 {
3267 orig_len = get_maxval_strlen (orig, SRK_STRLEN);
3268 if (!orig_len)
3269 return false;
3270 }
3271
3272 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
3273 gimple_seq stmts = NULL;
3274 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3275
3276 /* Propagate the NO_WARNING bit to avoid issuing the same
3277 warning more than once. */
3278 if (gimple_no_warning_p (stmt))
3279 gimple_set_no_warning (repl, true);
3280
3281 gimple_seq_add_stmt_without_update (&stmts, repl);
3282 if (tree lhs = gimple_call_lhs (stmt))
3283 {
3284 if (!useless_type_conversion_p (TREE_TYPE (lhs),
3285 TREE_TYPE (orig_len)))
3286 orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
3287 repl = gimple_build_assign (lhs, orig_len);
3288 gimple_seq_add_stmt_without_update (&stmts, repl);
3289 gsi_replace_with_seq_vops (gsi, stmts);
3290 /* gsi now points at the assignment to the lhs, get a
3291 stmt iterator to the strcpy call.
3292 ??? We can't use gsi_for_stmt as that doesn't work when the
3293 CFG isn't built yet. */
3294 gimple_stmt_iterator gsi2 = *gsi;
3295 gsi_prev (&gsi2);
3296 fold_stmt (&gsi2);
3297 }
3298 else
3299 {
3300 gsi_replace_with_seq_vops (gsi, stmts);
3301 fold_stmt (gsi);
3302 }
3303 return true;
3304 }
3305 return false;
3306 }
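
/* Editorial illustration (not part of the original sources), with BUF
   and S hypothetical:

     sprintf (buf, "abc")      =>   strcpy (buf, "abc")   [result 3]
     sprintf (buf, "%s", s)    =>   strcpy (buf, s)

   In the %s form a used result additionally requires a known
   strlen (s) so the return value can be materialized.  */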
3307
3308 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
3309 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
3310 attempt to simplify calls with more than 4 arguments.
3311
3312 Return true if simplification was possible, otherwise false. */
3313
3314 bool
3315 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
3316 {
3317 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3318 tree dest = gimple_call_arg (stmt, 0);
3319 tree destsize = gimple_call_arg (stmt, 1);
3320 tree fmt = gimple_call_arg (stmt, 2);
3321 tree orig = NULL_TREE;
3322 const char *fmt_str = NULL;
3323
3324 if (gimple_call_num_args (stmt) > 4)
3325 return false;
3326
3327 if (gimple_call_num_args (stmt) == 4)
3328 orig = gimple_call_arg (stmt, 3);
3329
3330 if (!tree_fits_uhwi_p (destsize))
3331 return false;
3332 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
3333
3334 /* Check whether the format is a literal string constant. */
3335 fmt_str = c_getstr (fmt);
3336 if (fmt_str == NULL)
3337 return false;
3338
3339 if (!init_target_chars ())
3340 return false;
3341
3342 /* If the format doesn't contain % args or %%, use strcpy. */
3343 if (strchr (fmt_str, target_percent) == NULL)
3344 {
3345 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3346 if (!fn)
3347 return false;
3348
3349 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
3350 if (orig)
3351 return false;
3352
3353 /* We could expand this as
3354 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
3355 or to
3356 memcpy (str, fmt_with_nul_at_cstm1, cst);
3357 but in the former case that might increase code size
3358 and in the latter case grow .rodata section too much.
3359 So punt for now. */
3360 size_t len = strlen (fmt_str);
3361 if (len >= destlen)
3362 return false;
3363
3364 gimple_seq stmts = NULL;
3365 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3366 gimple_seq_add_stmt_without_update (&stmts, repl);
3367 if (tree lhs = gimple_call_lhs (stmt))
3368 {
3369 repl = gimple_build_assign (lhs,
3370 build_int_cst (TREE_TYPE (lhs), len));
3371 gimple_seq_add_stmt_without_update (&stmts, repl);
3372 gsi_replace_with_seq_vops (gsi, stmts);
3373 /* gsi now points at the assignment to the lhs, get a
3374 stmt iterator to the strcpy call.
3375 ??? We can't use gsi_for_stmt as that doesn't work when the
3376 CFG isn't built yet. */
3377 gimple_stmt_iterator gsi2 = *gsi;
3378 gsi_prev (&gsi2);
3379 fold_stmt (&gsi2);
3380 }
3381 else
3382 {
3383 gsi_replace_with_seq_vops (gsi, stmts);
3384 fold_stmt (gsi);
3385 }
3386 return true;
3387 }
3388
3389 /* If the format is "%s", use strcpy when strlen (ORIG) is known to fit. */
3390 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3391 {
3392 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3393 if (!fn)
3394 return false;
3395
3396 /* Don't crash on snprintf (str1, cst, "%s"). */
3397 if (!orig)
3398 return false;
3399
3400 tree orig_len = get_maxval_strlen (orig, SRK_STRLEN);
3401 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
3402 return false;
3403
3404 /* We could expand this as
3405 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
3406 or to
3407 memcpy (str1, str2_with_nul_at_cstm1, cst);
3408 but in the former case that might increase code size
3409 and in the latter case grow .rodata section too much.
3410 So punt for now. */
3411 if (compare_tree_int (orig_len, destlen) >= 0)
3412 return false;
3413
3414 /* Convert snprintf (str1, cst, "%s", str2) into
3415 strcpy (str1, str2) if strlen (str2) < cst. */
3416 gimple_seq stmts = NULL;
3417 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3418 gimple_seq_add_stmt_without_update (&stmts, repl);
3419 if (tree lhs = gimple_call_lhs (stmt))
3420 {
3421 if (!useless_type_conversion_p (TREE_TYPE (lhs),
3422 TREE_TYPE (orig_len)))
3423 orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
3424 repl = gimple_build_assign (lhs, orig_len);
3425 gimple_seq_add_stmt_without_update (&stmts, repl);
3426 gsi_replace_with_seq_vops (gsi, stmts);
3427 /* gsi now points at the assignment to the lhs, get a
3428 stmt iterator to the strcpy call.
3429 ??? We can't use gsi_for_stmt as that doesn't work when the
3430 CFG isn't built yet. */
3431 gimple_stmt_iterator gsi2 = *gsi;
3432 gsi_prev (&gsi2);
3433 fold_stmt (&gsi2);
3434 }
3435 else
3436 {
3437 gsi_replace_with_seq_vops (gsi, stmts);
3438 fold_stmt (gsi);
3439 }
3440 return true;
3441 }
3442 return false;
3443 }
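
/* Editorial illustration (not part of the original sources), with BUF
   and S hypothetical:

     snprintf (buf, 8, "abc")     =>   strcpy (buf, "abc")   [result 3]
     snprintf (buf, 8, "%s", s)   =>   strcpy (buf, s)
                                       when strlen (s) < 8 is known

   No fold happens when the output could be truncated.  */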
3444
3445 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
3446 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
3447 more than 3 arguments, and ARG may be null in the 2-argument case.
3448
3449 Return false if no simplification was possible, otherwise replace
3450 the call in place and return true. FCODE is the BUILT_IN_*
3451 code of the function to be simplified. */
3452
3453 static bool
3454 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
3455 tree fp, tree fmt, tree arg,
3456 enum built_in_function fcode)
3457 {
3458 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3459 tree fn_fputc, fn_fputs;
3460 const char *fmt_str = NULL;
3461
3462 /* If the return value is used, don't do the transformation. */
3463 if (gimple_call_lhs (stmt) != NULL_TREE)
3464 return false;
3465
3466 /* Check whether the format is a literal string constant. */
3467 fmt_str = c_getstr (fmt);
3468 if (fmt_str == NULL)
3469 return false;
3470
3471 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
3472 {
3473 /* If we're using an unlocked function, assume the other
3474 unlocked functions exist explicitly. */
3475 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
3476 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
3477 }
3478 else
3479 {
3480 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
3481 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
3482 }
3483
3484 if (!init_target_chars ())
3485 return false;
3486
3487 /* If the format doesn't contain % args or %%, use fputs. */
3488 if (strchr (fmt_str, target_percent) == NULL)
3489 {
3490 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
3491 && arg)
3492 return false;
3493
3494 /* If the format specifier was "", fprintf does nothing. */
3495 if (fmt_str[0] == '\0')
3496 {
3497 replace_call_with_value (gsi, NULL_TREE);
3498 return true;
3499 }
3500
3501 /* When "string" doesn't contain %, replace all cases of
3502 fprintf (fp, string) with fputs (string, fp). The fputs
3503 builtin will take care of special cases like length == 1. */
3504 if (fn_fputs)
3505 {
3506 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
3507 replace_call_with_call_and_fold (gsi, repl);
3508 return true;
3509 }
3510 }
3511
3512 /* The other optimizations can be done only on the non-va_list variants. */
3513 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
3514 return false;
3515
3516 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3517 else if (strcmp (fmt_str, target_percent_s) == 0)
3518 {
3519 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3520 return false;
3521 if (fn_fputs)
3522 {
3523 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
3524 replace_call_with_call_and_fold (gsi, repl);
3525 return true;
3526 }
3527 }
3528
3529 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3530 else if (strcmp (fmt_str, target_percent_c) == 0)
3531 {
3532 if (!arg
3533 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
3534 return false;
3535 if (fn_fputc)
3536 {
3537 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
3538 replace_call_with_call_and_fold (gsi, repl);
3539 return true;
3540 }
3541 }
3542
3543 return false;
3544 }
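
/* Illustrative summary of the folds implemented above, written as
   source-level rewrites (a sketch, not compiled code; each fold fires
   only when the return value is unused and the replacement builtin
   decl is available):

     fprintf (fp, "")        ->  (call removed)
     fprintf (fp, "hello")   ->  fputs ("hello", fp)
     fprintf (fp, "%s", s)   ->  fputs (s, fp)
     fprintf (fp, "%c", c)   ->  fputc (c, fp)

   The _unlocked variant maps to fputs_unlocked/fputc_unlocked
   instead. */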
3545
3546 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3547 FMT and ARG are the arguments to the call; we don't fold cases with
3548 more than 2 arguments, and ARG may be null if this is a 1-argument case.
3549
3550 Return false if no simplification was possible, otherwise return true
3551 after replacing the call with its simplified form. FCODE is the
3552 BUILT_IN_* code of the function to be simplified. */
3553
3554 static bool
3555 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
3556 tree arg, enum built_in_function fcode)
3557 {
3558 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3559 tree fn_putchar, fn_puts, newarg;
3560 const char *fmt_str = NULL;
3561
3562 /* If the return value is used, don't do the transformation. */
3563 if (gimple_call_lhs (stmt) != NULL_TREE)
3564 return false;
3565
3566 /* Check whether the format is a literal string constant. */
3567 fmt_str = c_getstr (fmt);
3568 if (fmt_str == NULL)
3569 return false;
3570
3571 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
3572 {
3573 /* If we're using an unlocked function, assume the other
3574 unlocked functions exist explicitly. */
3575 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
3576 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
3577 }
3578 else
3579 {
3580 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
3581 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
3582 }
3583
3584 if (!init_target_chars ())
3585 return false;
3586
3587 if (strcmp (fmt_str, target_percent_s) == 0
3588 || strchr (fmt_str, target_percent) == NULL)
3589 {
3590 const char *str;
3591
3592 if (strcmp (fmt_str, target_percent_s) == 0)
3593 {
3594 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3595 return false;
3596
3597 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3598 return false;
3599
3600 str = c_getstr (arg);
3601 if (str == NULL)
3602 return false;
3603 }
3604 else
3605 {
3606 /* The format specifier doesn't contain any '%' characters. */
3607 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
3608 && arg)
3609 return false;
3610 str = fmt_str;
3611 }
3612
3613 /* If the string was "", printf does nothing. */
3614 if (str[0] == '\0')
3615 {
3616 replace_call_with_value (gsi, NULL_TREE);
3617 return true;
3618 }
3619
3620 /* If the string has length of 1, call putchar. */
3621 if (str[1] == '\0')
3622 {
3623 /* Given printf("c"), (where c is any one character,)
3624 convert "c"[0] to an int and pass that to the replacement
3625 function. */
3626 newarg = build_int_cst (integer_type_node, str[0]);
3627 if (fn_putchar)
3628 {
3629 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
3630 replace_call_with_call_and_fold (gsi, repl);
3631 return true;
3632 }
3633 }
3634 else
3635 {
3636 /* If the string was "string\n", call puts("string"). */
3637 size_t len = strlen (str);
3638 if ((unsigned char)str[len - 1] == target_newline
3639 && (size_t) (int) len == len
3640 && (int) len > 0)
3641 {
3642 char *newstr;
3643
3644 /* Create a NUL-terminated string that's one char shorter
3645 than the original, stripping off the trailing '\n'. */
3646 newstr = xstrdup (str);
3647 newstr[len - 1] = '\0';
3648 newarg = build_string_literal (len, newstr);
3649 free (newstr);
3650 if (fn_puts)
3651 {
3652 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
3653 replace_call_with_call_and_fold (gsi, repl);
3654 return true;
3655 }
3656 }
3657 else
3658 /* We'd like to arrange to call fputs(string,stdout) here,
3659 but we need stdout and don't have a way to get it yet. */
3660 return false;
3661 }
3662 }
3663
3664 /* The other optimizations can be done only on the non-va_list variants. */
3665 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3666 return false;
3667
3668 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
3669 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
3670 {
3671 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3672 return false;
3673 if (fn_puts)
3674 {
3675 gcall *repl = gimple_build_call (fn_puts, 1, arg);
3676 replace_call_with_call_and_fold (gsi, repl);
3677 return true;
3678 }
3679 }
3680
3681 /* If the format specifier was "%c", call __builtin_putchar(arg). */
3682 else if (strcmp (fmt_str, target_percent_c) == 0)
3683 {
3684 if (!arg || ! useless_type_conversion_p (integer_type_node,
3685 TREE_TYPE (arg)))
3686 return false;
3687 if (fn_putchar)
3688 {
3689 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
3690 replace_call_with_call_and_fold (gsi, repl);
3691 return true;
3692 }
3693 }
3694
3695 return false;
3696 }
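
/* Illustrative summary of the printf folds above (a sketch of the
   source-level effect, under the same return-value-unused condition):

     printf ("")           ->  (call removed)
     printf ("c")          ->  putchar ('c')
     printf ("str\n")      ->  puts ("str")
     printf ("%s", "x")    ->  putchar ('x')   (constant arg, length 1)
     printf ("%s\n", s)    ->  puts (s)
     printf ("%c", c)      ->  putchar (c)

   A plain "%s" format requires a constant argument so the string
   contents can be inspected; "%s\n" does not. */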
3697
3698
3699
3700 /* Fold a call to __builtin_strlen if its argument's length is known. */
3701
3702 static bool
3703 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
3704 {
3705 gimple *stmt = gsi_stmt (*gsi);
3706 tree arg = gimple_call_arg (stmt, 0);
3707
3708 wide_int minlen;
3709 wide_int maxlen;
3710
3711 c_strlen_data lendata = { };
3712 if (get_range_strlen (arg, &lendata, /* eltsize = */ 1)
3713 && !lendata.decl
3714 && lendata.minlen && TREE_CODE (lendata.minlen) == INTEGER_CST
3715 && lendata.maxlen && TREE_CODE (lendata.maxlen) == INTEGER_CST)
3716 {
3717 /* The range of lengths refers to either a single constant
3718 string or to the longest and shortest constant string
3719 referenced by the argument of the strlen() call, or to
3720 the strings that can possibly be stored in the arrays
3721 the argument refers to. */
3722 minlen = wi::to_wide (lendata.minlen);
3723 maxlen = wi::to_wide (lendata.maxlen);
3724 }
3725 else
3726 {
3727 unsigned prec = TYPE_PRECISION (sizetype);
3728
3729 minlen = wi::shwi (0, prec);
3730 maxlen = wi::to_wide (max_object_size (), prec) - 2;
3731 }
3732
3733 if (minlen == maxlen)
3734 {
3735 /* Fold the strlen call to a constant. */
3736 tree type = TREE_TYPE (lendata.minlen);
3737 tree len = force_gimple_operand_gsi (gsi,
3738 wide_int_to_tree (type, minlen),
3739 true, NULL, true, GSI_SAME_STMT);
3740 replace_call_with_value (gsi, len);
3741 return true;
3742 }
3743
3744 /* Set the strlen() range to [0, MAXLEN]. */
3745 if (tree lhs = gimple_call_lhs (stmt))
3746 set_strlen_range (lhs, maxlen);
3747
3748 return false;
3749 }
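
/* For example, given

     const char *p = "hello";
     size_t n = __builtin_strlen (p);

   get_range_strlen computes the range [5, 5], so the call folds to
   the constant 5. When only an upper bound is known, the call is
   kept but the value range of its result is narrowed via
   set_strlen_range. */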
3750
3751 /* Fold a call to __builtin_acc_on_device. */
3752
3753 static bool
3754 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
3755 {
3756 /* Defer folding until we know which compiler we're in. */
3757 if (symtab->state != EXPANSION)
3758 return false;
3759
3760 unsigned val_host = GOMP_DEVICE_HOST;
3761 unsigned val_dev = GOMP_DEVICE_NONE;
3762
3763 #ifdef ACCEL_COMPILER
3764 val_host = GOMP_DEVICE_NOT_HOST;
3765 val_dev = ACCEL_COMPILER_acc_device;
3766 #endif
3767
3768 location_t loc = gimple_location (gsi_stmt (*gsi));
3769
3770 tree host_eq = make_ssa_name (boolean_type_node);
3771 gimple *host_ass = gimple_build_assign
3772 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
3773 gimple_set_location (host_ass, loc);
3774 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
3775
3776 tree dev_eq = make_ssa_name (boolean_type_node);
3777 gimple *dev_ass = gimple_build_assign
3778 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
3779 gimple_set_location (dev_ass, loc);
3780 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
3781
3782 tree result = make_ssa_name (boolean_type_node);
3783 gimple *result_ass = gimple_build_assign
3784 (result, BIT_IOR_EXPR, host_eq, dev_eq);
3785 gimple_set_location (result_ass, loc);
3786 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
3787
3788 replace_call_with_value (gsi, result);
3789
3790 return true;
3791 }
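
/* The statements built above correspond to the following GIMPLE
   (a sketch; the SSA names are invented for illustration):

     host_eq_1 = arg0 == val_host;
     dev_eq_2 = arg0 == val_dev;
     result_3 = host_eq_1 | dev_eq_2;

   with the acc_on_device call then replaced by result_3. */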
3792
3793 /* Fold realloc (0, n) -> malloc (n). */
3794
3795 static bool
3796 gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
3797 {
3798 gimple *stmt = gsi_stmt (*gsi);
3799 tree arg = gimple_call_arg (stmt, 0);
3800 tree size = gimple_call_arg (stmt, 1);
3801
3802 if (operand_equal_p (arg, null_pointer_node, 0))
3803 {
3804 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
3805 if (fn_malloc)
3806 {
3807 gcall *repl = gimple_build_call (fn_malloc, 1, size);
3808 replace_call_with_call_and_fold (gsi, repl);
3809 return true;
3810 }
3811 }
3812 return false;
3813 }
3814
3815 /* Fold the non-target builtin at *GSI and return whether any simplification
3816 was made. */
3817
3818 static bool
3819 gimple_fold_builtin (gimple_stmt_iterator *gsi)
3820 {
3821 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
3822 tree callee = gimple_call_fndecl (stmt);
3823
3824 /* Give up for always_inline inline builtins until they are
3825 inlined. */
3826 if (avoid_folding_inline_builtin (callee))
3827 return false;
3828
3829 unsigned n = gimple_call_num_args (stmt);
3830 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
3831 switch (fcode)
3832 {
3833 case BUILT_IN_BCMP:
3834 return gimple_fold_builtin_bcmp (gsi);
3835 case BUILT_IN_BCOPY:
3836 return gimple_fold_builtin_bcopy (gsi);
3837 case BUILT_IN_BZERO:
3838 return gimple_fold_builtin_bzero (gsi);
3839
3840 case BUILT_IN_MEMSET:
3841 return gimple_fold_builtin_memset (gsi,
3842 gimple_call_arg (stmt, 1),
3843 gimple_call_arg (stmt, 2));
3844 case BUILT_IN_MEMCPY:
3845 case BUILT_IN_MEMPCPY:
3846 case BUILT_IN_MEMMOVE:
3847 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3848 gimple_call_arg (stmt, 1), fcode);
3849 case BUILT_IN_SPRINTF_CHK:
3850 case BUILT_IN_VSPRINTF_CHK:
3851 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
3852 case BUILT_IN_STRCAT_CHK:
3853 return gimple_fold_builtin_strcat_chk (gsi);
3854 case BUILT_IN_STRNCAT_CHK:
3855 return gimple_fold_builtin_strncat_chk (gsi);
3856 case BUILT_IN_STRLEN:
3857 return gimple_fold_builtin_strlen (gsi);
3858 case BUILT_IN_STRCPY:
3859 return gimple_fold_builtin_strcpy (gsi,
3860 gimple_call_arg (stmt, 0),
3861 gimple_call_arg (stmt, 1));
3862 case BUILT_IN_STRNCPY:
3863 return gimple_fold_builtin_strncpy (gsi,
3864 gimple_call_arg (stmt, 0),
3865 gimple_call_arg (stmt, 1),
3866 gimple_call_arg (stmt, 2));
3867 case BUILT_IN_STRCAT:
3868 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
3869 gimple_call_arg (stmt, 1));
3870 case BUILT_IN_STRNCAT:
3871 return gimple_fold_builtin_strncat (gsi);
3872 case BUILT_IN_INDEX:
3873 case BUILT_IN_STRCHR:
3874 return gimple_fold_builtin_strchr (gsi, false);
3875 case BUILT_IN_RINDEX:
3876 case BUILT_IN_STRRCHR:
3877 return gimple_fold_builtin_strchr (gsi, true);
3878 case BUILT_IN_STRSTR:
3879 return gimple_fold_builtin_strstr (gsi);
3880 case BUILT_IN_STRCMP:
3881 case BUILT_IN_STRCMP_EQ:
3882 case BUILT_IN_STRCASECMP:
3883 case BUILT_IN_STRNCMP:
3884 case BUILT_IN_STRNCMP_EQ:
3885 case BUILT_IN_STRNCASECMP:
3886 return gimple_fold_builtin_string_compare (gsi);
3887 case BUILT_IN_MEMCHR:
3888 return gimple_fold_builtin_memchr (gsi);
3889 case BUILT_IN_FPUTS:
3890 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3891 gimple_call_arg (stmt, 1), false);
3892 case BUILT_IN_FPUTS_UNLOCKED:
3893 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3894 gimple_call_arg (stmt, 1), true);
3895 case BUILT_IN_MEMCPY_CHK:
3896 case BUILT_IN_MEMPCPY_CHK:
3897 case BUILT_IN_MEMMOVE_CHK:
3898 case BUILT_IN_MEMSET_CHK:
3899 return gimple_fold_builtin_memory_chk (gsi,
3900 gimple_call_arg (stmt, 0),
3901 gimple_call_arg (stmt, 1),
3902 gimple_call_arg (stmt, 2),
3903 gimple_call_arg (stmt, 3),
3904 fcode);
3905 case BUILT_IN_STPCPY:
3906 return gimple_fold_builtin_stpcpy (gsi);
3907 case BUILT_IN_STRCPY_CHK:
3908 case BUILT_IN_STPCPY_CHK:
3909 return gimple_fold_builtin_stxcpy_chk (gsi,
3910 gimple_call_arg (stmt, 0),
3911 gimple_call_arg (stmt, 1),
3912 gimple_call_arg (stmt, 2),
3913 fcode);
3914 case BUILT_IN_STRNCPY_CHK:
3915 case BUILT_IN_STPNCPY_CHK:
3916 return gimple_fold_builtin_stxncpy_chk (gsi,
3917 gimple_call_arg (stmt, 0),
3918 gimple_call_arg (stmt, 1),
3919 gimple_call_arg (stmt, 2),
3920 gimple_call_arg (stmt, 3),
3921 fcode);
3922 case BUILT_IN_SNPRINTF_CHK:
3923 case BUILT_IN_VSNPRINTF_CHK:
3924 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
3925
3926 case BUILT_IN_FPRINTF:
3927 case BUILT_IN_FPRINTF_UNLOCKED:
3928 case BUILT_IN_VFPRINTF:
3929 if (n == 2 || n == 3)
3930 return gimple_fold_builtin_fprintf (gsi,
3931 gimple_call_arg (stmt, 0),
3932 gimple_call_arg (stmt, 1),
3933 n == 3
3934 ? gimple_call_arg (stmt, 2)
3935 : NULL_TREE,
3936 fcode);
3937 break;
3938 case BUILT_IN_FPRINTF_CHK:
3939 case BUILT_IN_VFPRINTF_CHK:
3940 if (n == 3 || n == 4)
3941 return gimple_fold_builtin_fprintf (gsi,
3942 gimple_call_arg (stmt, 0),
3943 gimple_call_arg (stmt, 2),
3944 n == 4
3945 ? gimple_call_arg (stmt, 3)
3946 : NULL_TREE,
3947 fcode);
3948 break;
3949 case BUILT_IN_PRINTF:
3950 case BUILT_IN_PRINTF_UNLOCKED:
3951 case BUILT_IN_VPRINTF:
3952 if (n == 1 || n == 2)
3953 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
3954 n == 2
3955 ? gimple_call_arg (stmt, 1)
3956 : NULL_TREE, fcode);
3957 break;
3958 case BUILT_IN_PRINTF_CHK:
3959 case BUILT_IN_VPRINTF_CHK:
3960 if (n == 2 || n == 3)
3961 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
3962 n == 3
3963 ? gimple_call_arg (stmt, 2)
3964 : NULL_TREE, fcode);
3965 break;
3966 case BUILT_IN_ACC_ON_DEVICE:
3967 return gimple_fold_builtin_acc_on_device (gsi,
3968 gimple_call_arg (stmt, 0));
3969 case BUILT_IN_REALLOC:
3970 return gimple_fold_builtin_realloc (gsi);
3971
3972 default:;
3973 }
3974
3975 /* Try the generic builtin folder. */
3976 bool ignore = (gimple_call_lhs (stmt) == NULL);
3977 tree result = fold_call_stmt (stmt, ignore);
3978 if (result)
3979 {
3980 if (ignore)
3981 STRIP_NOPS (result);
3982 else
3983 result = fold_convert (gimple_call_return_type (stmt), result);
3984 if (!update_call_from_tree (gsi, result))
3985 gimplify_and_update_call_from_tree (gsi, result);
3986 return true;
3987 }
3988
3989 return false;
3990 }
3991
3992 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
3993 function calls to constants, where possible. */
3994
3995 static tree
3996 fold_internal_goacc_dim (const gimple *call)
3997 {
3998 int axis = oacc_get_ifn_dim_arg (call);
3999 int size = oacc_get_fn_dim_size (current_function_decl, axis);
4000 tree result = NULL_TREE;
4001 tree type = TREE_TYPE (gimple_call_lhs (call));
4002
4003 switch (gimple_call_internal_fn (call))
4004 {
4005 case IFN_GOACC_DIM_POS:
4006 /* If the size is 1, we know the answer. */
4007 if (size == 1)
4008 result = build_int_cst (type, 0);
4009 break;
4010 case IFN_GOACC_DIM_SIZE:
4011 /* If the size is not dynamic, we know the answer. */
4012 if (size)
4013 result = build_int_cst (type, size);
4014 break;
4015 default:
4016 break;
4017 }
4018
4019 return result;
4020 }
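
/* For example, if an axis of the current offloaded function is
   statically known to have size 32, .GOACC_DIM_SIZE for that axis
   folds to 32; if the size is 1, .GOACC_DIM_POS folds to 0. A size
   of 0 means the dimension is dynamic and nothing is folded. */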
4021
4022 /* Return true if STMT is an __atomic_compare_exchange_N call which is
4023 suitable for conversion into ATOMIC_COMPARE_EXCHANGE, i.e. its second
4024 argument is &var where var is only addressable because of such calls. */
4025
4026 bool
4027 optimize_atomic_compare_exchange_p (gimple *stmt)
4028 {
4029 if (gimple_call_num_args (stmt) != 6
4030 || !flag_inline_atomics
4031 || !optimize
4032 || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
4033 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
4034 || !gimple_vdef (stmt)
4035 || !gimple_vuse (stmt))
4036 return false;
4037
4038 tree fndecl = gimple_call_fndecl (stmt);
4039 switch (DECL_FUNCTION_CODE (fndecl))
4040 {
4041 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
4042 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
4043 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
4044 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
4045 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
4046 break;
4047 default:
4048 return false;
4049 }
4050
4051 tree expected = gimple_call_arg (stmt, 1);
4052 if (TREE_CODE (expected) != ADDR_EXPR
4053 || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
4054 return false;
4055
4056 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
4057 if (!is_gimple_reg_type (etype)
4058 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
4059 || TREE_THIS_VOLATILE (etype)
4060 || VECTOR_TYPE_P (etype)
4061 || TREE_CODE (etype) == COMPLEX_TYPE
4062 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
4063 might not preserve all the bits. See PR71716. */
4064 || SCALAR_FLOAT_TYPE_P (etype)
4065 || maybe_ne (TYPE_PRECISION (etype),
4066 GET_MODE_BITSIZE (TYPE_MODE (etype))))
4067 return false;
4068
4069 tree weak = gimple_call_arg (stmt, 3);
4070 if (!integer_zerop (weak) && !integer_onep (weak))
4071 return false;
4072
4073 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
4074 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
4075 machine_mode mode = TYPE_MODE (itype);
4076
4077 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
4078 == CODE_FOR_nothing
4079 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
4080 return false;
4081
4082 if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
4083 return false;
4084
4085 return true;
4086 }
4087
4088 /* Fold
4089 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
4090 into
4091 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
4092 i = IMAGPART_EXPR <t>;
4093 r = (_Bool) i;
4094 e = REALPART_EXPR <t>; */
4095
4096 void
4097 fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
4098 {
4099 gimple *stmt = gsi_stmt (*gsi);
4100 tree fndecl = gimple_call_fndecl (stmt);
4101 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
4102 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
4103 tree ctype = build_complex_type (itype);
4104 tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
4105 bool throws = false;
4106 edge e = NULL;
4107 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
4108 expected);
4109 gsi_insert_before (gsi, g, GSI_SAME_STMT);
4110 gimple_stmt_iterator gsiret = gsi_for_stmt (g);
4111 if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
4112 {
4113 g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
4114 build1 (VIEW_CONVERT_EXPR, itype,
4115 gimple_assign_lhs (g)));
4116 gsi_insert_before (gsi, g, GSI_SAME_STMT);
4117 }
4118 int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
4119 + int_size_in_bytes (itype);
4120 g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
4121 gimple_call_arg (stmt, 0),
4122 gimple_assign_lhs (g),
4123 gimple_call_arg (stmt, 2),
4124 build_int_cst (integer_type_node, flag),
4125 gimple_call_arg (stmt, 4),
4126 gimple_call_arg (stmt, 5));
4127 tree lhs = make_ssa_name (ctype);
4128 gimple_call_set_lhs (g, lhs);
4129 gimple_set_vdef (g, gimple_vdef (stmt));
4130 gimple_set_vuse (g, gimple_vuse (stmt));
4131 SSA_NAME_DEF_STMT (gimple_vdef (g)) = g;
4132 tree oldlhs = gimple_call_lhs (stmt);
4133 if (stmt_can_throw_internal (cfun, stmt))
4134 {
4135 throws = true;
4136 e = find_fallthru_edge (gsi_bb (*gsi)->succs);
4137 }
4138 gimple_call_set_nothrow (as_a <gcall *> (g),
4139 gimple_call_nothrow_p (as_a <gcall *> (stmt)));
4140 gimple_call_set_lhs (stmt, NULL_TREE);
4141 gsi_replace (gsi, g, true);
4142 if (oldlhs)
4143 {
4144 g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
4145 build1 (IMAGPART_EXPR, itype, lhs));
4146 if (throws)
4147 {
4148 gsi_insert_on_edge_immediate (e, g);
4149 *gsi = gsi_for_stmt (g);
4150 }
4151 else
4152 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4153 g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
4154 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4155 }
4156 g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
4157 build1 (REALPART_EXPR, itype, lhs));
4158 if (throws && oldlhs == NULL_TREE)
4159 {
4160 gsi_insert_on_edge_immediate (e, g);
4161 *gsi = gsi_for_stmt (g);
4162 }
4163 else
4164 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4165 if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
4166 {
4167 g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
4168 VIEW_CONVERT_EXPR,
4169 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
4170 gimple_assign_lhs (g)));
4171 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4172 }
4173 g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
4174 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4175 *gsi = gsiret;
4176 }
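
/* For example, a strong 4-byte exchange (W == 0, N == 4) gets
   FLAG == 4, while the weak variant gets FLAG == 256 + 4 == 260,
   matching the W * 256 + N encoding described above. */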
4177
4178 /* Return true if ARG0 CODE ARG1, computed in infinite signed precision,
4179 doesn't fit into TYPE. Overflow is tested regardless of -fwrapv,
4180 and even for unsigned types. */
4181
4182 bool
4183 arith_overflowed_p (enum tree_code code, const_tree type,
4184 const_tree arg0, const_tree arg1)
4185 {
4186 widest2_int warg0 = widest2_int_cst (arg0);
4187 widest2_int warg1 = widest2_int_cst (arg1);
4188 widest2_int wres;
4189 switch (code)
4190 {
4191 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
4192 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
4193 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
4194 default: gcc_unreachable ();
4195 }
4196 signop sign = TYPE_SIGN (type);
4197 if (sign == UNSIGNED && wi::neg_p (wres))
4198 return true;
4199 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
4200 }
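
/* Worked example: with TYPE unsigned char (precision 8),
   arith_overflowed_p (PLUS_EXPR, type, 200, 100) computes wres = 300,
   which needs 9 value bits, so it returns true, whereas 200 + 55 = 255
   fits in 8 bits and returns false. For MINUS_EXPR on unsigned 0 and 1,
   wres = -1 is negative in infinite precision, so the wi::neg_p check
   reports the overflow. */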
4201
4202 /* Attempt to fold a call statement referenced by the statement iterator GSI.
4203 The statement may be replaced by another statement, e.g., if the call
4204 simplifies to a constant value. Return true if any changes were made.
4205 It is assumed that the operands have been previously folded. */
4206
4207 static bool
4208 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
4209 {
4210 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
4211 tree callee;
4212 bool changed = false;
4213 unsigned i;
4214
4215 /* Fold *& in call arguments. */
4216 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4217 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
4218 {
4219 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
4220 if (tmp)
4221 {
4222 gimple_call_set_arg (stmt, i, tmp);
4223 changed = true;
4224 }
4225 }
4226
4227 /* Check for virtual calls that became direct calls. */
4228 callee = gimple_call_fn (stmt);
4229 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
4230 {
4231 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
4232 {
4233 if (dump_file && virtual_method_call_p (callee)
4234 && !possible_polymorphic_call_target_p
4235 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
4236 (OBJ_TYPE_REF_EXPR (callee)))))
4237 {
4238 fprintf (dump_file,
4239 "Type inheritance inconsistent devirtualization of ");
4240 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
4241 fprintf (dump_file, " to ");
4242 print_generic_expr (dump_file, callee, TDF_SLIM);
4243 fprintf (dump_file, "\n");
4244 }
4245
4246 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
4247 changed = true;
4248 }
4249 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
4250 {
4251 bool final;
4252 vec <cgraph_node *>targets
4253 = possible_polymorphic_call_targets (callee, stmt, &final);
4254 if (final && targets.length () <= 1 && dbg_cnt (devirt))
4255 {
4256 tree lhs = gimple_call_lhs (stmt);
4257 if (dump_enabled_p ())
4258 {
4259 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
4260 "folding virtual function call to %s\n",
4261 targets.length () == 1
4262 ? targets[0]->name ()
4263 : "__builtin_unreachable");
4264 }
4265 if (targets.length () == 1)
4266 {
4267 tree fndecl = targets[0]->decl;
4268 gimple_call_set_fndecl (stmt, fndecl);
4269 changed = true;
4270 /* If changing the call to __cxa_pure_virtual
4271 or similar noreturn function, adjust gimple_call_fntype
4272 too. */
4273 if (gimple_call_noreturn_p (stmt)
4274 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
4275 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
4276 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
4277 == void_type_node))
4278 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
4279 /* If the call becomes noreturn, remove the lhs. */
4280 if (lhs
4281 && gimple_call_noreturn_p (stmt)
4282 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
4283 || should_remove_lhs_p (lhs)))
4284 {
4285 if (TREE_CODE (lhs) == SSA_NAME)
4286 {
4287 tree var = create_tmp_var (TREE_TYPE (lhs));
4288 tree def = get_or_create_ssa_default_def (cfun, var);
4289 gimple *new_stmt = gimple_build_assign (lhs, def);
4290 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
4291 }
4292 gimple_call_set_lhs (stmt, NULL_TREE);
4293 }
4294 maybe_remove_unused_call_args (cfun, stmt);
4295 }
4296 else
4297 {
4298 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
4299 gimple *new_stmt = gimple_build_call (fndecl, 0);
4300 gimple_set_location (new_stmt, gimple_location (stmt));
4301 /* If the call had a SSA name as lhs morph that into
4302 an uninitialized value. */
4303 if (lhs && TREE_CODE (lhs) == SSA_NAME)
4304 {
4305 tree var = create_tmp_var (TREE_TYPE (lhs));
4306 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
4307 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
4308 set_ssa_default_def (cfun, var, lhs);
4309 }
4310 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4311 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4312 gsi_replace (gsi, new_stmt, false);
4313 return true;
4314 }
4315 }
4316 }
4317 }
4318
4319 /* Check for indirect calls that became direct calls, and then
4320 no longer require a static chain. */
4321 if (gimple_call_chain (stmt))
4322 {
4323 tree fn = gimple_call_fndecl (stmt);
4324 if (fn && !DECL_STATIC_CHAIN (fn))
4325 {
4326 gimple_call_set_chain (stmt, NULL);
4327 changed = true;
4328 }
4329 else
4330 {
4331 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
4332 if (tmp)
4333 {
4334 gimple_call_set_chain (stmt, tmp);
4335 changed = true;
4336 }
4337 }
4338 }
4339
4340 if (inplace)
4341 return changed;
4342
4343 /* Check for builtins that CCP can handle using information not
4344 available in the generic fold routines. */
4345 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4346 {
4347 if (gimple_fold_builtin (gsi))
4348 changed = true;
4349 }
4350 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
4351 {
4352 changed |= targetm.gimple_fold_builtin (gsi);
4353 }
4354 else if (gimple_call_internal_p (stmt))
4355 {
4356 enum tree_code subcode = ERROR_MARK;
4357 tree result = NULL_TREE;
4358 bool cplx_result = false;
4359 tree overflow = NULL_TREE;
4360 switch (gimple_call_internal_fn (stmt))
4361 {
4362 case IFN_BUILTIN_EXPECT:
4363 result = fold_builtin_expect (gimple_location (stmt),
4364 gimple_call_arg (stmt, 0),
4365 gimple_call_arg (stmt, 1),
4366 gimple_call_arg (stmt, 2),
4367 NULL_TREE);
4368 break;
4369 case IFN_UBSAN_OBJECT_SIZE:
4370 {
4371 tree offset = gimple_call_arg (stmt, 1);
4372 tree objsize = gimple_call_arg (stmt, 2);
4373 if (integer_all_onesp (objsize)
4374 || (TREE_CODE (offset) == INTEGER_CST
4375 && TREE_CODE (objsize) == INTEGER_CST
4376 && tree_int_cst_le (offset, objsize)))
4377 {
4378 replace_call_with_value (gsi, NULL_TREE);
4379 return true;
4380 }
4381 }
4382 break;
4383 case IFN_UBSAN_PTR:
4384 if (integer_zerop (gimple_call_arg (stmt, 1)))
4385 {
4386 replace_call_with_value (gsi, NULL_TREE);
4387 return true;
4388 }
4389 break;
4390 case IFN_UBSAN_BOUNDS:
4391 {
4392 tree index = gimple_call_arg (stmt, 1);
4393 tree bound = gimple_call_arg (stmt, 2);
4394 if (TREE_CODE (index) == INTEGER_CST
4395 && TREE_CODE (bound) == INTEGER_CST)
4396 {
4397 index = fold_convert (TREE_TYPE (bound), index);
4398 if (TREE_CODE (index) == INTEGER_CST
4399 && tree_int_cst_le (index, bound))
4400 {
4401 replace_call_with_value (gsi, NULL_TREE);
4402 return true;
4403 }
4404 }
4405 }
4406 break;
4407 case IFN_GOACC_DIM_SIZE:
4408 case IFN_GOACC_DIM_POS:
4409 result = fold_internal_goacc_dim (stmt);
4410 break;
4411 case IFN_UBSAN_CHECK_ADD:
4412 subcode = PLUS_EXPR;
4413 break;
4414 case IFN_UBSAN_CHECK_SUB:
4415 subcode = MINUS_EXPR;
4416 break;
4417 case IFN_UBSAN_CHECK_MUL:
4418 subcode = MULT_EXPR;
4419 break;
4420 case IFN_ADD_OVERFLOW:
4421 subcode = PLUS_EXPR;
4422 cplx_result = true;
4423 break;
4424 case IFN_SUB_OVERFLOW:
4425 subcode = MINUS_EXPR;
4426 cplx_result = true;
4427 break;
4428 case IFN_MUL_OVERFLOW:
4429 subcode = MULT_EXPR;
4430 cplx_result = true;
4431 break;
4432 default:
4433 break;
4434 }
4435 if (subcode != ERROR_MARK)
4436 {
4437 tree arg0 = gimple_call_arg (stmt, 0);
4438 tree arg1 = gimple_call_arg (stmt, 1);
4439 tree type = TREE_TYPE (arg0);
4440 if (cplx_result)
4441 {
4442 tree lhs = gimple_call_lhs (stmt);
4443 if (lhs == NULL_TREE)
4444 type = NULL_TREE;
4445 else
4446 type = TREE_TYPE (TREE_TYPE (lhs));
4447 }
4448 if (type == NULL_TREE)
4449 ;
4450 /* x = y + 0; x = y - 0; x = y * 0; */
4451 else if (integer_zerop (arg1))
4452 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
4453 /* x = 0 + y; x = 0 * y; */
4454 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
4455 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
4456 /* x = y - y; */
4457 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
4458 result = integer_zero_node;
4459 /* x = y * 1; x = 1 * y; */
4460 else if (subcode == MULT_EXPR && integer_onep (arg1))
4461 result = arg0;
4462 else if (subcode == MULT_EXPR && integer_onep (arg0))
4463 result = arg1;
4464 else if (TREE_CODE (arg0) == INTEGER_CST
4465 && TREE_CODE (arg1) == INTEGER_CST)
4466 {
4467 if (cplx_result)
4468 result = int_const_binop (subcode, fold_convert (type, arg0),
4469 fold_convert (type, arg1));
4470 else
4471 result = int_const_binop (subcode, arg0, arg1);
4472 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
4473 {
4474 if (cplx_result)
4475 overflow = build_one_cst (type);
4476 else
4477 result = NULL_TREE;
4478 }
4479 }
4480 if (result)
4481 {
4482 if (result == integer_zero_node)
4483 result = build_zero_cst (type);
4484 else if (cplx_result && TREE_TYPE (result) != type)
4485 {
4486 if (TREE_CODE (result) == INTEGER_CST)
4487 {
4488 if (arith_overflowed_p (PLUS_EXPR, type, result,
4489 integer_zero_node))
4490 overflow = build_one_cst (type);
4491 }
4492 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
4493 && TYPE_UNSIGNED (type))
4494 || (TYPE_PRECISION (type)
4495 < (TYPE_PRECISION (TREE_TYPE (result))
4496 + (TYPE_UNSIGNED (TREE_TYPE (result))
4497 && !TYPE_UNSIGNED (type)))))
4498 result = NULL_TREE;
4499 if (result)
4500 result = fold_convert (type, result);
4501 }
4502 }
4503 }
4504
4505 if (result)
4506 {
4507 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
4508 result = drop_tree_overflow (result);
4509 if (cplx_result)
4510 {
4511 if (overflow == NULL_TREE)
4512 overflow = build_zero_cst (TREE_TYPE (result));
4513 tree ctype = build_complex_type (TREE_TYPE (result));
4514 if (TREE_CODE (result) == INTEGER_CST
4515 && TREE_CODE (overflow) == INTEGER_CST)
4516 result = build_complex (ctype, result, overflow);
4517 else
4518 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
4519 ctype, result, overflow);
4520 }
4521 if (!update_call_from_tree (gsi, result))
4522 gimplify_and_update_call_from_tree (gsi, result);
4523 changed = true;
4524 }
4525 }
4526
4527 return changed;
4528 }
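
/* As an example of the internal-fn handling above, a call
   _1 = .UBSAN_CHECK_MUL (x_2, 1) simplifies to _1 = x_2, and
   r_3 = .ADD_OVERFLOW (x_2, 0) becomes the complex value
   COMPLEX_EXPR <x_2, 0>, i.e. the sum together with a zero
   overflow flag. */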
4529
4530
4531 /* Return true if NAME has a use on statement STMT. */
4532
4533 static bool
4534 has_use_on_stmt (tree name, gimple *stmt)
4535 {
4536 imm_use_iterator iter;
4537 use_operand_p use_p;
4538 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
4539 if (USE_STMT (use_p) == stmt)
4540 return true;
4541 return false;
4542 }
4543
4544 /* Worker for fold_stmt_1 dispatch to pattern-based folding with
4545 gimple_simplify.
4546
4547 Replaces *GSI with the simplification result in RES_OP
4548 and the associated statements in *SEQ. Does the replacement
4549 according to INPLACE and returns true if the operation succeeded. */
4550
4551 static bool
4552 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
4553 gimple_match_op *res_op,
4554 gimple_seq *seq, bool inplace)
4555 {
4556 gimple *stmt = gsi_stmt (*gsi);
4557 tree *ops = res_op->ops;
4558 unsigned int num_ops = res_op->num_ops;
4559
4560 /* Play safe and do not allow abnormals to be mentioned in
4561 newly created statements. See also maybe_push_res_to_seq.
4562 As an exception allow such uses if there was a use of the
4563 same SSA name on the old stmt. */
4564 for (unsigned int i = 0; i < num_ops; ++i)
4565 if (TREE_CODE (ops[i]) == SSA_NAME
4566 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])
4567 && !has_use_on_stmt (ops[i], stmt))
4568 return false;
4569
4570 if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
4571 for (unsigned int i = 0; i < 2; ++i)
4572 if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
4573 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))
4574 && !has_use_on_stmt (TREE_OPERAND (ops[0], i), stmt))
4575 return false;
4576
4577 /* Don't insert new statements when INPLACE is true, even if we could
4578 reuse STMT for the final statement. */
4579 if (inplace && !gimple_seq_empty_p (*seq))
4580 return false;
4581
4582 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
4583 {
4584 gcc_assert (res_op->code.is_tree_code ());
4585 if (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison
4586 /* GIMPLE_CONDs condition may not throw. */
4587 && (!flag_exceptions
4588 || !cfun->can_throw_non_call_exceptions
4589 || !operation_could_trap_p (res_op->code,
4590 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
4591 false, NULL_TREE)))
4592 gimple_cond_set_condition (cond_stmt, res_op->code, ops[0], ops[1]);
4593 else if (res_op->code == SSA_NAME)
4594 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
4595 build_zero_cst (TREE_TYPE (ops[0])));
4596 else if (res_op->code == INTEGER_CST)
4597 {
4598 if (integer_zerop (ops[0]))
4599 gimple_cond_make_false (cond_stmt);
4600 else
4601 gimple_cond_make_true (cond_stmt);
4602 }
4603 else if (!inplace)
4604 {
4605 tree res = maybe_push_res_to_seq (res_op, seq);
4606 if (!res)
4607 return false;
4608 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
4609 build_zero_cst (TREE_TYPE (res)));
4610 }
4611 else
4612 return false;
4613 if (dump_file && (dump_flags & TDF_DETAILS))
4614 {
4615 fprintf (dump_file, "gimple_simplified to ");
4616 if (!gimple_seq_empty_p (*seq))
4617 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4618 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4619 0, TDF_SLIM);
4620 }
4621 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4622 return true;
4623 }
4624 else if (is_gimple_assign (stmt)
4625 && res_op->code.is_tree_code ())
4626 {
4627 if (!inplace
4628 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (res_op->code))
4629 {
4630 maybe_build_generic_op (res_op);
4631 gimple_assign_set_rhs_with_ops (gsi, res_op->code,
4632 res_op->op_or_null (0),
4633 res_op->op_or_null (1),
4634 res_op->op_or_null (2));
4635 if (dump_file && (dump_flags & TDF_DETAILS))
4636 {
4637 fprintf (dump_file, "gimple_simplified to ");
4638 if (!gimple_seq_empty_p (*seq))
4639 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4640 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4641 0, TDF_SLIM);
4642 }
4643 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4644 return true;
4645 }
4646 }
4647 else if (res_op->code.is_fn_code ()
4648 && gimple_call_combined_fn (stmt) == res_op->code)
4649 {
4650 gcc_assert (num_ops == gimple_call_num_args (stmt));
4651 for (unsigned int i = 0; i < num_ops; ++i)
4652 gimple_call_set_arg (stmt, i, ops[i]);
4653 if (dump_file && (dump_flags & TDF_DETAILS))
4654 {
4655 fprintf (dump_file, "gimple_simplified to ");
4656 if (!gimple_seq_empty_p (*seq))
4657 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4658 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
4659 }
4660 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4661 return true;
4662 }
4663 else if (!inplace)
4664 {
4665 if (gimple_has_lhs (stmt))
4666 {
4667 tree lhs = gimple_get_lhs (stmt);
4668 if (!maybe_push_res_to_seq (res_op, seq, lhs))
4669 return false;
4670 if (dump_file && (dump_flags & TDF_DETAILS))
4671 {
4672 fprintf (dump_file, "gimple_simplified to ");
4673 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4674 }
4675 gsi_replace_with_seq_vops (gsi, *seq);
4676 return true;
4677 }
4678 else
4679 gcc_unreachable ();
4680 }
4681
4682 return false;
4683 }
4684
4685 /* Canonicalize MEM_REFs invariant address operand after propagation. */
4686
4687 static bool
4688 maybe_canonicalize_mem_ref_addr (tree *t)
4689 {
4690 bool res = false;
4691
4692 if (TREE_CODE (*t) == ADDR_EXPR)
4693 t = &TREE_OPERAND (*t, 0);
4694
4695 /* The C and C++ frontends use an ARRAY_REF for indexing with their
4696 generic vector extension. The actual vector referenced is
4697 view-converted to an array type for this purpose. If the index
4698 is constant the canonical representation in the middle-end is a
4699 BIT_FIELD_REF so re-write the former to the latter here. */
4700 if (TREE_CODE (*t) == ARRAY_REF
4701 && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
4702 && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
4703 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
4704 {
4705 tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
4706 if (VECTOR_TYPE_P (vtype))
4707 {
4708 tree low = array_ref_low_bound (*t);
4709 if (TREE_CODE (low) == INTEGER_CST)
4710 {
4711 if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
4712 {
4713 widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
4714 wi::to_widest (low));
4715 idx = wi::mul (idx, wi::to_widest
4716 (TYPE_SIZE (TREE_TYPE (*t))));
4717 widest_int ext
4718 = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
4719 if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
4720 {
4721 *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
4722 TREE_TYPE (*t),
4723 TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
4724 TYPE_SIZE (TREE_TYPE (*t)),
4725 wide_int_to_tree (bitsizetype, idx));
4726 res = true;
4727 }
4728 }
4729 }
4730 }
4731 }
4732
4733 while (handled_component_p (*t))
4734 t = &TREE_OPERAND (*t, 0);
4735
4736 /* Canonicalize MEM [&foo.bar, 0] which appears after propagation
4737 of invariant addresses into an SSA name MEM_REF address. */
4738 if (TREE_CODE (*t) == MEM_REF
4739 || TREE_CODE (*t) == TARGET_MEM_REF)
4740 {
4741 tree addr = TREE_OPERAND (*t, 0);
4742 if (TREE_CODE (addr) == ADDR_EXPR
4743 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
4744 || handled_component_p (TREE_OPERAND (addr, 0))))
4745 {
4746 tree base;
4747 poly_int64 coffset;
4748 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
4749 &coffset);
4750 if (!base)
4751 gcc_unreachable ();
4752
4753 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
4754 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
4755 TREE_OPERAND (*t, 1),
4756 size_int (coffset));
4757 res = true;
4758 }
4759 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
4760 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
4761 }
4762
4763 /* Canonicalize back MEM_REFs to plain reference trees if the object
4764 accessed is a decl that has the same access semantics as the MEM_REF. */
4765 if (TREE_CODE (*t) == MEM_REF
4766 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
4767 && integer_zerop (TREE_OPERAND (*t, 1))
4768 && MR_DEPENDENCE_CLIQUE (*t) == 0)
4769 {
4770 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4771 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
4772 if (/* Same volatile qualification. */
4773 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
4774 /* Same TBAA behavior with -fstrict-aliasing. */
4775 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
4776 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
4777 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
4778 /* Same alignment. */
4779 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
4780 /* We have to look out here to not drop a required conversion
4781 from the rhs to the lhs if *t appears on the lhs or vice-versa
4782 if it appears on the rhs. Thus require strict type
4783 compatibility. */
4784 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
4785 {
4786 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4787 res = true;
4788 }
4789 }
4790
4791 /* Canonicalize TARGET_MEM_REF in particular with respect to
4792 the indexes becoming constant. */
4793 else if (TREE_CODE (*t) == TARGET_MEM_REF)
4794 {
4795 tree tem = maybe_fold_tmr (*t);
4796 if (tem)
4797 {
4798 *t = tem;
4799 res = true;
4800 }
4801 }
4802
4803 return res;
4804 }
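
/* Sketches of the canonicalizations performed above:

     VIEW_CONVERT_EXPR<int[4]>(v)[2]  ->  BIT_FIELD_REF <v, 32, 64>
     MEM[&a.b.c, 4]                   ->  MEM[&a, 4 + offset-of-b.c]
     MEM[&decl, 0]                    ->  decl

   where the last form requires the matching volatility, TBAA
   behavior, alignment and type compatibility checked above. */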
4805
4806 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
4807 distinguishes both cases. */
4808
4809 static bool
4810 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
4811 {
4812 bool changed = false;
4813 gimple *stmt = gsi_stmt (*gsi);
4814 bool nowarning = gimple_no_warning_p (stmt);
4815 unsigned i;
4816 fold_defer_overflow_warnings ();
4817
4818 /* First do required canonicalization of [TARGET_]MEM_REF addresses
4819 after propagation.
4820 ??? This shouldn't be done in generic folding but in the
4821 propagation helpers which also know whether an address was
4822 propagated.
4823 Also canonicalize operand order. */
4824 switch (gimple_code (stmt))
4825 {
4826 case GIMPLE_ASSIGN:
4827 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
4828 {
4829 tree *rhs = gimple_assign_rhs1_ptr (stmt);
4830 if ((REFERENCE_CLASS_P (*rhs)
4831 || TREE_CODE (*rhs) == ADDR_EXPR)
4832 && maybe_canonicalize_mem_ref_addr (rhs))
4833 changed = true;
4834 tree *lhs = gimple_assign_lhs_ptr (stmt);
4835 if (REFERENCE_CLASS_P (*lhs)
4836 && maybe_canonicalize_mem_ref_addr (lhs))
4837 changed = true;
4838 }
4839 else
4840 {
4841 /* Canonicalize operand order. */
4842 enum tree_code code = gimple_assign_rhs_code (stmt);
4843 if (TREE_CODE_CLASS (code) == tcc_comparison
4844 || commutative_tree_code (code)
4845 || commutative_ternary_tree_code (code))
4846 {
4847 tree rhs1 = gimple_assign_rhs1 (stmt);
4848 tree rhs2 = gimple_assign_rhs2 (stmt);
4849 if (tree_swap_operands_p (rhs1, rhs2))
4850 {
4851 gimple_assign_set_rhs1 (stmt, rhs2);
4852 gimple_assign_set_rhs2 (stmt, rhs1);
4853 if (TREE_CODE_CLASS (code) == tcc_comparison)
4854 gimple_assign_set_rhs_code (stmt,
4855 swap_tree_comparison (code));
4856 changed = true;
4857 }
4858 }
4859 }
4860 break;
4861 case GIMPLE_CALL:
4862 {
4863 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4864 {
4865 tree *arg = gimple_call_arg_ptr (stmt, i);
4866 if (REFERENCE_CLASS_P (*arg)
4867 && maybe_canonicalize_mem_ref_addr (arg))
4868 changed = true;
4869 }
4870 tree *lhs = gimple_call_lhs_ptr (stmt);
4871 if (*lhs
4872 && REFERENCE_CLASS_P (*lhs)
4873 && maybe_canonicalize_mem_ref_addr (lhs))
4874 changed = true;
4875 break;
4876 }
4877 case GIMPLE_ASM:
4878 {
4879 gasm *asm_stmt = as_a <gasm *> (stmt);
4880 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
4881 {
4882 tree link = gimple_asm_output_op (asm_stmt, i);
4883 tree op = TREE_VALUE (link);
4884 if (REFERENCE_CLASS_P (op)
4885 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4886 changed = true;
4887 }
4888 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
4889 {
4890 tree link = gimple_asm_input_op (asm_stmt, i);
4891 tree op = TREE_VALUE (link);
4892 if ((REFERENCE_CLASS_P (op)
4893 || TREE_CODE (op) == ADDR_EXPR)
4894 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4895 changed = true;
4896 }
4897 }
4898 break;
4899 case GIMPLE_DEBUG:
4900 if (gimple_debug_bind_p (stmt))
4901 {
4902 tree *val = gimple_debug_bind_get_value_ptr (stmt);
4903 if (*val
4904 && (REFERENCE_CLASS_P (*val)
4905 || TREE_CODE (*val) == ADDR_EXPR)
4906 && maybe_canonicalize_mem_ref_addr (val))
4907 changed = true;
4908 }
4909 break;
4910 case GIMPLE_COND:
4911 {
4912 /* Canonicalize operand order. */
4913 tree lhs = gimple_cond_lhs (stmt);
4914 tree rhs = gimple_cond_rhs (stmt);
4915 if (tree_swap_operands_p (lhs, rhs))
4916 {
4917 gcond *gc = as_a <gcond *> (stmt);
4918 gimple_cond_set_lhs (gc, rhs);
4919 gimple_cond_set_rhs (gc, lhs);
4920 gimple_cond_set_code (gc,
4921 swap_tree_comparison (gimple_cond_code (gc)));
4922 changed = true;
4923 }
4924 }
4925 default:;
4926 }
4927
4928 /* Dispatch to pattern-based folding. */
4929 if (!inplace
4930 || is_gimple_assign (stmt)
4931 || gimple_code (stmt) == GIMPLE_COND)
4932 {
4933 gimple_seq seq = NULL;
4934 gimple_match_op res_op;
4935 if (gimple_simplify (stmt, &res_op, inplace ? NULL : &seq,
4936 valueize, valueize))
4937 {
4938 if (replace_stmt_with_simplification (gsi, &res_op, &seq, inplace))
4939 changed = true;
4940 else
4941 gimple_seq_discard (seq);
4942 }
4943 }
4944
4945 stmt = gsi_stmt (*gsi);
4946
4947 /* Fold the main computation performed by the statement. */
4948 switch (gimple_code (stmt))
4949 {
4950 case GIMPLE_ASSIGN:
4951 {
4952 /* Try to canonicalize for boolean-typed X the comparisons
4953 X == 0, X == 1, X != 0, and X != 1. */
4954 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
4955 || gimple_assign_rhs_code (stmt) == NE_EXPR)
4956 {
4957 tree lhs = gimple_assign_lhs (stmt);
4958 tree op1 = gimple_assign_rhs1 (stmt);
4959 tree op2 = gimple_assign_rhs2 (stmt);
4960 tree type = TREE_TYPE (op1);
4961
4962 /* Check whether the comparison operands are of the same boolean
4963 type as the result type.
4964 Check that the second operand is an integer constant with value
4965 one or zero. */
4966 if (TREE_CODE (op2) == INTEGER_CST
4967 && (integer_zerop (op2) || integer_onep (op2))
4968 && useless_type_conversion_p (TREE_TYPE (lhs), type))
4969 {
4970 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
4971 bool is_logical_not = false;
4972
4973 /* X == 0 and X != 1 is a logical-not of X;
4974 X == 1 and X != 0 is X. */
4975 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
4976 || (cmp_code == NE_EXPR && integer_onep (op2)))
4977 is_logical_not = true;
4978
4979 if (is_logical_not == false)
4980 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
4981 /* Only for one-bit precision typed X is the transformation
4982 !X -> ~X valid. */
4983 else if (TYPE_PRECISION (type) == 1)
4984 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
4985 /* Otherwise we use !X -> X ^ 1. */
4986 else
4987 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
4988 build_int_cst (type, 1));
4989 changed = true;
4990 break;
4991 }
4992 }
4993
4994 unsigned old_num_ops = gimple_num_ops (stmt);
4995 tree lhs = gimple_assign_lhs (stmt);
4996 tree new_rhs = fold_gimple_assign (gsi);
4997 if (new_rhs
4998 && !useless_type_conversion_p (TREE_TYPE (lhs),
4999 TREE_TYPE (new_rhs)))
5000 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
5001 if (new_rhs
5002 && (!inplace
5003 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
5004 {
5005 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
5006 changed = true;
5007 }
5008 break;
5009 }
5010
5011 case GIMPLE_CALL:
5012 changed |= gimple_fold_call (gsi, inplace);
5013 break;
5014
5015 case GIMPLE_ASM:
5016 /* Fold *& in asm operands. */
5017 {
5018 gasm *asm_stmt = as_a <gasm *> (stmt);
5019 size_t noutputs;
5020 const char **oconstraints;
5021 const char *constraint;
5022 bool allows_mem, allows_reg;
5023
5024 noutputs = gimple_asm_noutputs (asm_stmt);
5025 oconstraints = XALLOCAVEC (const char *, noutputs);
5026
5027 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
5028 {
5029 tree link = gimple_asm_output_op (asm_stmt, i);
5030 tree op = TREE_VALUE (link);
5031 oconstraints[i]
5032 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5033 if (REFERENCE_CLASS_P (op)
5034 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
5035 {
5036 TREE_VALUE (link) = op;
5037 changed = true;
5038 }
5039 }
5040 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
5041 {
5042 tree link = gimple_asm_input_op (asm_stmt, i);
5043 tree op = TREE_VALUE (link);
5044 constraint
5045 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5046 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
5047 oconstraints, &allows_mem, &allows_reg);
5048 if (REFERENCE_CLASS_P (op)
5049 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
5050 != NULL_TREE)
5051 {
5052 TREE_VALUE (link) = op;
5053 changed = true;
5054 }
5055 }
5056 }
5057 break;
5058
5059 case GIMPLE_DEBUG:
5060 if (gimple_debug_bind_p (stmt))
5061 {
5062 tree val = gimple_debug_bind_get_value (stmt);
5063 if (val
5064 && REFERENCE_CLASS_P (val))
5065 {
5066 tree tem = maybe_fold_reference (val, false);
5067 if (tem)
5068 {
5069 gimple_debug_bind_set_value (stmt, tem);
5070 changed = true;
5071 }
5072 }
5073 else if (val
5074 && TREE_CODE (val) == ADDR_EXPR)
5075 {
5076 tree ref = TREE_OPERAND (val, 0);
5077 tree tem = maybe_fold_reference (ref, false);
5078 if (tem)
5079 {
5080 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
5081 gimple_debug_bind_set_value (stmt, tem);
5082 changed = true;
5083 }
5084 }
5085 }
5086 break;
5087
5088 case GIMPLE_RETURN:
5089 {
5090 greturn *ret_stmt = as_a<greturn *> (stmt);
5091 tree ret = gimple_return_retval(ret_stmt);
5092
5093 if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
5094 {
5095 tree val = valueize (ret);
5096 if (val && val != ret
5097 && may_propagate_copy (ret, val))
5098 {
5099 gimple_return_set_retval (ret_stmt, val);
5100 changed = true;
5101 }
5102 }
5103 }
5104 break;
5105
5106 default:;
5107 }
5108
5109 stmt = gsi_stmt (*gsi);
5110
5111 /* Fold *& on the lhs. */
5112 if (gimple_has_lhs (stmt))
5113 {
5114 tree lhs = gimple_get_lhs (stmt);
5115 if (lhs && REFERENCE_CLASS_P (lhs))
5116 {
5117 tree new_lhs = maybe_fold_reference (lhs, true);
5118 if (new_lhs)
5119 {
5120 gimple_set_lhs (stmt, new_lhs);
5121 changed = true;
5122 }
5123 }
5124 }
5125
5126 fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
5127 return changed;
5128 }
5129
5130 /* Valueization callback that ends up not following SSA edges. */
5131
5132 tree
5133 no_follow_ssa_edges (tree)
5134 {
5135 return NULL_TREE;
5136 }
5137
5138 /* Valueization callback that ends up following single-use SSA edges only. */
5139
5140 tree
5141 follow_single_use_edges (tree val)
5142 {
5143 if (TREE_CODE (val) == SSA_NAME
5144 && !has_single_use (val))
5145 return NULL_TREE;
5146 return val;
5147 }
5148
5149 /* Valueization callback that follows all SSA edges. */
5150
5151 tree
5152 follow_all_ssa_edges (tree val)
5153 {
5154 return val;
5155 }
5156
5157 /* Fold the statement pointed to by GSI. In some cases, this function may
5158 replace the whole statement with a new one. Returns true iff folding
5159 makes any changes.
5160 The statement pointed to by GSI should be in valid gimple form but may
5161 be in an unfolded state resulting, for example, from constant propagation,
5162 which can produce *&x = 0. */
5163
5164 bool
5165 fold_stmt (gimple_stmt_iterator *gsi)
5166 {
5167 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
5168 }
5169
5170 bool
5171 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
5172 {
5173 return fold_stmt_1 (gsi, false, valueize);
5174 }
5175
5176 /* Perform the minimal folding on statement *GSI. Only operations like
5177 *&x created by constant propagation are handled. The statement cannot
5178 be replaced with a new one. Return true if the statement was
5179 changed, false otherwise.
5180 The statement *GSI should be in valid gimple form but may
5181 be in an unfolded state resulting, for example, from constant propagation,
5182 which can produce *&x = 0. */
5183
5184 bool
5185 fold_stmt_inplace (gimple_stmt_iterator *gsi)
5186 {
5187 gimple *stmt = gsi_stmt (*gsi);
5188 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
5189 gcc_assert (gsi_stmt (*gsi) == stmt);
5190 return changed;
5191 }
5192
5193 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
5194 if EXPR is null or we don't know how.
5195 If non-null, the result always has boolean type. */
5196
5197 static tree
5198 canonicalize_bool (tree expr, bool invert)
5199 {
5200 if (!expr)
5201 return NULL_TREE;
5202 else if (invert)
5203 {
5204 if (integer_nonzerop (expr))
5205 return boolean_false_node;
5206 else if (integer_zerop (expr))
5207 return boolean_true_node;
5208 else if (TREE_CODE (expr) == SSA_NAME)
5209 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
5210 build_int_cst (TREE_TYPE (expr), 0));
5211 else if (COMPARISON_CLASS_P (expr))
5212 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
5213 boolean_type_node,
5214 TREE_OPERAND (expr, 0),
5215 TREE_OPERAND (expr, 1));
5216 else
5217 return NULL_TREE;
5218 }
5219 else
5220 {
5221 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5222 return expr;
5223 if (integer_nonzerop (expr))
5224 return boolean_true_node;
5225 else if (integer_zerop (expr))
5226 return boolean_false_node;
5227 else if (TREE_CODE (expr) == SSA_NAME)
5228 return fold_build2 (NE_EXPR, boolean_type_node, expr,
5229 build_int_cst (TREE_TYPE (expr), 0));
5230 else if (COMPARISON_CLASS_P (expr))
5231 return fold_build2 (TREE_CODE (expr),
5232 boolean_type_node,
5233 TREE_OPERAND (expr, 0),
5234 TREE_OPERAND (expr, 1));
5235 else
5236 return NULL_TREE;
5237 }
5238 }
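
/* For example, canonicalize_bool (x < y, true) yields x >= y with
   boolean type, canonicalize_bool (name_1, false) for a non-boolean
   SSA name yields name_1 != 0, and nonzero/zero constants map to
   boolean_true_node/boolean_false_node (swapped when inverting). */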
5239
5240 /* Check to see if a boolean expression EXPR is logically equivalent to the
5241 comparison (OP1 CODE OP2). Check for various identities involving
5242 SSA_NAMEs. */
5243
5244 static bool
5245 same_bool_comparison_p (const_tree expr, enum tree_code code,
5246 const_tree op1, const_tree op2)
5247 {
5248 gimple *s;
5249
5250 /* The obvious case. */
5251 if (TREE_CODE (expr) == code
5252 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
5253 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
5254 return true;
5255
5256 /* Check for comparing (name, name != 0) and the case where expr
5257 is an SSA_NAME with a definition matching the comparison. */
5258 if (TREE_CODE (expr) == SSA_NAME
5259 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5260 {
5261 if (operand_equal_p (expr, op1, 0))
5262 return ((code == NE_EXPR && integer_zerop (op2))
5263 || (code == EQ_EXPR && integer_nonzerop (op2)));
5264 s = SSA_NAME_DEF_STMT (expr);
5265 if (is_gimple_assign (s)
5266 && gimple_assign_rhs_code (s) == code
5267 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
5268 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
5269 return true;
5270 }
5271
5272 /* If op1 is of the form (name != 0) or (name == 0), and the definition
5273 of name is a comparison, recurse. */
5274 if (TREE_CODE (op1) == SSA_NAME
5275 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
5276 {
5277 s = SSA_NAME_DEF_STMT (op1);
5278 if (is_gimple_assign (s)
5279 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
5280 {
5281 enum tree_code c = gimple_assign_rhs_code (s);
5282 if ((c == NE_EXPR && integer_zerop (op2))
5283 || (c == EQ_EXPR && integer_nonzerop (op2)))
5284 return same_bool_comparison_p (expr, c,
5285 gimple_assign_rhs1 (s),
5286 gimple_assign_rhs2 (s));
5287 if ((c == EQ_EXPR && integer_zerop (op2))
5288 || (c == NE_EXPR && integer_nonzerop (op2)))
5289 return same_bool_comparison_p (expr,
5290 invert_tree_comparison (c, false),
5291 gimple_assign_rhs1 (s),
5292 gimple_assign_rhs2 (s));
5293 }
5294 }
5295 return false;
5296 }
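
/* For example, if b_1 is boolean with definition b_1 = x_2 < y_3,
   then same_bool_comparison_p (b_1, LT_EXPR, x_2, y_3) is true via
   the definition match, and so is comparing b_1 against the pair
   (b_1, NE_EXPR, 0). */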
5297
5298 /* Check to see if two boolean expressions OP1 and OP2 are logically
5299 equivalent. */
5300
5301 static bool
5302 same_bool_result_p (const_tree op1, const_tree op2)
5303 {
5304 /* Simple cases first. */
5305 if (operand_equal_p (op1, op2, 0))
5306 return true;
5307
5308 /* Check the cases where at least one of the operands is a comparison.
5309 These are a bit smarter than operand_equal_p in that they apply some
5310 identities on SSA_NAMEs. */
5311 if (COMPARISON_CLASS_P (op2)
5312 && same_bool_comparison_p (op1, TREE_CODE (op2),
5313 TREE_OPERAND (op2, 0),
5314 TREE_OPERAND (op2, 1)))
5315 return true;
5316 if (COMPARISON_CLASS_P (op1)
5317 && same_bool_comparison_p (op2, TREE_CODE (op1),
5318 TREE_OPERAND (op1, 0),
5319 TREE_OPERAND (op1, 1)))
5320 return true;
5321
5322 /* Default case. */
5323 return false;
5324 }
5325
5326 /* Forward declarations for some mutually recursive functions. */
5327
5328 static tree
5329 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5330 enum tree_code code2, tree op2a, tree op2b);
5331 static tree
5332 and_var_with_comparison (tree var, bool invert,
5333 enum tree_code code2, tree op2a, tree op2b);
5334 static tree
5335 and_var_with_comparison_1 (gimple *stmt,
5336 enum tree_code code2, tree op2a, tree op2b);
5337 static tree
5338 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5339 enum tree_code code2, tree op2a, tree op2b);
5340 static tree
5341 or_var_with_comparison (tree var, bool invert,
5342 enum tree_code code2, tree op2a, tree op2b);
5343 static tree
5344 or_var_with_comparison_1 (gimple *stmt,
5345 enum tree_code code2, tree op2a, tree op2b);
5346
5347 /* Helper function for and_comparisons_1: try to simplify the AND of the
5348 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5349 If INVERT is true, invert the value of the VAR before doing the AND.
5350 Return NULL_TREE if we can't simplify this to a single expression. */
5351
5352 static tree
5353 and_var_with_comparison (tree var, bool invert,
5354 enum tree_code code2, tree op2a, tree op2b)
5355 {
5356 tree t;
5357 gimple *stmt = SSA_NAME_DEF_STMT (var);
5358
5359 /* We can only deal with variables whose definitions are assignments. */
5360 if (!is_gimple_assign (stmt))
5361 return NULL_TREE;
5362
5363 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5364 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
5365 Then we only have to consider the simpler non-inverted cases. */
5366 if (invert)
5367 t = or_var_with_comparison_1 (stmt,
5368 invert_tree_comparison (code2, false),
5369 op2a, op2b);
5370 else
5371 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
5372 return canonicalize_bool (t, invert);
5373 }
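
/* Illustrative example of the DeMorgan rewrite above (assumed operands,
   not from the GCC sources): with VAR defined as "var_1 = a_2 < b_3" and
   INVERT set, the AND !var_1 AND (a_2 >= b_3) is handled as
   !(var_1 OR (a_2 < b_3)); the inner OR folds to var_1, and
   canonicalize_bool then re-applies the inversion, yielding a_2 >= b_3.  */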
5374
5375 /* Try to simplify the AND of the ssa variable defined by the assignment
5376 STMT with the comparison specified by (OP2A CODE2 OP2B).
5377 Return NULL_TREE if we can't simplify this to a single expression. */
5378
5379 static tree
5380 and_var_with_comparison_1 (gimple *stmt,
5381 enum tree_code code2, tree op2a, tree op2b)
5382 {
5383 tree var = gimple_assign_lhs (stmt);
5384 tree true_test_var = NULL_TREE;
5385 tree false_test_var = NULL_TREE;
5386 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5387
5388 /* Check for identities like (var AND (var == 0)) => false. */
5389 if (TREE_CODE (op2a) == SSA_NAME
5390 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5391 {
5392 if ((code2 == NE_EXPR && integer_zerop (op2b))
5393 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5394 {
5395 true_test_var = op2a;
5396 if (var == true_test_var)
5397 return var;
5398 }
5399 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5400 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5401 {
5402 false_test_var = op2a;
5403 if (var == false_test_var)
5404 return boolean_false_node;
5405 }
5406 }
5407
5408 /* If the definition is a comparison, recurse on it. */
5409 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5410 {
5411 tree t = and_comparisons_1 (innercode,
5412 gimple_assign_rhs1 (stmt),
5413 gimple_assign_rhs2 (stmt),
5414 code2,
5415 op2a,
5416 op2b);
5417 if (t)
5418 return t;
5419 }
5420
5421 /* If the definition is an AND or OR expression, we may be able to
5422 simplify by reassociating. */
5423 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5424 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
5425 {
5426 tree inner1 = gimple_assign_rhs1 (stmt);
5427 tree inner2 = gimple_assign_rhs2 (stmt);
5428 gimple *s;
5429 tree t;
5430 tree partial = NULL_TREE;
5431 bool is_and = (innercode == BIT_AND_EXPR);
5432
5433 /* Check for boolean identities that don't require recursive examination
5434 of inner1/inner2:
5435 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
5436 inner1 AND (inner1 OR inner2) => inner1
5437 !inner1 AND (inner1 AND inner2) => false
5438 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
5439 Likewise for similar cases involving inner2. */
5440 if (inner1 == true_test_var)
5441 return (is_and ? var : inner1);
5442 else if (inner2 == true_test_var)
5443 return (is_and ? var : inner2);
5444 else if (inner1 == false_test_var)
5445 return (is_and
5446 ? boolean_false_node
5447 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
5448 else if (inner2 == false_test_var)
5449 return (is_and
5450 ? boolean_false_node
5451 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
5452
5453 /* Next, redistribute/reassociate the AND across the inner tests.
5454 Compute the first partial result, (inner1 AND (op2a code op2b)) */
5455 if (TREE_CODE (inner1) == SSA_NAME
5456 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5457 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5458 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
5459 gimple_assign_rhs1 (s),
5460 gimple_assign_rhs2 (s),
5461 code2, op2a, op2b)))
5462 {
5463 /* Handle the AND case, where we are reassociating:
5464 (inner1 AND inner2) AND (op2a code2 op2b)
5465 => (t AND inner2)
5466 If the partial result t is a constant, we win. Otherwise
5467 continue on to try reassociating with the other inner test. */
5468 if (is_and)
5469 {
5470 if (integer_onep (t))
5471 return inner2;
5472 else if (integer_zerop (t))
5473 return boolean_false_node;
5474 }
5475
5476 /* Handle the OR case, where we are redistributing:
5477 (inner1 OR inner2) AND (op2a code2 op2b)
5478 => (t OR (inner2 AND (op2a code2 op2b))) */
5479 else if (integer_onep (t))
5480 return boolean_true_node;
5481
5482 /* Save partial result for later. */
5483 partial = t;
5484 }
5485
5486 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
5487 if (TREE_CODE (inner2) == SSA_NAME
5488 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5489 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5490 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
5491 gimple_assign_rhs1 (s),
5492 gimple_assign_rhs2 (s),
5493 code2, op2a, op2b)))
5494 {
5495 /* Handle the AND case, where we are reassociating:
5496 (inner1 AND inner2) AND (op2a code2 op2b)
5497 => (inner1 AND t) */
5498 if (is_and)
5499 {
5500 if (integer_onep (t))
5501 return inner1;
5502 else if (integer_zerop (t))
5503 return boolean_false_node;
5504 /* If both are the same, we can apply the identity
5505 (x AND x) == x. */
5506 else if (partial && same_bool_result_p (t, partial))
5507 return t;
5508 }
5509
5510 /* Handle the OR case, where we are redistributing:
5511 (inner1 OR inner2) AND (op2a code2 op2b)
5512 => (t OR (inner1 AND (op2a code2 op2b)))
5513 => (t OR partial) */
5514 else
5515 {
5516 if (integer_onep (t))
5517 return boolean_true_node;
5518 else if (partial)
5519 {
5520 /* We already got a simplification for the other
5521 operand to the redistributed OR expression. The
5522 interesting case is when at least one is false.
5523 Or, if both are the same, we can apply the identity
5524 (x OR x) == x. */
5525 if (integer_zerop (partial))
5526 return t;
5527 else if (integer_zerop (t))
5528 return partial;
5529 else if (same_bool_result_p (t, partial))
5530 return t;
5531 }
5532 }
5533 }
5534 }
5535 return NULL_TREE;
5536 }
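
/* Illustrative example of the reassociation above (assumed operands, not
   from the GCC sources): with VAR defined as "var_4 = t_1 & t_2" where
   t_1 is "x_2 < 5", ANDing VAR with the comparison x_2 >= 5 computes the
   first partial result t = (x_2 < 5) AND (x_2 >= 5), which
   maybe_fold_and_comparisons recognizes as a disjoint range and folds to
   false, so the whole AND collapses to boolean_false_node regardless
   of t_2.  */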
5537
5538 /* Try to simplify the AND of two comparisons defined by
5539 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5540 If this can be done without constructing an intermediate value,
5541 return the resulting tree; otherwise NULL_TREE is returned.
5542 This function is deliberately asymmetric as it recurses on SSA_DEFs
5543 in the first comparison but not the second. */
5544
5545 static tree
5546 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5547 enum tree_code code2, tree op2a, tree op2b)
5548 {
5549 tree truth_type = truth_type_for (TREE_TYPE (op1a));
5550
5551 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
5552 if (operand_equal_p (op1a, op2a, 0)
5553 && operand_equal_p (op1b, op2b, 0))
5554 {
5555 /* Result will be either NULL_TREE, or a combined comparison. */
5556 tree t = combine_comparisons (UNKNOWN_LOCATION,
5557 TRUTH_ANDIF_EXPR, code1, code2,
5558 truth_type, op1a, op1b);
5559 if (t)
5560 return t;
5561 }
5562
5563 /* Likewise the swapped case of the above. */
5564 if (operand_equal_p (op1a, op2b, 0)
5565 && operand_equal_p (op1b, op2a, 0))
5566 {
5567 /* Result will be either NULL_TREE, or a combined comparison. */
5568 tree t = combine_comparisons (UNKNOWN_LOCATION,
5569 TRUTH_ANDIF_EXPR, code1,
5570 swap_tree_comparison (code2),
5571 truth_type, op1a, op1b);
5572 if (t)
5573 return t;
5574 }
5575
5576 /* If both comparisons are of the same value against constants, we might
5577 be able to merge them. */
5578 if (operand_equal_p (op1a, op2a, 0)
5579 && TREE_CODE (op1b) == INTEGER_CST
5580 && TREE_CODE (op2b) == INTEGER_CST)
5581 {
5582 int cmp = tree_int_cst_compare (op1b, op2b);
5583
5584 /* If we have (op1a == op1b), we should either be able to
5585 return that or FALSE, depending on whether the constant op1b
5586 also satisfies the other comparison against op2b. */
5587 if (code1 == EQ_EXPR)
5588 {
5589 bool done = true;
5590 bool val;
5591 switch (code2)
5592 {
5593 case EQ_EXPR: val = (cmp == 0); break;
5594 case NE_EXPR: val = (cmp != 0); break;
5595 case LT_EXPR: val = (cmp < 0); break;
5596 case GT_EXPR: val = (cmp > 0); break;
5597 case LE_EXPR: val = (cmp <= 0); break;
5598 case GE_EXPR: val = (cmp >= 0); break;
5599 default: done = false;
5600 }
5601 if (done)
5602 {
5603 if (val)
5604 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5605 else
5606 return boolean_false_node;
5607 }
5608 }
5609 /* Likewise if the second comparison is an == comparison. */
5610 else if (code2 == EQ_EXPR)
5611 {
5612 bool done = true;
5613 bool val;
5614 switch (code1)
5615 {
5616 case EQ_EXPR: val = (cmp == 0); break;
5617 case NE_EXPR: val = (cmp != 0); break;
5618 case LT_EXPR: val = (cmp > 0); break;
5619 case GT_EXPR: val = (cmp < 0); break;
5620 case LE_EXPR: val = (cmp >= 0); break;
5621 case GE_EXPR: val = (cmp <= 0); break;
5622 default: done = false;
5623 }
5624 if (done)
5625 {
5626 if (val)
5627 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5628 else
5629 return boolean_false_node;
5630 }
5631 }
5632
5633 /* Same business with inequality tests. */
5634 else if (code1 == NE_EXPR)
5635 {
5636 bool val;
5637 switch (code2)
5638 {
5639 case EQ_EXPR: val = (cmp != 0); break;
5640 case NE_EXPR: val = (cmp == 0); break;
5641 case LT_EXPR: val = (cmp >= 0); break;
5642 case GT_EXPR: val = (cmp <= 0); break;
5643 case LE_EXPR: val = (cmp > 0); break;
5644 case GE_EXPR: val = (cmp < 0); break;
5645 default:
5646 val = false;
5647 }
5648 if (val)
5649 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5650 }
5651 else if (code2 == NE_EXPR)
5652 {
5653 bool val;
5654 switch (code1)
5655 {
5656 case EQ_EXPR: val = (cmp == 0); break;
5657 case NE_EXPR: val = (cmp != 0); break;
5658 case LT_EXPR: val = (cmp <= 0); break;
5659 case GT_EXPR: val = (cmp >= 0); break;
5660 case LE_EXPR: val = (cmp < 0); break;
5661 case GE_EXPR: val = (cmp > 0); break;
5662 default:
5663 val = false;
5664 }
5665 if (val)
5666 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5667 }
5668
5669 /* Choose the more restrictive of two < or <= comparisons. */
5670 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5671 && (code2 == LT_EXPR || code2 == LE_EXPR))
5672 {
5673 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5674 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5675 else
5676 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5677 }
5678
5679 /* Likewise choose the more restrictive of two > or >= comparisons. */
5680 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5681 && (code2 == GT_EXPR || code2 == GE_EXPR))
5682 {
5683 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5684 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5685 else
5686 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5687 }
5688
5689 /* Check for singleton ranges. */
5690 else if (cmp == 0
5691 && ((code1 == LE_EXPR && code2 == GE_EXPR)
5692 || (code1 == GE_EXPR && code2 == LE_EXPR)))
5693 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
5694
5695 /* Check for disjoint ranges. */
5696 else if (cmp <= 0
5697 && (code1 == LT_EXPR || code1 == LE_EXPR)
5698 && (code2 == GT_EXPR || code2 == GE_EXPR))
5699 return boolean_false_node;
5700 else if (cmp >= 0
5701 && (code1 == GT_EXPR || code1 == GE_EXPR)
5702 && (code2 == LT_EXPR || code2 == LE_EXPR))
5703 return boolean_false_node;
5704 }
5705
5706 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5707 NAME's definition is a truth value. See if there are any simplifications
5708 that can be done against NAME's definition. */
5709 if (TREE_CODE (op1a) == SSA_NAME
5710 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5711 && (integer_zerop (op1b) || integer_onep (op1b)))
5712 {
5713 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5714 || (code1 == NE_EXPR && integer_onep (op1b)));
5715 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
5716 switch (gimple_code (stmt))
5717 {
5718 case GIMPLE_ASSIGN:
5719 /* Try to simplify by copy-propagating the definition. */
5720 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
5721
5722 case GIMPLE_PHI:
5723 /* If every argument to the PHI produces the same result when
5724 ANDed with the second comparison, we win.
5725 Do not do this unless the type is bool since we need a bool
5726 result here anyway. */
5727 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5728 {
5729 tree result = NULL_TREE;
5730 unsigned i;
5731 for (i = 0; i < gimple_phi_num_args (stmt); i++)
5732 {
5733 tree arg = gimple_phi_arg_def (stmt, i);
5734
5735 /* If this PHI has itself as an argument, ignore it.
5736 If all the other args produce the same result,
5737 we're still OK. */
5738 if (arg == gimple_phi_result (stmt))
5739 continue;
5740 else if (TREE_CODE (arg) == INTEGER_CST)
5741 {
5742 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
5743 {
5744 if (!result)
5745 result = boolean_false_node;
5746 else if (!integer_zerop (result))
5747 return NULL_TREE;
5748 }
5749 else if (!result)
5750 result = fold_build2 (code2, boolean_type_node,
5751 op2a, op2b);
5752 else if (!same_bool_comparison_p (result,
5753 code2, op2a, op2b))
5754 return NULL_TREE;
5755 }
5756 else if (TREE_CODE (arg) == SSA_NAME
5757 && !SSA_NAME_IS_DEFAULT_DEF (arg))
5758 {
5759 tree temp;
5760 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
5761 /* In simple cases we can look through PHI nodes,
5762 but we have to be careful with loops.
5763 See PR49073. */
5764 if (! dom_info_available_p (CDI_DOMINATORS)
5765 || gimple_bb (def_stmt) == gimple_bb (stmt)
5766 || dominated_by_p (CDI_DOMINATORS,
5767 gimple_bb (def_stmt),
5768 gimple_bb (stmt)))
5769 return NULL_TREE;
5770 temp = and_var_with_comparison (arg, invert, code2,
5771 op2a, op2b);
5772 if (!temp)
5773 return NULL_TREE;
5774 else if (!result)
5775 result = temp;
5776 else if (!same_bool_result_p (result, temp))
5777 return NULL_TREE;
5778 }
5779 else
5780 return NULL_TREE;
5781 }
5782 return result;
5783 }
5784
5785 default:
5786 break;
5787 }
5788 }
5789 return NULL_TREE;
5790 }
5791
5792 /* Try to simplify the AND of two comparisons, specified by
5793 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5794 If this can be simplified to a single expression (without requiring
5795 introducing more SSA variables to hold intermediate values),
5796 return the resulting tree. Otherwise return NULL_TREE.
5797 If the result expression is non-null, it has boolean type. */
5798
5799 tree
5800 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
5801 enum tree_code code2, tree op2a, tree op2b)
5802 {
5803 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5804 if (t)
5805 return t;
5806 else
5807 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5808 }
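
/* A minimal caller sketch (hypothetical, not part of GCC): merging two
   range checks on the same operand X.  Only maybe_fold_and_comparisons
   itself is real API; the helper below exists purely for illustration.  */

static tree ATTRIBUTE_UNUSED
sketch_fold_and_range_checks (tree x)
{
  tree three = build_int_cst (TREE_TYPE (x), 3);
  tree five = build_int_cst (TREE_TYPE (x), 5);
  /* (x < 3) AND (x < 5) folds to the more restrictive x < 3;
     (x > 5) in place of (x < 5) would instead give boolean_false_node,
     because the two ranges are disjoint.  */
  return maybe_fold_and_comparisons (LT_EXPR, x, three, LT_EXPR, x, five);
}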
5809
5810 /* Helper function for or_comparisons_1: try to simplify the OR of the
5811 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5812 If INVERT is true, invert the value of VAR before doing the OR.
5813 Return NULL_TREE if we can't simplify this to a single expression. */
5814
5815 static tree
5816 or_var_with_comparison (tree var, bool invert,
5817 enum tree_code code2, tree op2a, tree op2b)
5818 {
5819 tree t;
5820 gimple *stmt = SSA_NAME_DEF_STMT (var);
5821
5822 /* We can only deal with variables whose definitions are assignments. */
5823 if (!is_gimple_assign (stmt))
5824 return NULL_TREE;
5825
5826 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5827 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
5828 Then we only have to consider the simpler non-inverted cases. */
5829 if (invert)
5830 t = and_var_with_comparison_1 (stmt,
5831 invert_tree_comparison (code2, false),
5832 op2a, op2b);
5833 else
5834 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
5835 return canonicalize_bool (t, invert);
5836 }
5837
5838 /* Try to simplify the OR of the ssa variable defined by the assignment
5839 STMT with the comparison specified by (OP2A CODE2 OP2B).
5840 Return NULL_TREE if we can't simplify this to a single expression. */
5841
5842 static tree
5843 or_var_with_comparison_1 (gimple *stmt,
5844 enum tree_code code2, tree op2a, tree op2b)
5845 {
5846 tree var = gimple_assign_lhs (stmt);
5847 tree true_test_var = NULL_TREE;
5848 tree false_test_var = NULL_TREE;
5849 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5850
5851 /* Check for identities like (var OR (var != 0)) => true. */
5852 if (TREE_CODE (op2a) == SSA_NAME
5853 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5854 {
5855 if ((code2 == NE_EXPR && integer_zerop (op2b))
5856 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5857 {
5858 true_test_var = op2a;
5859 if (var == true_test_var)
5860 return var;
5861 }
5862 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5863 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5864 {
5865 false_test_var = op2a;
5866 if (var == false_test_var)
5867 return boolean_true_node;
5868 }
5869 }
5870
5871 /* If the definition is a comparison, recurse on it. */
5872 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5873 {
5874 tree t = or_comparisons_1 (innercode,
5875 gimple_assign_rhs1 (stmt),
5876 gimple_assign_rhs2 (stmt),
5877 code2,
5878 op2a,
5879 op2b);
5880 if (t)
5881 return t;
5882 }
5883
5884 /* If the definition is an AND or OR expression, we may be able to
5885 simplify by reassociating. */
5886 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5887 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
5888 {
5889 tree inner1 = gimple_assign_rhs1 (stmt);
5890 tree inner2 = gimple_assign_rhs2 (stmt);
5891 gimple *s;
5892 tree t;
5893 tree partial = NULL_TREE;
5894 bool is_or = (innercode == BIT_IOR_EXPR);
5895
5896 /* Check for boolean identities that don't require recursive examination
5897 of inner1/inner2:
5898 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
5899 inner1 OR (inner1 AND inner2) => inner1
5900 !inner1 OR (inner1 OR inner2) => true
5901 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
5902 */
5903 if (inner1 == true_test_var)
5904 return (is_or ? var : inner1);
5905 else if (inner2 == true_test_var)
5906 return (is_or ? var : inner2);
5907 else if (inner1 == false_test_var)
5908 return (is_or
5909 ? boolean_true_node
5910 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
5911 else if (inner2 == false_test_var)
5912 return (is_or
5913 ? boolean_true_node
5914 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
5915
5916 /* Next, redistribute/reassociate the OR across the inner tests.
5917 Compute the first partial result, (inner1 OR (op2a code op2b)) */
5918 if (TREE_CODE (inner1) == SSA_NAME
5919 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5920 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5921 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5922 gimple_assign_rhs1 (s),
5923 gimple_assign_rhs2 (s),
5924 code2, op2a, op2b)))
5925 {
5926 /* Handle the OR case, where we are reassociating:
5927 (inner1 OR inner2) OR (op2a code2 op2b)
5928 => (t OR inner2)
5929 If the partial result t is a constant, we win. Otherwise
5930 continue on to try reassociating with the other inner test. */
5931 if (is_or)
5932 {
5933 if (integer_onep (t))
5934 return boolean_true_node;
5935 else if (integer_zerop (t))
5936 return inner2;
5937 }
5938
5939 /* Handle the AND case, where we are redistributing:
5940 (inner1 AND inner2) OR (op2a code2 op2b)
5941 => (t AND (inner2 OR (op2a code op2b))) */
5942 else if (integer_zerop (t))
5943 return boolean_false_node;
5944
5945 /* Save partial result for later. */
5946 partial = t;
5947 }
5948
5949 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
5950 if (TREE_CODE (inner2) == SSA_NAME
5951 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5952 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5953 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5954 gimple_assign_rhs1 (s),
5955 gimple_assign_rhs2 (s),
5956 code2, op2a, op2b)))
5957 {
5958 /* Handle the OR case, where we are reassociating:
5959 (inner1 OR inner2) OR (op2a code2 op2b)
5960 => (inner1 OR t)
5961 => (t OR partial) */
5962 if (is_or)
5963 {
5964 if (integer_zerop (t))
5965 return inner1;
5966 else if (integer_onep (t))
5967 return boolean_true_node;
5968 /* If both are the same, we can apply the identity
5969 (x OR x) == x. */
5970 else if (partial && same_bool_result_p (t, partial))
5971 return t;
5972 }
5973
5974 /* Handle the AND case, where we are redistributing:
5975 (inner1 AND inner2) OR (op2a code2 op2b)
5976 => (t AND (inner1 OR (op2a code2 op2b)))
5977 => (t AND partial) */
5978 else
5979 {
5980 if (integer_zerop (t))
5981 return boolean_false_node;
5982 else if (partial)
5983 {
5984 /* We already got a simplification for the other
5985 operand to the redistributed AND expression. The
5986 interesting case is when at least one is true.
5987 Or, if both are the same, we can apply the identity
5988 (x AND x) == x. */
5989 if (integer_onep (partial))
5990 return t;
5991 else if (integer_onep (t))
5992 return partial;
5993 else if (same_bool_result_p (t, partial))
5994 return t;
5995 }
5996 }
5997 }
5998 }
5999 return NULL_TREE;
6000 }
6001
6002 /* Try to simplify the OR of two comparisons defined by
6003 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
6004 If this can be done without constructing an intermediate value,
6005 return the resulting tree; otherwise NULL_TREE is returned.
6006 This function is deliberately asymmetric as it recurses on SSA_DEFs
6007 in the first comparison but not the second. */
6008
6009 static tree
6010 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
6011 enum tree_code code2, tree op2a, tree op2b)
6012 {
6013 tree truth_type = truth_type_for (TREE_TYPE (op1a));
6014
6015 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
6016 if (operand_equal_p (op1a, op2a, 0)
6017 && operand_equal_p (op1b, op2b, 0))
6018 {
6019 /* Result will be either NULL_TREE, or a combined comparison. */
6020 tree t = combine_comparisons (UNKNOWN_LOCATION,
6021 TRUTH_ORIF_EXPR, code1, code2,
6022 truth_type, op1a, op1b);
6023 if (t)
6024 return t;
6025 }
6026
6027 /* Likewise the swapped case of the above. */
6028 if (operand_equal_p (op1a, op2b, 0)
6029 && operand_equal_p (op1b, op2a, 0))
6030 {
6031 /* Result will be either NULL_TREE, or a combined comparison. */
6032 tree t = combine_comparisons (UNKNOWN_LOCATION,
6033 TRUTH_ORIF_EXPR, code1,
6034 swap_tree_comparison (code2),
6035 truth_type, op1a, op1b);
6036 if (t)
6037 return t;
6038 }
6039
6040 /* If both comparisons are of the same value against constants, we might
6041 be able to merge them. */
6042 if (operand_equal_p (op1a, op2a, 0)
6043 && TREE_CODE (op1b) == INTEGER_CST
6044 && TREE_CODE (op2b) == INTEGER_CST)
6045 {
6046 int cmp = tree_int_cst_compare (op1b, op2b);
6047
6048 /* If we have (op1a != op1b), we should either be able to
6049 return that or TRUE, depending on whether the constant op1b
6050 also satisfies the other comparison against op2b. */
6051 if (code1 == NE_EXPR)
6052 {
6053 bool done = true;
6054 bool val;
6055 switch (code2)
6056 {
6057 case EQ_EXPR: val = (cmp == 0); break;
6058 case NE_EXPR: val = (cmp != 0); break;
6059 case LT_EXPR: val = (cmp < 0); break;
6060 case GT_EXPR: val = (cmp > 0); break;
6061 case LE_EXPR: val = (cmp <= 0); break;
6062 case GE_EXPR: val = (cmp >= 0); break;
6063 default: done = false;
6064 }
6065 if (done)
6066 {
6067 if (val)
6068 return boolean_true_node;
6069 else
6070 return fold_build2 (code1, boolean_type_node, op1a, op1b);
6071 }
6072 }
6073 /* Likewise if the second comparison is a != comparison. */
6074 else if (code2 == NE_EXPR)
6075 {
6076 bool done = true;
6077 bool val;
6078 switch (code1)
6079 {
6080 case EQ_EXPR: val = (cmp == 0); break;
6081 case NE_EXPR: val = (cmp != 0); break;
6082 case LT_EXPR: val = (cmp > 0); break;
6083 case GT_EXPR: val = (cmp < 0); break;
6084 case LE_EXPR: val = (cmp >= 0); break;
6085 case GE_EXPR: val = (cmp <= 0); break;
6086 default: done = false;
6087 }
6088 if (done)
6089 {
6090 if (val)
6091 return boolean_true_node;
6092 else
6093 return fold_build2 (code2, boolean_type_node, op2a, op2b);
6094 }
6095 }
6096
6097 /* See if an equality test is redundant with the other comparison. */
6098 else if (code1 == EQ_EXPR)
6099 {
6100 bool val;
6101 switch (code2)
6102 {
6103 case EQ_EXPR: val = (cmp == 0); break;
6104 case NE_EXPR: val = (cmp != 0); break;
6105 case LT_EXPR: val = (cmp < 0); break;
6106 case GT_EXPR: val = (cmp > 0); break;
6107 case LE_EXPR: val = (cmp <= 0); break;
6108 case GE_EXPR: val = (cmp >= 0); break;
6109 default:
6110 val = false;
6111 }
6112 if (val)
6113 return fold_build2 (code2, boolean_type_node, op2a, op2b);
6114 }
6115 else if (code2 == EQ_EXPR)
6116 {
6117 bool val;
6118 switch (code1)
6119 {
6120 case EQ_EXPR: val = (cmp == 0); break;
6121 case NE_EXPR: val = (cmp != 0); break;
6122 case LT_EXPR: val = (cmp > 0); break;
6123 case GT_EXPR: val = (cmp < 0); break;
6124 case LE_EXPR: val = (cmp >= 0); break;
6125 case GE_EXPR: val = (cmp <= 0); break;
6126 default:
6127 val = false;
6128 }
6129 if (val)
6130 return fold_build2 (code1, boolean_type_node, op1a, op1b);
6131 }
6132
6133 /* Choose the less restrictive of two < or <= comparisons. */
6134 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
6135 && (code2 == LT_EXPR || code2 == LE_EXPR))
6136 {
6137 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
6138 return fold_build2 (code2, boolean_type_node, op2a, op2b);
6139 else
6140 return fold_build2 (code1, boolean_type_node, op1a, op1b);
6141 }
6142
6143 /* Likewise choose the less restrictive of two > or >= comparisons. */
6144 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
6145 && (code2 == GT_EXPR || code2 == GE_EXPR))
6146 {
6147 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
6148 return fold_build2 (code2, boolean_type_node, op2a, op2b);
6149 else
6150 return fold_build2 (code1, boolean_type_node, op1a, op1b);
6151 }
6152
6153 /* Check for singleton ranges. */
6154 else if (cmp == 0
6155 && ((code1 == LT_EXPR && code2 == GT_EXPR)
6156 || (code1 == GT_EXPR && code2 == LT_EXPR)))
6157 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
6158
6159 /* Check for less/greater pairs that don't restrict the range at all. */
6160 else if (cmp >= 0
6161 && (code1 == LT_EXPR || code1 == LE_EXPR)
6162 && (code2 == GT_EXPR || code2 == GE_EXPR))
6163 return boolean_true_node;
6164 else if (cmp <= 0
6165 && (code1 == GT_EXPR || code1 == GE_EXPR)
6166 && (code2 == LT_EXPR || code2 == LE_EXPR))
6167 return boolean_true_node;
6168 }
6169
6170 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
6171 NAME's definition is a truth value. See if there are any simplifications
6172 that can be done against NAME's definition. */
6173 if (TREE_CODE (op1a) == SSA_NAME
6174 && (code1 == NE_EXPR || code1 == EQ_EXPR)
6175 && (integer_zerop (op1b) || integer_onep (op1b)))
6176 {
6177 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
6178 || (code1 == NE_EXPR && integer_onep (op1b)));
6179 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
6180 switch (gimple_code (stmt))
6181 {
6182 case GIMPLE_ASSIGN:
6183 /* Try to simplify by copy-propagating the definition. */
6184 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
6185
6186 case GIMPLE_PHI:
6187 /* If every argument to the PHI produces the same result when
6188 ORed with the second comparison, we win.
6189 Do not do this unless the type is bool since we need a bool
6190 result here anyway. */
6191 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
6192 {
6193 tree result = NULL_TREE;
6194 unsigned i;
6195 for (i = 0; i < gimple_phi_num_args (stmt); i++)
6196 {
6197 tree arg = gimple_phi_arg_def (stmt, i);
6198
6199 /* If this PHI has itself as an argument, ignore it.
6200 If all the other args produce the same result,
6201 we're still OK. */
6202 if (arg == gimple_phi_result (stmt))
6203 continue;
6204 else if (TREE_CODE (arg) == INTEGER_CST)
6205 {
6206 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
6207 {
6208 if (!result)
6209 result = boolean_true_node;
6210 else if (!integer_onep (result))
6211 return NULL_TREE;
6212 }
6213 else if (!result)
6214 result = fold_build2 (code2, boolean_type_node,
6215 op2a, op2b);
6216 else if (!same_bool_comparison_p (result,
6217 code2, op2a, op2b))
6218 return NULL_TREE;
6219 }
6220 else if (TREE_CODE (arg) == SSA_NAME
6221 && !SSA_NAME_IS_DEFAULT_DEF (arg))
6222 {
6223 tree temp;
6224 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6225 /* In simple cases we can look through PHI nodes,
6226 but we have to be careful with loops.
6227 See PR49073. */
6228 if (! dom_info_available_p (CDI_DOMINATORS)
6229 || gimple_bb (def_stmt) == gimple_bb (stmt)
6230 || dominated_by_p (CDI_DOMINATORS,
6231 gimple_bb (def_stmt),
6232 gimple_bb (stmt)))
6233 return NULL_TREE;
6234 temp = or_var_with_comparison (arg, invert, code2,
6235 op2a, op2b);
6236 if (!temp)
6237 return NULL_TREE;
6238 else if (!result)
6239 result = temp;
6240 else if (!same_bool_result_p (result, temp))
6241 return NULL_TREE;
6242 }
6243 else
6244 return NULL_TREE;
6245 }
6246 return result;
6247 }
6248
6249 default:
6250 break;
6251 }
6252 }
6253 return NULL_TREE;
6254 }
6255
6256 /* Try to simplify the OR of two comparisons, specified by
6257 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
6258 If this can be simplified to a single expression (without requiring
6259 introducing more SSA variables to hold intermediate values),
6260 return the resulting tree. Otherwise return NULL_TREE.
6261 If the result expression is non-null, it has boolean type. */
6262
6263 tree
6264 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
6265 enum tree_code code2, tree op2a, tree op2b)
6266 {
6267 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
6268 if (t)
6269 return t;
6270 else
6271 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
6272 }
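
/* A minimal caller sketch (hypothetical, not part of GCC), mirroring the
   AND example earlier: ORing two comparisons that together cover the
   whole range of X.  */

static tree ATTRIBUTE_UNUSED
sketch_fold_or_range_checks (tree x)
{
  tree limit = build_int_cst (TREE_TYPE (x), 3);
  /* (x < 3) OR (x >= 3) is always true, so this returns
     boolean_true_node; for overlapping ranges such as (x < 3) OR (x < 5)
     it would instead return the less restrictive x < 5.  */
  return maybe_fold_or_comparisons (LT_EXPR, x, limit, GE_EXPR, x, limit);
}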
6273
6274
6275 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6276
6277 Either NULL_TREE, a simplified but non-constant or a constant
6278 is returned.
6279
6280 ??? This should go into a gimple-fold-inline.h file to be eventually
6281 privatized with the single valueize function used in the various TUs
6282 to avoid the indirect function call overhead. */
6283
6284 tree
6285 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
6286 tree (*gvalueize) (tree))
6287 {
6288 gimple_match_op res_op;
6289 /* ??? The SSA propagators do not correctly deal with following SSA use-def
6290 edges if there are intermediate VARYING defs. For this reason
6291 do not follow SSA edges here even though SCCVN can technically
6292 deal with that just fine. */
6293 if (gimple_simplify (stmt, &res_op, NULL, gvalueize, valueize))
6294 {
6295 tree res = NULL_TREE;
6296 if (gimple_simplified_result_is_gimple_val (&res_op))
6297 res = res_op.ops[0];
6298 else if (mprts_hook)
6299 res = mprts_hook (&res_op);
6300 if (res)
6301 {
6302 if (dump_file && dump_flags & TDF_DETAILS)
6303 {
6304 fprintf (dump_file, "Match-and-simplified ");
6305 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
6306 fprintf (dump_file, " to ");
6307 print_generic_expr (dump_file, res);
6308 fprintf (dump_file, "\n");
6309 }
6310 return res;
6311 }
6312 }
6313
6314 location_t loc = gimple_location (stmt);
6315 switch (gimple_code (stmt))
6316 {
6317 case GIMPLE_ASSIGN:
6318 {
6319 enum tree_code subcode = gimple_assign_rhs_code (stmt);
6320
6321 switch (get_gimple_rhs_class (subcode))
6322 {
6323 case GIMPLE_SINGLE_RHS:
6324 {
6325 tree rhs = gimple_assign_rhs1 (stmt);
6326 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
6327
6328 if (TREE_CODE (rhs) == SSA_NAME)
6329 {
6330 /* If the RHS is an SSA_NAME, return its known constant value,
6331 if any. */
6332 return (*valueize) (rhs);
6333 }
6334 /* Handle propagating invariant addresses into address
6335 operations. */
6336 else if (TREE_CODE (rhs) == ADDR_EXPR
6337 && !is_gimple_min_invariant (rhs))
6338 {
6339 poly_int64 offset = 0;
6340 tree base;
6341 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
6342 &offset,
6343 valueize);
6344 if (base
6345 && (CONSTANT_CLASS_P (base)
6346 || decl_address_invariant_p (base)))
6347 return build_invariant_address (TREE_TYPE (rhs),
6348 base, offset);
6349 }
6350 else if (TREE_CODE (rhs) == CONSTRUCTOR
6351 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
6352 && known_eq (CONSTRUCTOR_NELTS (rhs),
6353 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
6354 {
6355 unsigned i, nelts;
6356 tree val;
6357
6358 nelts = CONSTRUCTOR_NELTS (rhs);
6359 tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
6360 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
6361 {
6362 val = (*valueize) (val);
6363 if (TREE_CODE (val) == INTEGER_CST
6364 || TREE_CODE (val) == REAL_CST
6365 || TREE_CODE (val) == FIXED_CST)
6366 vec.quick_push (val);
6367 else
6368 return NULL_TREE;
6369 }
6370
6371 return vec.build ();
6372 }
6373 if (subcode == OBJ_TYPE_REF)
6374 {
6375 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
6376 /* If callee is constant, we can fold away the wrapper. */
6377 if (is_gimple_min_invariant (val))
6378 return val;
6379 }
6380
6381 if (kind == tcc_reference)
6382 {
6383 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
6384 || TREE_CODE (rhs) == REALPART_EXPR
6385 || TREE_CODE (rhs) == IMAGPART_EXPR)
6386 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6387 {
6388 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6389 return fold_unary_loc (EXPR_LOCATION (rhs),
6390 TREE_CODE (rhs),
6391 TREE_TYPE (rhs), val);
6392 }
6393 else if (TREE_CODE (rhs) == BIT_FIELD_REF
6394 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6395 {
6396 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6397 return fold_ternary_loc (EXPR_LOCATION (rhs),
6398 TREE_CODE (rhs),
6399 TREE_TYPE (rhs), val,
6400 TREE_OPERAND (rhs, 1),
6401 TREE_OPERAND (rhs, 2));
6402 }
6403 else if (TREE_CODE (rhs) == MEM_REF
6404 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6405 {
6406 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6407 if (TREE_CODE (val) == ADDR_EXPR
6408 && is_gimple_min_invariant (val))
6409 {
6410 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
6411 unshare_expr (val),
6412 TREE_OPERAND (rhs, 1));
6413 if (tem)
6414 rhs = tem;
6415 }
6416 }
6417 return fold_const_aggregate_ref_1 (rhs, valueize);
6418 }
6419 else if (kind == tcc_declaration)
6420 return get_symbol_constant_value (rhs);
6421 return rhs;
6422 }
6423
6424 case GIMPLE_UNARY_RHS:
6425 return NULL_TREE;
6426
6427 case GIMPLE_BINARY_RHS:
6428 /* Translate &x + CST into an invariant form suitable for
6429 further propagation. */
6430 if (subcode == POINTER_PLUS_EXPR)
6431 {
6432 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6433 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6434 if (TREE_CODE (op0) == ADDR_EXPR
6435 && TREE_CODE (op1) == INTEGER_CST)
6436 {
6437 tree off = fold_convert (ptr_type_node, op1);
6438 return build_fold_addr_expr_loc
6439 (loc,
6440 fold_build2 (MEM_REF,
6441 TREE_TYPE (TREE_TYPE (op0)),
6442 unshare_expr (op0), off));
6443 }
6444 }
6445 /* Canonicalize bool != 0 and bool == 0 appearing after
6446 valueization. While gimple_simplify handles this,
6447 it can get confused by the ~X == 1 -> X == 0 transform
6448 which we can't reduce to an SSA name or a constant
6449 (and we have no way to tell gimple_simplify to not
6450 consider those transforms in the first place). */
6451 else if (subcode == EQ_EXPR
6452 || subcode == NE_EXPR)
6453 {
6454 tree lhs = gimple_assign_lhs (stmt);
6455 tree op0 = gimple_assign_rhs1 (stmt);
6456 if (useless_type_conversion_p (TREE_TYPE (lhs),
6457 TREE_TYPE (op0)))
6458 {
6459 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6460 op0 = (*valueize) (op0);
6461 if (TREE_CODE (op0) == INTEGER_CST)
6462 std::swap (op0, op1);
6463 if (TREE_CODE (op1) == INTEGER_CST
6464 && ((subcode == NE_EXPR && integer_zerop (op1))
6465 || (subcode == EQ_EXPR && integer_onep (op1))))
6466 return op0;
6467 }
6468 }
6469 return NULL_TREE;
6470
6471 case GIMPLE_TERNARY_RHS:
6472 {
6473 /* Handle ternary operators that can appear in GIMPLE form. */
6474 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6475 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6476 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
6477 return fold_ternary_loc (loc, subcode,
6478 gimple_expr_type (stmt), op0, op1, op2);
6479 }
6480
6481 default:
6482 gcc_unreachable ();
6483 }
6484 }
6485
6486 case GIMPLE_CALL:
6487 {
6488 tree fn;
6489 gcall *call_stmt = as_a <gcall *> (stmt);
6490
6491 if (gimple_call_internal_p (stmt))
6492 {
6493 enum tree_code subcode = ERROR_MARK;
6494 switch (gimple_call_internal_fn (stmt))
6495 {
6496 case IFN_UBSAN_CHECK_ADD:
6497 subcode = PLUS_EXPR;
6498 break;
6499 case IFN_UBSAN_CHECK_SUB:
6500 subcode = MINUS_EXPR;
6501 break;
6502 case IFN_UBSAN_CHECK_MUL:
6503 subcode = MULT_EXPR;
6504 break;
6505 case IFN_BUILTIN_EXPECT:
6506 {
6507 tree arg0 = gimple_call_arg (stmt, 0);
6508 tree op0 = (*valueize) (arg0);
6509 if (TREE_CODE (op0) == INTEGER_CST)
6510 return op0;
6511 return NULL_TREE;
6512 }
6513 default:
6514 return NULL_TREE;
6515 }
6516 tree arg0 = gimple_call_arg (stmt, 0);
6517 tree arg1 = gimple_call_arg (stmt, 1);
6518 tree op0 = (*valueize) (arg0);
6519 tree op1 = (*valueize) (arg1);
6520
6521 if (TREE_CODE (op0) != INTEGER_CST
6522 || TREE_CODE (op1) != INTEGER_CST)
6523 {
6524 switch (subcode)
6525 {
6526 case MULT_EXPR:
6527 /* x * 0 = 0 * x = 0 without overflow. */
6528 if (integer_zerop (op0) || integer_zerop (op1))
6529 return build_zero_cst (TREE_TYPE (arg0));
6530 break;
6531 case MINUS_EXPR:
6532 /* y - y = 0 without overflow. */
6533 if (operand_equal_p (op0, op1, 0))
6534 return build_zero_cst (TREE_TYPE (arg0));
6535 break;
6536 default:
6537 break;
6538 }
6539 }
6540 tree res
6541 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
6542 if (res
6543 && TREE_CODE (res) == INTEGER_CST
6544 && !TREE_OVERFLOW (res))
6545 return res;
6546 return NULL_TREE;
6547 }
6548
6549 fn = (*valueize) (gimple_call_fn (stmt));
6550 if (TREE_CODE (fn) == ADDR_EXPR
6551 && fndecl_built_in_p (TREE_OPERAND (fn, 0))
6552 && gimple_builtin_call_types_compatible_p (stmt,
6553 TREE_OPERAND (fn, 0)))
6554 {
6555 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
6556 tree retval;
6557 unsigned i;
6558 for (i = 0; i < gimple_call_num_args (stmt); ++i)
6559 args[i] = (*valueize) (gimple_call_arg (stmt, i));
6560 retval = fold_builtin_call_array (loc,
6561 gimple_call_return_type (call_stmt),
6562 fn, gimple_call_num_args (stmt), args);
6563 if (retval)
6564 {
6565 /* fold_call_expr wraps the result inside a NOP_EXPR. */
6566 STRIP_NOPS (retval);
6567 retval = fold_convert (gimple_call_return_type (call_stmt),
6568 retval);
6569 }
6570 return retval;
6571 }
6572 return NULL_TREE;
6573 }
6574
6575 default:
6576 return NULL_TREE;
6577 }
6578 }
6579
6580 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6581 Returns NULL_TREE if folding to a constant is not possible, otherwise
6582 returns a constant according to is_gimple_min_invariant. */
6583
6584 tree
6585 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
6586 {
6587 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
6588 if (res && is_gimple_min_invariant (res))
6589 return res;
6590 return NULL_TREE;
6591 }
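
/* A minimal usage sketch (hypothetical, not part of GCC): the simplest
   valueization callback a caller can pass.  It supplies no SSA copy
   information, so only statements whose operands are already constant
   will fold.  */

static tree ATTRIBUTE_UNUSED
sketch_identity_valueize (tree name)
{
  return name;
}

/* A pass would then do something like
     tree cst = gimple_fold_stmt_to_constant (stmt, sketch_identity_valueize);
   and substitute CST only when it is non-NULL.  */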
6592
6593
6594 /* The following set of functions is supposed to fold references using
6595 their constant initializers. */
6596
6597 /* See if we can find the constructor defining the value of BASE.
6598 When the constructor is known at a constant offset (such as when
6599 BASE is array[40] and we know the constructor of the array),
6600 BIT_OFFSET is adjusted accordingly.
6601
6602 As a special case, return error_mark_node when constructor
6603 is not explicitly available, but it is known to be zero
6604 such as 'static const int a;'. */
6605 static tree
6606 get_base_constructor (tree base, poly_int64_pod *bit_offset,
6607 tree (*valueize)(tree))
6608 {
6609 poly_int64 bit_offset2, size, max_size;
6610 bool reverse;
6611
6612 if (TREE_CODE (base) == MEM_REF)
6613 {
6614 poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
6615 if (!boff.to_shwi (bit_offset))
6616 return NULL_TREE;
6617
6618 if (valueize
6619 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
6620 base = valueize (TREE_OPERAND (base, 0));
6621 if (!base || TREE_CODE (base) != ADDR_EXPR)
6622 return NULL_TREE;
6623 base = TREE_OPERAND (base, 0);
6624 }
6625 else if (valueize
6626 && TREE_CODE (base) == SSA_NAME)
6627 base = valueize (base);
6628
6629 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
6630 DECL_INITIAL. If BASE is a nested reference into another
6631 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
6632 the inner reference. */
6633 switch (TREE_CODE (base))
6634 {
6635 case VAR_DECL:
6636 case CONST_DECL:
6637 {
6638 tree init = ctor_for_folding (base);
6639
6640 /* Our semantics are the exact opposite of ctor_for_folding's:
6641 NULL means unknown, while error_mark_node means 0. */
6642 if (init == error_mark_node)
6643 return NULL_TREE;
6644 if (!init)
6645 return error_mark_node;
6646 return init;
6647 }
6648
6649 case VIEW_CONVERT_EXPR:
6650 return get_base_constructor (TREE_OPERAND (base, 0),
6651 bit_offset, valueize);
6652
6653 case ARRAY_REF:
6654 case COMPONENT_REF:
6655 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
6656 &reverse);
6657 if (!known_size_p (max_size) || maybe_ne (size, max_size))
6658 return NULL_TREE;
6659 *bit_offset += bit_offset2;
6660 return get_base_constructor (base, bit_offset, valueize);
6661
6662 case CONSTRUCTOR:
6663 return base;
6664
6665 default:
6666 if (CONSTANT_CLASS_P (base))
6667 return base;
6668
6669 return NULL_TREE;
6670 }
6671 }
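
/* Illustrative example (not from the GCC sources): for
   "static const int a[2] = {1, 2};" a MEM_REF based on &a resolves to
   the {1, 2} CONSTRUCTOR, whereas for "static const int b;" with no
   explicit initializer the function returns error_mark_node to signal
   a value known to be zero.  */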
6672
6673 /* CTOR is CONSTRUCTOR of an array type. Fold a reference of SIZE bits
6674 to the memory at bit OFFSET. When non-null, TYPE is the expected
6675 type of the reference; otherwise the type of the referenced element
6676 is used instead. When SIZE is zero, attempt to fold a reference to
6677 the entire element which OFFSET refers to. Increment *SUBOFF by
6678 the bit offset of the accessed element. */
6679
6680 static tree
6681 fold_array_ctor_reference (tree type, tree ctor,
6682 unsigned HOST_WIDE_INT offset,
6683 unsigned HOST_WIDE_INT size,
6684 tree from_decl,
6685 unsigned HOST_WIDE_INT *suboff)
6686 {
6687 offset_int low_bound;
6688 offset_int elt_size;
6689 offset_int access_index;
6690 tree domain_type = NULL_TREE;
6691 HOST_WIDE_INT inner_offset;
6692
6693 /* Compute low bound and elt size. */
6694 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
6695 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
6696 if (domain_type && TYPE_MIN_VALUE (domain_type))
6697 {
6698 /* Static constructors for variably sized objects make no sense. */
6699 if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
6700 return NULL_TREE;
6701 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
6702 }
6703 else
6704 low_bound = 0;
6705 /* Static constructors for variably sized objects make no sense. */
6706 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
6707 return NULL_TREE;
6708 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
6709
6710 /* When TYPE is non-null, verify that it specifies a constant-sized
6711 access not larger than the size of an array element. Avoid division
6712 by zero below when ELT_SIZE is zero, such as with the result of
6713 an initializer for a zero-length array or an empty struct. */
6714 if (elt_size == 0
6715 || (type
6716 && (!TYPE_SIZE_UNIT (type)
6717 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
6718 || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type)))))
6719 return NULL_TREE;
6720
6721 /* Compute the array index we look for. */
6722 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
6723 elt_size);
6724 access_index += low_bound;
6725
6726 /* And offset within the access. */
6727 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
6728
6729 /* See if the array field is large enough to span the whole access. We do not
6730 care to fold accesses spanning multiple array indexes. */
6731 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
6732 return NULL_TREE;
6733 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
6734 {
6735 if (!size && TREE_CODE (val) != CONSTRUCTOR)
6736 {
6737 /* For the final reference to the entire accessed element
6738 (SIZE is zero), reset INNER_OFFSET, disregard TYPE (which
6739 may be null) in favor of the type of the element, and set
6740 SIZE to the size of the accessed element. */
6741 inner_offset = 0;
6742 type = TREE_TYPE (val);
6743 size = elt_size.to_uhwi () * BITS_PER_UNIT;
6744 }
6745
6746 *suboff += (access_index * elt_size * BITS_PER_UNIT).to_uhwi ();
6747 return fold_ctor_reference (type, val, inner_offset, size, from_decl,
6748 suboff);
6749 }
6750
6751 /* Memory not explicitly mentioned in constructor is 0 (or
6752 the reference is out of range). */
6753 return type ? build_zero_cst (type) : NULL_TREE;
6754 }
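
/* Worked example of the index arithmetic above (illustrative only):
   for "static const int a[4] = {1, 2, 3, 4};" with 32-bit int, a read
   of SIZE 32 at bit OFFSET 64 gives access_index = (64/8)/4 = 2 and
   inner_offset = 0, so the fold yields the INTEGER_CST 3.  A read at
   OFFSET 80 would straddle two elements and return NULL_TREE.  */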
6755
6756 /* CTOR is CONSTRUCTOR of an aggregate or vector. Fold a reference
6757 of SIZE bits to the memory at bit OFFSET. When non-null, TYPE
6758 is the expected type of the reference; otherwise the type of
6759 the referenced member is used instead. When SIZE is zero,
6760 attempt to fold a reference to the entire member which OFFSET
6761 refers to. Increment *SUBOFF by the bit offset
6762 of the accessed member. */
6763
6764 static tree
6765 fold_nonarray_ctor_reference (tree type, tree ctor,
6766 unsigned HOST_WIDE_INT offset,
6767 unsigned HOST_WIDE_INT size,
6768 tree from_decl,
6769 unsigned HOST_WIDE_INT *suboff)
6770 {
6771 unsigned HOST_WIDE_INT cnt;
6772 tree cfield, cval;
6773
6774 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
6775 cval)
6776 {
6777 tree byte_offset = DECL_FIELD_OFFSET (cfield);
6778 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
6779 tree field_size = DECL_SIZE (cfield);
6780
6781 if (!field_size)
6782 {
6783 /* Determine the size of the flexible array member from
6784 the size of the initializer provided for it. */
6785 field_size = TYPE_SIZE (TREE_TYPE (cval));
6786 }
6787
6788 /* Variable-sized objects in static constructors make no sense,
6789 but field_size can be NULL for flexible array members. */
6790 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
6791 && TREE_CODE (byte_offset) == INTEGER_CST
6792 && (field_size != NULL_TREE
6793 ? TREE_CODE (field_size) == INTEGER_CST
6794 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
6795
6796 /* Compute bit offset of the field. */
6797 offset_int bitoffset
6798 = (wi::to_offset (field_offset)
6799 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
6800 /* Compute bit offset where the field ends. */
6801 offset_int bitoffset_end;
6802 if (field_size != NULL_TREE)
6803 bitoffset_end = bitoffset + wi::to_offset (field_size);
6804 else
6805 bitoffset_end = 0;
6806
6807 /* Compute the bit offset of the end of the desired access.
6808 As a special case, if the size of the desired access is
6809 zero, assume the access is to the entire field (and let
6810 the caller make any necessary adjustments by storing
6811 the actual bounds of the field in FIELDBOUNDS). */
6812 offset_int access_end = offset_int (offset);
6813 if (size)
6814 access_end += size;
6815 else
6816 access_end = bitoffset_end;
6817
6818 /* Is there any overlap between the desired access at
6819 [OFFSET, OFFSET+SIZE) and the offset of the field within
6820 the object at [BITOFFSET, BITOFFSET_END)? */
6821 if (wi::cmps (access_end, bitoffset) > 0
6822 && (field_size == NULL_TREE
6823 || wi::lts_p (offset, bitoffset_end)))
6824 {
6825 *suboff += bitoffset.to_uhwi ();
6826
6827 if (!size && TREE_CODE (cval) != CONSTRUCTOR)
6828 {
6829 /* For the final reference to the entire accessed member
6830 (SIZE is zero), reset OFFSET, disregard TYPE (which may
6831 be null) in favor of the type of the member, and set
6832 SIZE to the size of the accessed member. */
6833 offset = bitoffset.to_uhwi ();
6834 type = TREE_TYPE (cval);
6835 size = (bitoffset_end - bitoffset).to_uhwi ();
6836 }
6837
6838 /* We do have overlap. Now see if the field is large enough
6839 to cover the access. Give up for accesses that extend
6840 beyond the end of the object or that span multiple fields. */
6841 if (wi::cmps (access_end, bitoffset_end) > 0)
6842 return NULL_TREE;
6843 if (offset < bitoffset)
6844 return NULL_TREE;
6845
6846 offset_int inner_offset = offset_int (offset) - bitoffset;
6847 return fold_ctor_reference (type, cval,
6848 inner_offset.to_uhwi (), size,
6849 from_decl, suboff);
6850 }
6851 }
6852 /* Memory not explicitly mentioned in constructor is 0. */
6853 return type ? build_zero_cst (type) : NULL_TREE;
6854 }
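
/* Worked example of the overlap test above (illustrative only): for
   "static const struct { int i; int j; } s = {7, 9};" a 32-bit read at
   bit offset 32 overlaps only field J, which occupies [32, 64), so the
   recursion folds it to the INTEGER_CST 9.  A 64-bit read at offset 0
   extends past the end of field I and returns NULL_TREE, since accesses
   spanning multiple fields are not folded.  */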
6855
6856 /* CTOR is a value initializing memory. Fold a reference of TYPE and
6857 bit size POLY_SIZE to the memory at bit POLY_OFFSET. When SIZE
6858 is zero, attempt to fold a reference to the entire subobject
6859 which OFFSET refers to. This is used when folding accesses to
6860 string members of aggregates. When non-null, set *SUBOFF to
6861 the bit offset of the accessed subobject. */
6862
6863 tree
6864 fold_ctor_reference (tree type, tree ctor, const poly_uint64 &poly_offset,
6865 const poly_uint64 &poly_size, tree from_decl,
6866 unsigned HOST_WIDE_INT *suboff /* = NULL */)
6867 {
6868 tree ret;
6869
6870 /* We found the field with an exact match. */
6871 if (type
6872 && useless_type_conversion_p (type, TREE_TYPE (ctor))
6873 && known_eq (poly_offset, 0U))
6874 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
6875
6876 /* The remaining optimizations need a constant size and offset. */
6877 unsigned HOST_WIDE_INT size, offset;
6878 if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
6879 return NULL_TREE;
6880
6881 /* We are at the end of the walk; see if we can view-convert the
6882 result. */
6883 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
6884 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
6885 && !compare_tree_int (TYPE_SIZE (type), size)
6886 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
6887 {
6888 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
6889 if (ret)
6890 {
6891 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
6892 if (ret)
6893 STRIP_USELESS_TYPE_CONVERSION (ret);
6894 }
6895 return ret;
6896 }
6897 /* For constants and byte-aligned/sized reads try to go through
6898 native_encode/interpret. */
6899 if (CONSTANT_CLASS_P (ctor)
6900 && BITS_PER_UNIT == 8
6901 && offset % BITS_PER_UNIT == 0
6902 && size % BITS_PER_UNIT == 0
6903 && size <= MAX_BITSIZE_MODE_ANY_MODE)
6904 {
6905 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
6906 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
6907 offset / BITS_PER_UNIT);
6908 if (len > 0)
6909 return native_interpret_expr (type, buf, len);
6910 }
6911 if (TREE_CODE (ctor) == CONSTRUCTOR)
6912 {
6913 unsigned HOST_WIDE_INT dummy = 0;
6914 if (!suboff)
6915 suboff = &dummy;
6916
6917 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
6918 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
6919 return fold_array_ctor_reference (type, ctor, offset, size,
6920 from_decl, suboff);
6921
6922 return fold_nonarray_ctor_reference (type, ctor, offset, size,
6923 from_decl, suboff);
6924 }
6925
6926 return NULL_TREE;
6927 }
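
/* Illustrative example of the native_encode/native_interpret path above
   (not from the GCC sources): a byte-aligned read of the second byte of
   "static const unsigned int x = 0x01020304;" encodes the constant into
   a byte buffer in target byte order and reinterprets the requested
   slice, so the same code handles both endiannesses.  */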
6928
6929 /* Return the tree representing the element referenced by T if T is an
6930 ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
6931 names using VALUEIZE. Return NULL_TREE otherwise. */
6932
6933 tree
6934 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
6935 {
6936 tree ctor, idx, base;
6937 poly_int64 offset, size, max_size;
6938 tree tem;
6939 bool reverse;
6940
6941 if (TREE_THIS_VOLATILE (t))
6942 return NULL_TREE;
6943
6944 if (DECL_P (t))
6945 return get_symbol_constant_value (t);
6946
6947 tem = fold_read_from_constant_string (t);
6948 if (tem)
6949 return tem;
6950
6951 switch (TREE_CODE (t))
6952 {
6953 case ARRAY_REF:
6954 case ARRAY_RANGE_REF:
6955 /* Constant indexes are handled well by get_base_constructor.
6956 Only special-case variable offsets.
6957 FIXME: This code can't handle nested references with variable indexes
6958 (they will be handled only by iteration of ccp). Perhaps we can bring
6959 get_ref_base_and_extent here and make it use a valueize callback. */
6960 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
6961 && valueize
6962 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
6963 && poly_int_tree_p (idx))
6964 {
6965 tree low_bound, unit_size;
6966
6967 /* If the resulting bit-offset is constant, track it. */
6968 if ((low_bound = array_ref_low_bound (t),
6969 poly_int_tree_p (low_bound))
6970 && (unit_size = array_ref_element_size (t),
6971 tree_fits_uhwi_p (unit_size)))
6972 {
6973 poly_offset_int woffset
6974 = wi::sext (wi::to_poly_offset (idx)
6975 - wi::to_poly_offset (low_bound),
6976 TYPE_PRECISION (TREE_TYPE (idx)));
6977 woffset *= tree_to_uhwi (unit_size);
6978 woffset *= BITS_PER_UNIT;
6979 if (woffset.to_shwi (&offset))
6980 {
6981 base = TREE_OPERAND (t, 0);
6982 ctor = get_base_constructor (base, &offset, valueize);
6983 /* Empty constructor. Always fold to 0. */
6984 if (ctor == error_mark_node)
6985 return build_zero_cst (TREE_TYPE (t));
6986 /* Out-of-bounds array access. Value is undefined,
6987 but don't fold. */
6988 if (maybe_lt (offset, 0))
6989 return NULL_TREE;
6990 /* We cannot determine ctor. */
6991 if (!ctor)
6992 return NULL_TREE;
6993 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
6994 tree_to_uhwi (unit_size)
6995 * BITS_PER_UNIT,
6996 base);
6997 }
6998 }
6999 }
7000 /* Fallthru. */
7001
7002 case COMPONENT_REF:
7003 case BIT_FIELD_REF:
7004 case TARGET_MEM_REF:
7005 case MEM_REF:
7006 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
7007 ctor = get_base_constructor (base, &offset, valueize);
7008
7009 /* Empty constructor. Always fold to 0. */
7010 if (ctor == error_mark_node)
7011 return build_zero_cst (TREE_TYPE (t));
7012 /* We do not know precise address. */
7013 if (!known_size_p (max_size) || maybe_ne (max_size, size))
7014 return NULL_TREE;
7015 /* We cannot determine ctor. */
7016 if (!ctor)
7017 return NULL_TREE;
7018
7019 /* Out-of-bounds array access. Value is undefined, but don't fold. */
7020 if (maybe_lt (offset, 0))
7021 return NULL_TREE;
7022
7023 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
7024 base);
7025
7026 case REALPART_EXPR:
7027 case IMAGPART_EXPR:
7028 {
7029 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
7030 if (c && TREE_CODE (c) == COMPLEX_CST)
7031 return fold_build1_loc (EXPR_LOCATION (t),
7032 TREE_CODE (t), TREE_TYPE (t), c);
7033 break;
7034 }
7035
7036 default:
7037 break;
7038 }
7039
7040 return NULL_TREE;
7041 }
7042
7043 tree
7044 fold_const_aggregate_ref (tree t)
7045 {
7046 return fold_const_aggregate_ref_1 (t, NULL);
7047 }
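
/* A minimal usage sketch (hypothetical, not part of GCC): folding a read
   from a constant global without any SSA valueization.  */

static tree ATTRIBUTE_UNUSED
sketch_fold_constant_read (tree ref)
{
  /* For an ARRAY_REF such as a[1] with
     "static const int a[2] = {1, 2};" this returns the INTEGER_CST 2;
     it returns NULL_TREE whenever the initializer cannot be
     determined.  */
  return fold_const_aggregate_ref (ref);
}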
7048
7049 /* Look up the virtual method with index TOKEN in the virtual table V
7050 at OFFSET.
7051 If CAN_REFER is non-NULL, set it to false if the method is not
7052 referable or if the virtual table is ill-formed (such as one rewritten
7053 by a non-C++-produced symbol); otherwise just return NULL_TREE in that case. */
7054
7055 tree
7056 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
7057 tree v,
7058 unsigned HOST_WIDE_INT offset,
7059 bool *can_refer)
7060 {
7061 tree vtable = v, init, fn;
7062 unsigned HOST_WIDE_INT size;
7063 unsigned HOST_WIDE_INT elt_size, access_index;
7064 tree domain_type;
7065
7066 if (can_refer)
7067 *can_refer = true;
7068
7069 /* First of all, double-check that we have a virtual table. */
7070 if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
7071 {
7072 /* Pass down that we lost track of the target. */
7073 if (can_refer)
7074 *can_refer = false;
7075 return NULL_TREE;
7076 }
7077
7078 init = ctor_for_folding (v);
7079
7080 /* The virtual tables should always be born with constructors,
7081 and we should always assume that they are available for
7082 folding. At the moment we do not stream them in all cases,
7083 but it should never happen that the ctor seems unreachable. */
7084 gcc_assert (init);
7085 if (init == error_mark_node)
7086 {
7087 /* Pass down that we lost track of the target. */
7088 if (can_refer)
7089 *can_refer = false;
7090 return NULL_TREE;
7091 }
7092 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
7093 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
7094 offset *= BITS_PER_UNIT;
7095 offset += token * size;
7096
7097 /* Look up the value in the constructor, which is assumed to be an array.
7098 This is equivalent to
7099 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
7100 offset, size, NULL);
7101 but in constant time. We expect that the frontend produced a simple
7102 array without indexed initializers. */
7103
7104 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
7105 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
7106 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
7107 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
7108
7109 access_index = offset / BITS_PER_UNIT / elt_size;
7110 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
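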
7111
7112 /* The C++ FE can now produce indexed fields, and we check if the indexes
7113 match. */
7114 if (access_index < CONSTRUCTOR_NELTS (init))
7115 {
7116 fn = CONSTRUCTOR_ELT (init, access_index)->value;
7117 tree idx = CONSTRUCTOR_ELT (init, access_index)->index;
7118 gcc_checking_assert (!idx || tree_to_uhwi (idx) == access_index);
7119 STRIP_NOPS (fn);
7120 }
7121 else
7122 fn = NULL;
7123
7124 /* For a type-inconsistent program we may end up looking up a virtual method
7125 in a virtual table that does not contain TOKEN entries. We may overrun
7126 the virtual table and pick up a constant or an RTTI info pointer.
7127 In any case the call is undefined. */
7128 if (!fn
7129 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
7130 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
7131 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
7132 else
7133 {
7134 fn = TREE_OPERAND (fn, 0);
7135
7136 /* When the cgraph node is missing and the function is not public, we cannot
7137 devirtualize. This can happen in WHOPR when the actual method
7138 ends up in another partition, because we found the devirtualization
7139 possibility too late. */
7140 if (!can_refer_decl_in_current_unit_p (fn, vtable))
7141 {
7142 if (can_refer)
7143 {
7144 *can_refer = false;
7145 return fn;
7146 }
7147 return NULL_TREE;
7148 }
7149 }
7150
7151 /* Make sure we create a cgraph node for functions we'll reference.
7152 They can be non-existent if the reference comes from an entry
7153 of an external vtable, for example. */
7154 cgraph_node::get_create (fn);
7155
7156 return fn;
7157 }
7158
7159 /* Return a declaration of the function which an OBJ_TYPE_REF references. TOKEN
7160 is the integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
7161 KNOWN_BINFO carries the binfo describing the true type of
7162 OBJ_TYPE_REF_OBJECT(REF).
7163 If CAN_REFER is non-NULL, set it to false if the method
7164 is not referable or if the virtual table is ill-formed (such as rewritten
7165 by a non-C++-produced symbol); if CAN_REFER is NULL, just return
7166 NULL_TREE in that case. */
7166
7167 tree
7168 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
7169 bool *can_refer)
7170 {
7171 unsigned HOST_WIDE_INT offset;
7172 tree v;
7173
7174 v = BINFO_VTABLE (known_binfo);
7175 /* If there is no virtual method table, leave the OBJ_TYPE_REF alone. */
7176 if (!v)
7177 return NULL_TREE;
7178
7179 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
7180 {
7181 if (can_refer)
7182 *can_refer = false;
7183 return NULL_TREE;
7184 }
7185 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
7186 }
7187
7188 /* Given a pointer value T, return a simplified version of an
7189 indirection through T, or NULL_TREE if no simplification is
7190 possible. Note that the resulting type may differ from the
7191 pointed-to type, as long as the two are still compatible
7192 from the langhooks point of view. */
7193
7194 tree
7195 gimple_fold_indirect_ref (tree t)
7196 {
7197 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
7198 tree sub = t;
7199 tree subtype;
7200
7201 STRIP_NOPS (sub);
7202 subtype = TREE_TYPE (sub);
7203 if (!POINTER_TYPE_P (subtype)
7204 || TYPE_REF_CAN_ALIAS_ALL (ptype))
7205 return NULL_TREE;
7206
7207 if (TREE_CODE (sub) == ADDR_EXPR)
7208 {
7209 tree op = TREE_OPERAND (sub, 0);
7210 tree optype = TREE_TYPE (op);
7211 /* *&p => p */
7212 if (useless_type_conversion_p (type, optype))
7213 return op;
7214
7215 /* *(foo *)&fooarray => fooarray[0] */
7216 if (TREE_CODE (optype) == ARRAY_TYPE
7217 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
7218 && useless_type_conversion_p (type, TREE_TYPE (optype)))
7219 {
7220 tree type_domain = TYPE_DOMAIN (optype);
7221 tree min_val = size_zero_node;
7222 if (type_domain && TYPE_MIN_VALUE (type_domain))
7223 min_val = TYPE_MIN_VALUE (type_domain);
7224 if (TREE_CODE (min_val) == INTEGER_CST)
7225 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
7226 }
7227 /* *(foo *)&complexfoo => __real__ complexfoo */
7228 else if (TREE_CODE (optype) == COMPLEX_TYPE
7229 && useless_type_conversion_p (type, TREE_TYPE (optype)))
7230 return fold_build1 (REALPART_EXPR, type, op);
7231 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
7232 else if (TREE_CODE (optype) == VECTOR_TYPE
7233 && useless_type_conversion_p (type, TREE_TYPE (optype)))
7234 {
7235 tree part_width = TYPE_SIZE (type);
7236 tree index = bitsize_int (0);
7237 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
7238 }
7239 }
7240
7241 /* *(p + CST) -> ... */
7242 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
7243 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
7244 {
7245 tree addr = TREE_OPERAND (sub, 0);
7246 tree off = TREE_OPERAND (sub, 1);
7247 tree addrtype;
7248
7249 STRIP_NOPS (addr);
7250 addrtype = TREE_TYPE (addr);
7251
7252 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
7253 if (TREE_CODE (addr) == ADDR_EXPR
7254 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
7255 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
7256 && tree_fits_uhwi_p (off))
7257 {
7258 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
7259 tree part_width = TYPE_SIZE (type);
7260 unsigned HOST_WIDE_INT part_widthi
7261 = tree_to_shwi (part_width) / BITS_PER_UNIT;
7262 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
7263 tree index = bitsize_int (indexi);
7264 if (known_lt (offset / part_widthi,
7265 TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
7266 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
7267 part_width, index);
7268 }
7269
7270 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
7271 if (TREE_CODE (addr) == ADDR_EXPR
7272 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
7273 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
7274 {
7275 tree size = TYPE_SIZE_UNIT (type);
7276 if (tree_int_cst_equal (size, off))
7277 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
7278 }
7279
7280 /* *(p + CST) -> MEM_REF <p, CST>. */
7281 if (TREE_CODE (addr) != ADDR_EXPR
7282 || DECL_P (TREE_OPERAND (addr, 0)))
7283 return fold_build2 (MEM_REF, type,
7284 addr,
7285 wide_int_to_tree (ptype, wi::to_wide (off)));
7286 }
7287
7288 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
7289 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
7290 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
7291 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
7292 {
7293 tree type_domain;
7294 tree min_val = size_zero_node;
7295 tree osub = sub;
7296 sub = gimple_fold_indirect_ref (sub);
7297 if (! sub)
7298 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
7299 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
7300 if (type_domain && TYPE_MIN_VALUE (type_domain))
7301 min_val = TYPE_MIN_VALUE (type_domain);
7302 if (TREE_CODE (min_val) == INTEGER_CST)
7303 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
7304 }
7305
7306 return NULL_TREE;
7307 }
7308
7309 /* Return true if CODE is an operation that, when operating on signed
7310 integer types, has undefined behavior on overflow and
7311 can be expressed with unsigned arithmetic. */
7312
7313 bool
7314 arith_code_with_undefined_signed_overflow (tree_code code)
7315 {
7316 switch (code)
7317 {
7318 case ABS_EXPR:
7319 case PLUS_EXPR:
7320 case MINUS_EXPR:
7321 case MULT_EXPR:
7322 case NEGATE_EXPR:
7323 case POINTER_PLUS_EXPR:
7324 return true;
7325 default:
7326 return false;
7327 }
7328 }
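
/* A sketch of a typical caller, which guards the rewrite below with this
predicate (lhs, stmt and gsi are illustrative names assumed to be in scope):
if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
&& TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (lhs))
&& arith_code_with_undefined_signed_overflow (gimple_assign_rhs_code (stmt)))
gsi_replace_with_seq (&gsi, rewrite_to_defined_overflow (stmt), true); */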
7329
7330 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
7331 operation that can be transformed to unsigned arithmetic by converting
7332 its operands, carrying out the operation in the corresponding unsigned
7333 type and converting the result back to the original type.
7334
7335 Returns a sequence of statements that replace STMT and also contain
7336 a modified form of STMT itself. */
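
/* For example, assuming _2 and _3 have type int, the assignment
_1 = _2 + _3 is rewritten into the sequence
_4 = (unsigned int) _2;
_5 = (unsigned int) _3;
_6 = _4 + _5;
_1 = (int) _6;
where the addition can no longer overflow in an undefined way. */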
7337
7338 gimple_seq
7339 rewrite_to_defined_overflow (gimple *stmt)
7340 {
7341 if (dump_file && (dump_flags & TDF_DETAILS))
7342 {
7343 fprintf (dump_file, "rewriting stmt with undefined signed "
7344 "overflow ");
7345 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
7346 }
7347
7348 tree lhs = gimple_assign_lhs (stmt);
7349 tree type = unsigned_type_for (TREE_TYPE (lhs));
7350 gimple_seq stmts = NULL;
7351 if (gimple_assign_rhs_code (stmt) == ABS_EXPR)
7352 gimple_assign_set_rhs_code (stmt, ABSU_EXPR);
7353 else
7354 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
7355 {
7356 tree op = gimple_op (stmt, i);
7357 op = gimple_convert (&stmts, type, op);
7358 gimple_set_op (stmt, i, op);
7359 }
7360 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
7361 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
7362 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
7363 gimple_seq_add_stmt (&stmts, stmt);
7364 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
7365 gimple_seq_add_stmt (&stmts, cvt);
7366
7367 return stmts;
7368 }
7369
7370
7371 /* The valueization hook we use for the gimple_build API simplification.
7372 This makes us match fold_buildN behavior by combining only with
7373 statements in the sequence(s) we are currently building. */
7374
7375 static tree
7376 gimple_build_valueize (tree op)
7377 {
7378 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
7379 return op;
7380 return NULL_TREE;
7381 }
7382
7383 /* Build the expression CODE OP0 of type TYPE with location LOC,
7384 simplifying it first if possible. Returns the built
7385 expression value and appends statements possibly defining it
7386 to SEQ. */
7387
7388 tree
7389 gimple_build (gimple_seq *seq, location_t loc,
7390 enum tree_code code, tree type, tree op0)
7391 {
7392 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
7393 if (!res)
7394 {
7395 res = create_tmp_reg_or_ssa_name (type);
7396 gimple *stmt;
7397 if (code == REALPART_EXPR
7398 || code == IMAGPART_EXPR
7399 || code == VIEW_CONVERT_EXPR)
7400 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
7401 else
7402 stmt = gimple_build_assign (res, code, op0);
7403 gimple_set_location (stmt, loc);
7404 gimple_seq_add_stmt_without_update (seq, stmt);
7405 }
7406 return res;
7407 }
7408
7409 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
7410 simplifying it first if possible. Returns the built
7411 expression value and appends statements possibly defining it
7412 to SEQ. */
7413
7414 tree
7415 gimple_build (gimple_seq *seq, location_t loc,
7416 enum tree_code code, tree type, tree op0, tree op1)
7417 {
7418 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
7419 if (!res)
7420 {
7421 res = create_tmp_reg_or_ssa_name (type);
7422 gimple *stmt = gimple_build_assign (res, code, op0, op1);
7423 gimple_set_location (stmt, loc);
7424 gimple_seq_add_stmt_without_update (seq, stmt);
7425 }
7426 return res;
7427 }
7428
7429 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
7430 simplifying it first if possible. Returns the built
7431 expression value and appends statements possibly defining it
7432 to SEQ. */
7433
7434 tree
7435 gimple_build (gimple_seq *seq, location_t loc,
7436 enum tree_code code, tree type, tree op0, tree op1, tree op2)
7437 {
7438 tree res = gimple_simplify (code, type, op0, op1, op2,
7439 seq, gimple_build_valueize);
7440 if (!res)
7441 {
7442 res = create_tmp_reg_or_ssa_name (type);
7443 gimple *stmt;
7444 if (code == BIT_FIELD_REF)
7445 stmt = gimple_build_assign (res, code,
7446 build3 (code, type, op0, op1, op2));
7447 else
7448 stmt = gimple_build_assign (res, code, op0, op1, op2);
7449 gimple_set_location (stmt, loc);
7450 gimple_seq_add_stmt_without_update (seq, stmt);
7451 }
7452 return res;
7453 }
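
/* A hypothetical caller of the overloads above could build
tem = (int) x; res = tem * 2 while simplifying on the fly:
gimple_seq seq = NULL;
tree tem = gimple_build (&seq, loc, NOP_EXPR, integer_type_node, x);
tree res = gimple_build (&seq, loc, MULT_EXPR, integer_type_node, tem,
build_int_cst (integer_type_node, 2));
gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT); */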
7454
7455 /* Build the call FN (ARG0) with a result of type TYPE
7456 (or no result if TYPE is void) with location LOC,
7457 simplifying it first if possible. Returns the built
7458 expression value (or NULL_TREE if TYPE is void) and appends
7459 statements possibly defining it to SEQ. */
7460
7461 tree
7462 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7463 tree type, tree arg0)
7464 {
7465 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
7466 if (!res)
7467 {
7468 gcall *stmt;
7469 if (internal_fn_p (fn))
7470 stmt = gimple_build_call_internal (as_internal_fn (fn), 1, arg0);
7471 else
7472 {
7473 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7474 stmt = gimple_build_call (decl, 1, arg0);
7475 }
7476 if (!VOID_TYPE_P (type))
7477 {
7478 res = create_tmp_reg_or_ssa_name (type);
7479 gimple_call_set_lhs (stmt, res);
7480 }
7481 gimple_set_location (stmt, loc);
7482 gimple_seq_add_stmt_without_update (seq, stmt);
7483 }
7484 return res;
7485 }
7486
7487 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
7488 (or no result if TYPE is void) with location LOC,
7489 simplifying it first if possible. Returns the built
7490 expression value (or NULL_TREE if TYPE is void) and appends
7491 statements possibly defining it to SEQ. */
7492
7493 tree
7494 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7495 tree type, tree arg0, tree arg1)
7496 {
7497 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
7498 if (!res)
7499 {
7500 gcall *stmt;
7501 if (internal_fn_p (fn))
7502 stmt = gimple_build_call_internal (as_internal_fn (fn), 2, arg0, arg1);
7503 else
7504 {
7505 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7506 stmt = gimple_build_call (decl, 2, arg0, arg1);
7507 }
7508 if (!VOID_TYPE_P (type))
7509 {
7510 res = create_tmp_reg_or_ssa_name (type);
7511 gimple_call_set_lhs (stmt, res);
7512 }
7513 gimple_set_location (stmt, loc);
7514 gimple_seq_add_stmt_without_update (seq, stmt);
7515 }
7516 return res;
7517 }
7518
7519 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
7520 (or no result if TYPE is void) with location LOC,
7521 simplifying it first if possible. Returns the built
7522 expression value (or NULL_TREE if TYPE is void) and appends
7523 statements possibly defining it to SEQ. */
7524
7525 tree
7526 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7527 tree type, tree arg0, tree arg1, tree arg2)
7528 {
7529 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
7530 seq, gimple_build_valueize);
7531 if (!res)
7532 {
7533 gcall *stmt;
7534 if (internal_fn_p (fn))
7535 stmt = gimple_build_call_internal (as_internal_fn (fn),
7536 3, arg0, arg1, arg2);
7537 else
7538 {
7539 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7540 stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
7541 }
7542 if (!VOID_TYPE_P (type))
7543 {
7544 res = create_tmp_reg_or_ssa_name (type);
7545 gimple_call_set_lhs (stmt, res);
7546 }
7547 gimple_set_location (stmt, loc);
7548 gimple_seq_add_stmt_without_update (seq, stmt);
7549 }
7550 return res;
7551 }
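
/* Likewise, a hypothetical caller could emit res = sqrt (arg) with
tree res = gimple_build (&seq, loc, CFN_SQRT, double_type_node, arg);
which folds to a constant outright when ARG is one. */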
7552
7553 /* Build the conversion (TYPE) OP with a result of type TYPE
7554 with location LOC if such conversion is necessary in GIMPLE,
7555 simplifying it first.
7556 Returns the built expression value and appends
7557 statements possibly defining it to SEQ. */
7558
7559 tree
7560 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
7561 {
7562 if (useless_type_conversion_p (type, TREE_TYPE (op)))
7563 return op;
7564 return gimple_build (seq, loc, NOP_EXPR, type, op);
7565 }
7566
7567 /* Build the conversion (ptrofftype) OP with a result of a type
7568 compatible with ptrofftype with location LOC if such conversion
7569 is necessary in GIMPLE, simplifying it first.
7570 Returns the built expression value and appends
7571 statements possibly defining it to SEQ. */
7572
7573 tree
7574 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
7575 {
7576 if (ptrofftype_p (TREE_TYPE (op)))
7577 return op;
7578 return gimple_convert (seq, loc, sizetype, op);
7579 }
7580
7581 /* Build a vector of type TYPE in which each element has the value OP.
7582 Return a gimple value for the result, appending any new statements
7583 to SEQ. */
7584
7585 tree
7586 gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
7587 tree op)
7588 {
7589 if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
7590 && !CONSTANT_CLASS_P (op))
7591 return gimple_build (seq, loc, VEC_DUPLICATE_EXPR, type, op);
7592
7593 tree res, vec = build_vector_from_val (type, op);
7594 if (is_gimple_val (vec))
7595 return vec;
7596 if (gimple_in_ssa_p (cfun))
7597 res = make_ssa_name (type);
7598 else
7599 res = create_tmp_reg (type);
7600 gimple *stmt = gimple_build_assign (res, vec);
7601 gimple_set_location (stmt, loc);
7602 gimple_seq_add_stmt_without_update (seq, stmt);
7603 return res;
7604 }
7605
7606 /* Build a vector from BUILDER, handling the case in which some elements
7607 are non-constant. Return a gimple value for the result, appending any
7608 new instructions to SEQ.
7609
7610 BUILDER must not have a stepped encoding on entry. This is because
7611 the function is not geared up to handle the arithmetic that would
7612 be needed in the variable case, and any code building a vector that
7613 is known to be constant should use BUILDER->build () directly. */
7614
7615 tree
7616 gimple_build_vector (gimple_seq *seq, location_t loc,
7617 tree_vector_builder *builder)
7618 {
7619 gcc_assert (builder->nelts_per_pattern () <= 2);
7620 unsigned int encoded_nelts = builder->encoded_nelts ();
7621 for (unsigned int i = 0; i < encoded_nelts; ++i)
7622 if (!TREE_CONSTANT ((*builder)[i]))
7623 {
7624 tree type = builder->type ();
7625 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
7626 vec<constructor_elt, va_gc> *v;
7627 vec_alloc (v, nelts);
7628 for (i = 0; i < nelts; ++i)
7629 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));
7630
7631 tree res;
7632 if (gimple_in_ssa_p (cfun))
7633 res = make_ssa_name (type);
7634 else
7635 res = create_tmp_reg (type);
7636 gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
7637 gimple_set_location (stmt, loc);
7638 gimple_seq_add_stmt_without_update (seq, stmt);
7639 return res;
7640 }
7641 return builder->build ();
7642 }
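
/* A hypothetical caller could build a four-element vector from gimple
values elts[0..3], some of which may be SSA names:
tree_vector_builder builder (vectype, 4, 1);
for (unsigned i = 0; i < 4; ++i)
builder.quick_push (elts[i]);
tree vec = gimple_build_vector (&seq, loc, &builder);
falling back to a CONSTRUCTOR assignment in the non-constant case. */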
7643
7644 /* Return true if the result of assignment STMT is known to be non-negative.
7645 If the return value is based on the assumption that signed overflow is
7646 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7647 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7648
7649 static bool
7650 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7651 int depth)
7652 {
7653 enum tree_code code = gimple_assign_rhs_code (stmt);
7654 switch (get_gimple_rhs_class (code))
7655 {
7656 case GIMPLE_UNARY_RHS:
7657 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7658 gimple_expr_type (stmt),
7659 gimple_assign_rhs1 (stmt),
7660 strict_overflow_p, depth);
7661 case GIMPLE_BINARY_RHS:
7662 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7663 gimple_expr_type (stmt),
7664 gimple_assign_rhs1 (stmt),
7665 gimple_assign_rhs2 (stmt),
7666 strict_overflow_p, depth);
7667 case GIMPLE_TERNARY_RHS:
7668 return false;
7669 case GIMPLE_SINGLE_RHS:
7670 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
7671 strict_overflow_p, depth);
7672 case GIMPLE_INVALID_RHS:
7673 break;
7674 }
7675 gcc_unreachable ();
7676 }
7677
7678 /* Return true if the return value of call STMT is known to be non-negative.
7679 If the return value is based on the assumption that signed overflow is
7680 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7681 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7682
7683 static bool
7684 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7685 int depth)
7686 {
7687 tree arg0 = gimple_call_num_args (stmt) > 0 ?
7688 gimple_call_arg (stmt, 0) : NULL_TREE;
7689 tree arg1 = gimple_call_num_args (stmt) > 1 ?
7690 gimple_call_arg (stmt, 1) : NULL_TREE;
7691
7692 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
7693 gimple_call_combined_fn (stmt),
7694 arg0,
7695 arg1,
7696 strict_overflow_p, depth);
7697 }
7698
7699 /* Return true if the result of PHI node STMT is known to be non-negative.
7700 If the return value is based on the assumption that signed overflow is
7701 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7702 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7703
7704 static bool
7705 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7706 int depth)
7707 {
7708 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7709 {
7710 tree arg = gimple_phi_arg_def (stmt, i);
7711 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
7712 return false;
7713 }
7714 return true;
7715 }
7716
7717 /* Return true if STMT is known to compute a non-negative value.
7718 If the return value is based on the assumption that signed overflow is
7719 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7720 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7721
7722 bool
7723 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7724 int depth)
7725 {
7726 switch (gimple_code (stmt))
7727 {
7728 case GIMPLE_ASSIGN:
7729 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
7730 depth);
7731 case GIMPLE_CALL:
7732 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
7733 depth);
7734 case GIMPLE_PHI:
7735 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
7736 depth);
7737 default:
7738 return false;
7739 }
7740 }
7741
7742 /* Return true if the floating-point value computed by assignment STMT
7743 is known to have an integer value. We also allow +Inf, -Inf and NaN
7744 to be considered integer values. Return false for signaling NaN.
7745
7746 DEPTH is the current nesting depth of the query. */
7747
7748 static bool
7749 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
7750 {
7751 enum tree_code code = gimple_assign_rhs_code (stmt);
7752 switch (get_gimple_rhs_class (code))
7753 {
7754 case GIMPLE_UNARY_RHS:
7755 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
7756 gimple_assign_rhs1 (stmt), depth);
7757 case GIMPLE_BINARY_RHS:
7758 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
7759 gimple_assign_rhs1 (stmt),
7760 gimple_assign_rhs2 (stmt), depth);
7761 case GIMPLE_TERNARY_RHS:
7762 return false;
7763 case GIMPLE_SINGLE_RHS:
7764 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
7765 case GIMPLE_INVALID_RHS:
7766 break;
7767 }
7768 gcc_unreachable ();
7769 }
7770
7771 /* Return true if the floating-point value computed by call STMT is known
7772 to have an integer value. We also allow +Inf, -Inf and NaN to be
7773 considered integer values. Return false for signaling NaN.
7774
7775 DEPTH is the current nesting depth of the query. */
7776
7777 static bool
7778 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
7779 {
7780 tree arg0 = (gimple_call_num_args (stmt) > 0
7781 ? gimple_call_arg (stmt, 0)
7782 : NULL_TREE);
7783 tree arg1 = (gimple_call_num_args (stmt) > 1
7784 ? gimple_call_arg (stmt, 1)
7785 : NULL_TREE);
7786 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
7787 arg0, arg1, depth);
7788 }
7789
7790 /* Return true if the floating-point result of phi STMT is known to have
7791 an integer value. We also allow +Inf, -Inf and NaN to be considered
7792 integer values. Return false for signaling NaN.
7793
7794 DEPTH is the current nesting depth of the query. */
7795
7796 static bool
7797 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
7798 {
7799 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7800 {
7801 tree arg = gimple_phi_arg_def (stmt, i);
7802 if (!integer_valued_real_single_p (arg, depth + 1))
7803 return false;
7804 }
7805 return true;
7806 }
7807
7808 /* Return true if the floating-point value computed by STMT is known
7809 to have an integer value. We also allow +Inf, -Inf and NaN to be
7810 considered integer values. Return false for signaling NaN.
7811
7812 DEPTH is the current nesting depth of the query. */
7813
7814 bool
7815 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
7816 {
7817 switch (gimple_code (stmt))
7818 {
7819 case GIMPLE_ASSIGN:
7820 return gimple_assign_integer_valued_real_p (stmt, depth);
7821 case GIMPLE_CALL:
7822 return gimple_call_integer_valued_real_p (stmt, depth);
7823 case GIMPLE_PHI:
7824 return gimple_phi_integer_valued_real_p (stmt, depth);
7825 default:
7826 return false;
7827 }
7828 }