/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2015 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "alias.h"
#include "symtab.h"
#include "tree.h"
#include "fold-const.h"
#include "stringpool.h"
#include "hard-reg-set.h"
#include "function.h"
#include "rtl.h"
#include "flags.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "stor-layout.h"
#include "dumpfile.h"
#include "bitmap.h"
#include "predict.h"
#include "dominance.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-expr.h"
#include "gimple.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "tree-ssanames.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "tree-ssa-propagate.h"
#include "target.h"
#include "cgraph.h"
#include "ipa-utils.h"
#include "gimple-pretty-print.h"
#include "tree-ssa-address.h"
#include "langhooks.h"
#include "gimplify-me.h"
#include "dbgcnt.h"
#include "builtins.h"
#include "output.h"
#include "tree-eh.h"
#include "gimple-match.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"

/* Return true when DECL can be referenced from the current unit.
   FROM_DECL (if non-null) specifies the constructor of the variable
   DECL was taken from.
   We can get declarations that are not possible to reference for
   various reasons:

     1) When analyzing C++ virtual tables.
        C++ virtual tables do have known constructors even
        when they are keyed to another compilation unit.
        Those tables can contain pointers to methods and vars
        in other units.  Those methods have both STATIC and EXTERNAL
        set.
     2) In WHOPR mode devirtualization might lead to a reference
        to a method that was partitioned elsewhere.
        In this case we have a static VAR_DECL or FUNCTION_DECL
        that has no corresponding callgraph/varpool node
        declaring the body.
     3) COMDAT functions referred to by external vtables that
        we devirtualize only during the final compilation stage.
        At this time we have already decided that we will not output
        the function body and thus we can't reference the symbol
        directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
    return true;

  /* Static objects can be referred to only if they have not been
     optimized out yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      /* Before we start optimizing unreachable code we can be sure all
         static objects are defined.  */
      if (symtab->function_flags_ready)
        return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
        return false;
      node = dyn_cast <cgraph_node *> (snode);
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from the initializer of
     an external var or a var that has been optimized out.  */
  if (!from_decl
      || TREE_CODE (from_decl) != VAR_DECL
      || (!DECL_EXTERNAL (from_decl)
          && (vnode = varpool_node::get (from_decl)) != NULL
          && vnode->definition)
      || (flag_ltrans
          && (vnode = varpool_node::get (from_decl)) != NULL
          && vnode->in_other_partition))
    return true;
  /* We are folding a reference from an external vtable.  The vtable may
     refer to a symbol keyed to another compilation unit.  The other
     compilation unit may be in a separate DSO and the symbol may be
     hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When the function is public, we can always introduce a new reference.
     The exception is COMDAT functions, where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have a COMDAT.  We are going to check if we still have a definition
     or if the definition is going to be output in another partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual
     functions it may be tempting to not necessarily give up because the
     copy will be output elsewhere when the corresponding vtable is output.
     This is however not possible - the ABI specifies that COMDATs are
     output in units where they are used and when the other unit was
     compiled with LTO it is possible that the vtable was kept public
     while the function itself was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
          && (!snode->in_other_partition
              || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->global.inlined_to;
}
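
/* Illustrative example for case 3 above (an assumption for exposition,
   not taken from the GCC sources): a unit may see only an external
   vtable for a class S whose virtual S::f is a COMDAT emitted
   elsewhere; once we have decided not to output the body of S::f in
   this unit, a devirtualized call cannot reference S::f directly.  */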

/* CVAL is a value taken from the DECL_INITIAL of a variable.  Try to
   transform it into an acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies the variable whose constructor
   contains CVAL.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
        cval = build1_loc (EXPR_LOCATION (cval),
                           ADDR_EXPR, TREE_TYPE (ptr),
                           fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
                                        ptr,
                                        fold_convert (ptr_type_node,
                                                      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
        {
          base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
          if (base)
            TREE_OPERAND (cval, 0) = base;
        }
      else
        base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
        return NULL_TREE;

      if ((TREE_CODE (base) == VAR_DECL
           || TREE_CODE (base) == FUNCTION_DECL)
          && !can_refer_decl_in_current_unit_p (base, from_decl))
        return NULL_TREE;
      if (TREE_CODE (base) == VAR_DECL)
        TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
        {
          /* Make sure we create a cgraph node for functions we'll reference.
             They can be non-existent if the reference comes from an entry
             of an external vtable for example.  */
          cgraph_node::get_create (base);
        }
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
        cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
        cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  if (TREE_OVERFLOW_P (cval))
    return drop_tree_overflow (cval);
  return orig_cval;
}
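
/* For instance (illustrative, not from the original sources), a
   constructor value of the form  (char *) &var + 4  is canonicalized
   above into the is_gimple_min_invariant form  &MEM[(char *)&var + 4B],
   provided VAR can still be referenced from the current unit.  */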

/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  tree val = ctor_for_folding (sym);
  if (val != error_mark_node)
    {
      if (val)
        {
          val = canonicalize_constructor_val (unshare_expr (val), sym);
          if (val && is_gimple_min_invariant (val))
            return val;
          else
            return NULL_TREE;
        }
      /* Variables declared 'const' without an initializer
         have zero as the initializer if they may not be
         overridden at link or run time.  */
      if (!val
          && is_gimple_reg_type (TREE_TYPE (sym)))
        return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}
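
/* A minimal sketch of the effect (assumed example):

     static const int answer = 42;

   A use of 'answer' folds to the INTEGER_CST 42, and a const-qualified
   scalar without an initializer that cannot be overridden at link or
   run time folds to a zero constant of its type.  */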


/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree result;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
                           TREE_CODE (expr),
                           TREE_TYPE (expr),
                           TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
           && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
                             TREE_CODE (expr),
                             TREE_TYPE (expr),
                             TREE_OPERAND (expr, 0),
                             TREE_OPERAND (expr, 1),
                             TREE_OPERAND (expr, 2));

  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  return NULL_TREE;
}


/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        if (TREE_CLOBBER_P (rhs))
          return NULL_TREE;

        if (REFERENCE_CLASS_P (rhs))
          return maybe_fold_reference (rhs, false);

        else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
          {
            tree val = OBJ_TYPE_REF_EXPR (rhs);
            if (is_gimple_min_invariant (val))
              return val;
            else if (flag_devirtualize && virtual_method_call_p (rhs))
              {
                bool final;
                vec <cgraph_node *>targets
                  = possible_polymorphic_call_targets (rhs, stmt, &final);
                if (final && targets.length () <= 1 && dbg_cnt (devirt))
                  {
                    if (dump_enabled_p ())
                      {
                        location_t loc = gimple_location_safe (stmt);
                        dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
                                         "resolving virtual function address "
                                         "reference to function %s\n",
                                         targets.length () == 1
                                         ? targets[0]->name ()
                                         : "NULL");
                      }
                    if (targets.length () == 1)
                      {
                        val = fold_convert (TREE_TYPE (val),
                                            build_fold_addr_expr_loc
                                              (loc, targets[0]->decl));
                        STRIP_USELESS_TYPE_CONVERSION (val);
                      }
                    else
                      /* We cannot use __builtin_unreachable here because it
                         cannot have its address taken.  */
                      val = build_int_cst (TREE_TYPE (val), 0);
                    return val;
                  }
              }

          }
        else if (TREE_CODE (rhs) == ADDR_EXPR)
          {
            tree ref = TREE_OPERAND (rhs, 0);
            tree tem = maybe_fold_reference (ref, true);
            if (tem
                && TREE_CODE (tem) == MEM_REF
                && integer_zerop (TREE_OPERAND (tem, 1)))
              result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
            else if (tem)
              result = fold_convert (TREE_TYPE (rhs),
                                     build_fold_addr_expr_loc (loc, tem));
            else if (TREE_CODE (ref) == MEM_REF
                     && integer_zerop (TREE_OPERAND (ref, 1)))
              result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
          }

        else if (TREE_CODE (rhs) == CONSTRUCTOR
                 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
                 && (CONSTRUCTOR_NELTS (rhs)
                     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
          {
            /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
            unsigned i;
            tree val;

            FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
              if (TREE_CODE (val) != INTEGER_CST
                  && TREE_CODE (val) != REAL_CST
                  && TREE_CODE (val) != FIXED_CST)
                return NULL_TREE;

            return build_vector_from_ctor (TREE_TYPE (rhs),
                                           CONSTRUCTOR_ELTS (rhs));
          }

        else if (DECL_P (rhs))
          return get_symbol_constant_value (rhs);

        /* If we couldn't fold the RHS, hand over to the generic
           fold routines.  */
        if (result == NULL_TREE)
          result = fold (rhs);

        /* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
           that may have been added by fold, and "useless" type
           conversions that might now be apparent due to propagation.  */
        STRIP_USELESS_TYPE_CONVERSION (result);

        if (result != rhs && valid_gimple_rhs_p (result))
          return result;

        return NULL_TREE;
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to canonicalize for boolean-typed X the comparisons
         X == 0, X == 1, X != 0, and X != 1.  */
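      /* For example (illustrative, not from the original sources):
         with a boolean-typed _1,
           _2 = _1 == 1;  and  _2 = _1 != 0;  become  _2 = _1;
         while
           _2 = _1 == 0;  and  _2 = _1 != 1;  become  _2 = ~_1;
         for one-bit precision types, and  _2 = _1 ^ 1;  otherwise.  */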
      if (gimple_assign_rhs_code (stmt) == EQ_EXPR
          || gimple_assign_rhs_code (stmt) == NE_EXPR)
        {
          tree lhs = gimple_assign_lhs (stmt);
          tree op1 = gimple_assign_rhs1 (stmt);
          tree op2 = gimple_assign_rhs2 (stmt);
          tree type = TREE_TYPE (op1);

          /* Check whether the comparison operands are of the same boolean
             type as the result type.
             Check that the second operand is an integer constant with value
             one or zero.  */
          if (TREE_CODE (op2) == INTEGER_CST
              && (integer_zerop (op2) || integer_onep (op2))
              && useless_type_conversion_p (TREE_TYPE (lhs), type))
            {
              enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
              bool is_logical_not = false;

              /* X == 0 and X != 1 are a logical-not of X;
                 X == 1 and X != 0 are X itself.  */
              if ((cmp_code == EQ_EXPR && integer_zerop (op2))
                  || (cmp_code == NE_EXPR && integer_onep (op2)))
                is_logical_not = true;

              if (is_logical_not == false)
                result = op1;
              /* Only for X of one-bit precision is the transformation
                 !X -> ~X valid.  */
              else if (TYPE_PRECISION (type) == 1)
                result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
                                     type, op1);
              /* Otherwise we use !X -> X ^ 1.  */
              else
                result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
                                     type, op1, build_int_cst (type, 1));

            }
        }

      if (!result)
        result = fold_binary_loc (loc, subcode,
                                  TREE_TYPE (gimple_assign_lhs (stmt)),
                                  gimple_assign_rhs1 (stmt),
                                  gimple_assign_rhs2 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
            return result;
        }
      break;

    case GIMPLE_TERNARY_RHS:
      /* Try to fold a conditional expression.  */
      if (gimple_assign_rhs_code (stmt) == COND_EXPR)
        {
          tree op0 = gimple_assign_rhs1 (stmt);
          tree tem;
          bool set = false;
          location_t cond_loc = gimple_location (stmt);

          if (COMPARISON_CLASS_P (op0))
            {
              fold_defer_overflow_warnings ();
              tem = fold_binary_loc (cond_loc,
                                     TREE_CODE (op0), TREE_TYPE (op0),
                                     TREE_OPERAND (op0, 0),
                                     TREE_OPERAND (op0, 1));
              /* This is actually a conditional expression, not a GIMPLE
                 conditional statement; however, the valid_gimple_rhs_p
                 test still applies.  */
              set = (tem && is_gimple_condexpr (tem)
                     && valid_gimple_rhs_p (tem));
              fold_undefer_overflow_warnings (set, stmt, 0);
            }
          else if (is_gimple_min_invariant (op0))
            {
              tem = op0;
              set = true;
            }
          else
            return NULL_TREE;

          if (set)
            result = fold_build3_loc (cond_loc, COND_EXPR,
                                      TREE_TYPE (gimple_assign_lhs (stmt)), tem,
                                      gimple_assign_rhs2 (stmt),
                                      gimple_assign_rhs3 (stmt));
        }

      if (!result)
        result = fold_ternary_loc (loc, subcode,
                                   TREE_TYPE (gimple_assign_lhs (stmt)),
                                   gimple_assign_rhs1 (stmt),
                                   gimple_assign_rhs2 (stmt),
                                   gimple_assign_rhs3 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
            return result;
        }
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}

/* Attempt to fold a conditional statement.  Return true if any changes were
   made.  We only attempt to fold the condition expression, and do not perform
   any transformation that would require alteration of the cfg.  It is
   assumed that the operands have been previously folded.  */

static bool
fold_gimple_cond (gcond *stmt)
{
  tree result = fold_binary_loc (gimple_location (stmt),
                                 gimple_cond_code (stmt),
                                 boolean_type_node,
                                 gimple_cond_lhs (stmt),
                                 gimple_cond_rhs (stmt));

  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
        {
          gimple_cond_set_condition_from_tree (stmt, result);
          return true;
        }
    }

  return false;
}
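
/* For example (illustrative): a condition such as  if (7 < 42)  folds
   to boolean_true_node and the statement is rewritten as the trivially
   true  if (1 != 0);  removing the then-dead edge is left to CFG
   cleanup.  */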


/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement statements' location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
           && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
          || (is_gimple_call (new_stmt)
              && (gimple_call_flags (new_stmt)
                  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
        {
          tree vdef;
          if (!laststore)
            vdef = gimple_vdef (stmt);
          else
            vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
          gimple_set_vdef (new_stmt, vdef);
          if (vdef && TREE_CODE (vdef) == SSA_NAME)
            SSA_NAME_DEF_STMT (vdef) = new_stmt;
          laststore = new_stmt;
        }
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with the exact
         SSA name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
        gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
        reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store, release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
          && TREE_CODE (vdef) == SSA_NAME)
        {
          unlink_stmt_vdef (stmt);
          release_ssa_name (vdef);
        }
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}

/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
         which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
        {
          pop_gimplify_context (NULL);
          if (gimple_in_ssa_p (cfun))
            {
              unlink_stmt_vdef (stmt);
              release_defs (stmt);
            }
          gsi_replace (si_p, gimple_build_nop (), true);
          return;
        }
    }
  else
    {
      tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
                                       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  gsi_replace_with_seq_vops (si_p, stmts);
}
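
/* A sketch of a typical use (assumed example, not from the original
   sources): when the call
     res_5 = strlen (&"abc"[0]);
   has been folded to the constant tree '3', passing that tree as EXPR
   here gimplifies it and replaces the call with  res_5 = 3;  with the
   virtual operand chain rewired by gsi_replace_with_seq_vops above.  */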


/* Replace the call at *GSI with the gimple value VAL.  */

static void
replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
{
  gimple stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  gimple repl;
  if (lhs)
    {
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
        val = fold_convert (TREE_TYPE (lhs), val);
      repl = gimple_build_assign (lhs, val);
    }
  else
    repl = gimple_build_nop ();
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      unlink_stmt_vdef (stmt);
      release_ssa_name (vdef);
    }
  gsi_replace (gsi, repl, true);
}

/* Replace the call at *GSI with the new call REPL and fold that
   again.  */

static void
replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple repl)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
  gimple_set_location (repl, gimple_location (stmt));
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
    {
      gimple_set_vdef (repl, gimple_vdef (stmt));
      gimple_set_vuse (repl, gimple_vuse (stmt));
      SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
    }
  gsi_replace (gsi, repl, true);
  fold_stmt (gsi);
}

/* Return true if VAR is a VAR_DECL or a component thereof.  */

static bool
var_decl_component_p (tree var)
{
  tree inner = var;
  while (handled_component_p (inner))
    inner = TREE_OPERAND (inner, 0);
  return SSA_VAR_P (inner);
}

/* Fold a function call to builtin mem{{,p}cpy,move}.  Return
   false if no simplification can be made.
   If ENDP is 0, return DEST (like memcpy).
   If ENDP is 1, return DEST+LEN (like mempcpy).
   If ENDP is 2, return DEST+LEN-1 (like stpcpy).
   If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
   (memmove).  */

static bool
gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
                               tree dest, tree src, int endp)
{
  gimple stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree len = gimple_call_arg (stmt, 2);
  tree destvar, srcvar;
  location_t loc = gimple_location (stmt);

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      gimple repl;
      if (gimple_call_lhs (stmt))
        repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
      else
        repl = gimple_build_nop ();
      tree vdef = gimple_vdef (stmt);
      if (vdef && TREE_CODE (vdef) == SSA_NAME)
        {
          unlink_stmt_vdef (stmt);
          release_ssa_name (vdef);
        }
      gsi_replace (gsi, repl, true);
      return true;
    }

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    {
      unlink_stmt_vdef (stmt);
      if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
        release_ssa_name (gimple_vdef (stmt));
      if (!lhs)
        {
          gsi_replace (gsi, gimple_build_nop (), true);
          return true;
        }
      goto done;
    }
  else
    {
      tree srctype, desttype;
      unsigned int src_align, dest_align;
      tree off0;

      /* Build accesses at offset zero with a ref-all character type.  */
      off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
                                                         ptr_mode, true), 0);

      /* If we can perform the copy efficiently by first doing all loads
         and then all stores, inline it that way.  Currently "efficiently"
         means that we can load all the memory into a single integer
         register, which is what MOVE_MAX gives us.  */
      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (tree_fits_uhwi_p (len)
          && compare_tree_int (len, MOVE_MAX) <= 0
          /* ??? Don't transform copies from strings with known length;
             this confuses tree-ssa-strlen.c.  This doesn't handle
             the case in gcc.dg/strlenopt-8.c which is XFAILed for that
             reason.  */
          && !c_strlen (src, 2))
        {
          unsigned ilen = tree_to_uhwi (len);
          if (exact_log2 (ilen) != -1)
            {
              tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
              if (type
                  && TYPE_MODE (type) != BLKmode
                  && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
                      == ilen * 8)
                  /* If the destination pointer is not aligned we must be able
                     to emit an unaligned store.  */
                  && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
                      || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type), dest_align)))
                {
                  tree srctype = type;
                  tree desttype = type;
                  if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
                    srctype = build_aligned_type (type, src_align);
                  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
                  tree tem = fold_const_aggregate_ref (srcmem);
                  if (tem)
                    srcmem = tem;
                  else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
                           && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
                                                     src_align))
                    srcmem = NULL_TREE;
                  if (srcmem)
                    {
                      gimple new_stmt;
                      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
                        {
                          new_stmt = gimple_build_assign (NULL_TREE, srcmem);
                          if (gimple_in_ssa_p (cfun))
                            srcmem = make_ssa_name (TREE_TYPE (srcmem),
                                                    new_stmt);
                          else
                            srcmem = create_tmp_reg (TREE_TYPE (srcmem));
                          gimple_assign_set_lhs (new_stmt, srcmem);
                          gimple_set_vuse (new_stmt, gimple_vuse (stmt));
                          gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
                        }
                      if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
                        desttype = build_aligned_type (type, dest_align);
                      new_stmt
                        = gimple_build_assign (fold_build2 (MEM_REF, desttype,
                                                            dest, off0),
                                               srcmem);
                      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
                      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
                      if (gimple_vdef (new_stmt)
                          && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
                        SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
                      if (!lhs)
                        {
                          gsi_replace (gsi, new_stmt, true);
                          return true;
                        }
                      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
                      goto done;
                    }
                }
            }
        }

      if (endp == 3)
        {
          /* Both DEST and SRC must be pointer types.
             ??? This is what old code did.  Is the testing for pointer types
             really mandatory?

             If either SRC is readonly or length is 1, we can use memcpy.  */
          if (!dest_align || !src_align)
            return false;
          if (readonly_data_expr (src)
              || (tree_fits_uhwi_p (len)
                  && (MIN (src_align, dest_align) / BITS_PER_UNIT
                      >= tree_to_uhwi (len))))
            {
              tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
              if (!fn)
                return false;
              gimple_call_set_fndecl (stmt, fn);
              gimple_call_set_arg (stmt, 0, dest);
              gimple_call_set_arg (stmt, 1, src);
              fold_stmt (gsi);
              return true;
            }

          /* If *src and *dest can't overlap, optimize into memcpy as well.  */
          if (TREE_CODE (src) == ADDR_EXPR
              && TREE_CODE (dest) == ADDR_EXPR)
            {
              tree src_base, dest_base, fn;
              HOST_WIDE_INT src_offset = 0, dest_offset = 0;
              HOST_WIDE_INT size = -1;
              HOST_WIDE_INT maxsize = -1;

              srcvar = TREE_OPERAND (src, 0);
              src_base = get_ref_base_and_extent (srcvar, &src_offset,
                                                  &size, &maxsize);
              destvar = TREE_OPERAND (dest, 0);
              dest_base = get_ref_base_and_extent (destvar, &dest_offset,
                                                   &size, &maxsize);
              if (tree_fits_uhwi_p (len))
                maxsize = tree_to_uhwi (len);
              else
                maxsize = -1;
              src_offset /= BITS_PER_UNIT;
              dest_offset /= BITS_PER_UNIT;
              if (SSA_VAR_P (src_base)
                  && SSA_VAR_P (dest_base))
                {
                  if (operand_equal_p (src_base, dest_base, 0)
                      && ranges_overlap_p (src_offset, maxsize,
                                           dest_offset, maxsize))
                    return false;
                }
              else if (TREE_CODE (src_base) == MEM_REF
                       && TREE_CODE (dest_base) == MEM_REF)
                {
                  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
                                         TREE_OPERAND (dest_base, 0), 0))
                    return false;
                  offset_int off = mem_ref_offset (src_base) + src_offset;
                  if (!wi::fits_shwi_p (off))
                    return false;
                  src_offset = off.to_shwi ();

                  off = mem_ref_offset (dest_base) + dest_offset;
                  if (!wi::fits_shwi_p (off))
                    return false;
                  dest_offset = off.to_shwi ();
                  if (ranges_overlap_p (src_offset, maxsize,
                                        dest_offset, maxsize))
                    return false;
                }
              else
                return false;

              fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
              if (!fn)
                return false;
              gimple_call_set_fndecl (stmt, fn);
              gimple_call_set_arg (stmt, 0, dest);
              gimple_call_set_arg (stmt, 1, src);
              fold_stmt (gsi);
              return true;
            }

          /* If the destination and source do not alias, optimize into
             memcpy as well.  */
          if ((is_gimple_min_invariant (dest)
               || TREE_CODE (dest) == SSA_NAME)
              && (is_gimple_min_invariant (src)
                  || TREE_CODE (src) == SSA_NAME))
            {
              ao_ref destr, srcr;
              ao_ref_init_from_ptr_and_size (&destr, dest, len);
              ao_ref_init_from_ptr_and_size (&srcr, src, len);
              if (!refs_may_alias_p_1 (&destr, &srcr, false))
                {
                  tree fn;
                  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
                  if (!fn)
                    return false;
                  gimple_call_set_fndecl (stmt, fn);
                  gimple_call_set_arg (stmt, 0, dest);
                  gimple_call_set_arg (stmt, 1, src);
                  fold_stmt (gsi);
                  return true;
                }
            }

          return false;
        }

      if (!tree_fits_shwi_p (len))
        return false;
      /* FIXME:
         This logic loses for arguments like (type *)malloc (sizeof (type)),
         since we strip the casts of the (up to VOID) return value from
         malloc.  Perhaps we ought to inherit the type from a non-VOID
         argument here?  */
      STRIP_NOPS (src);
      STRIP_NOPS (dest);
      if (!POINTER_TYPE_P (TREE_TYPE (src))
          || !POINTER_TYPE_P (TREE_TYPE (dest)))
        return false;
      /* In the following try to find a type that is most natural to be
         used for the memcpy source and destination and that allows
         the most optimization when memcpy is turned into a plain assignment
         using that type.  In theory we could always use a char[len] type
         but that only gains us that the destination and source possibly
         will no longer have their address taken.  */
      /* As we fold (void *)(p + CST) to (void *)p + CST, undo this here.  */
      if (TREE_CODE (src) == POINTER_PLUS_EXPR)
        {
          tree tem = TREE_OPERAND (src, 0);
          STRIP_NOPS (tem);
          if (tem != TREE_OPERAND (src, 0))
            src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
        }
      if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
        {
          tree tem = TREE_OPERAND (dest, 0);
          STRIP_NOPS (tem);
          if (tem != TREE_OPERAND (dest, 0))
            dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
        }
      srctype = TREE_TYPE (TREE_TYPE (src));
      if (TREE_CODE (srctype) == ARRAY_TYPE
          && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
        {
          srctype = TREE_TYPE (srctype);
          STRIP_NOPS (src);
          src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
        }
      desttype = TREE_TYPE (TREE_TYPE (dest));
      if (TREE_CODE (desttype) == ARRAY_TYPE
          && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
        {
          desttype = TREE_TYPE (desttype);
          STRIP_NOPS (dest);
          dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
        }
      if (TREE_ADDRESSABLE (srctype)
          || TREE_ADDRESSABLE (desttype))
        return false;

      /* Make sure we are not copying using a floating-point mode or
         a type whose size possibly does not match its precision.  */
      if (FLOAT_MODE_P (TYPE_MODE (desttype))
          || TREE_CODE (desttype) == BOOLEAN_TYPE
          || TREE_CODE (desttype) == ENUMERAL_TYPE)
        desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
      if (FLOAT_MODE_P (TYPE_MODE (srctype))
          || TREE_CODE (srctype) == BOOLEAN_TYPE
          || TREE_CODE (srctype) == ENUMERAL_TYPE)
        srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
      if (!srctype)
        srctype = desttype;
      if (!desttype)
        desttype = srctype;
      if (!srctype)
        return false;
      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (dest_align < TYPE_ALIGN (desttype)
          || src_align < TYPE_ALIGN (srctype))
        return false;

      destvar = dest;
      STRIP_NOPS (destvar);
      if (TREE_CODE (destvar) == ADDR_EXPR
          && var_decl_component_p (TREE_OPERAND (destvar, 0))
          && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
        destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
      else
        destvar = NULL_TREE;

      srcvar = src;
      STRIP_NOPS (srcvar);
      if (TREE_CODE (srcvar) == ADDR_EXPR
          && var_decl_component_p (TREE_OPERAND (srcvar, 0))
          && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
        {
          if (!destvar
              || src_align >= TYPE_ALIGN (desttype))
            srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
                                  srcvar, off0);
          else if (!STRICT_ALIGNMENT)
            {
              srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
                                            src_align);
              srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
            }
          else
            srcvar = NULL_TREE;
        }
      else
        srcvar = NULL_TREE;

      if (srcvar == NULL_TREE && destvar == NULL_TREE)
        return false;

      if (srcvar == NULL_TREE)
        {
          STRIP_NOPS (src);
          if (src_align >= TYPE_ALIGN (desttype))
            srcvar = fold_build2 (MEM_REF, desttype, src, off0);
          else
            {
              if (STRICT_ALIGNMENT)
                return false;
              srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
                                            src_align);
              srcvar = fold_build2 (MEM_REF, srctype, src, off0);
            }
        }
      else if (destvar == NULL_TREE)
        {
          STRIP_NOPS (dest);
          if (dest_align >= TYPE_ALIGN (srctype))
            destvar = fold_build2 (MEM_REF, srctype, dest, off0);
          else
            {
              if (STRICT_ALIGNMENT)
                return false;
              desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
                                             dest_align);
              destvar = fold_build2 (MEM_REF, desttype, dest, off0);
            }
        }

      gimple new_stmt;
      if (is_gimple_reg_type (TREE_TYPE (srcvar)))
        {
          new_stmt = gimple_build_assign (NULL_TREE, srcvar);
          if (gimple_in_ssa_p (cfun))
            srcvar = make_ssa_name (TREE_TYPE (srcvar), new_stmt);
          else
            srcvar = create_tmp_reg (TREE_TYPE (srcvar));
          gimple_assign_set_lhs (new_stmt, srcvar);
          gimple_set_vuse (new_stmt, gimple_vuse (stmt));
          gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
        }
      new_stmt = gimple_build_assign (destvar, srcvar);
      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
      if (gimple_vdef (new_stmt)
          && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
        SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
      if (!lhs)
        {
          gsi_replace (gsi, new_stmt, true);
          return true;
        }
      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
    }

done:
  if (endp == 0 || endp == 3)
    len = NULL_TREE;
  else if (endp == 2)
    len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
                           ssize_int (1));
  if (endp == 2 || endp == 1)
    dest = fold_build_pointer_plus_loc (loc, dest, len);

  dest = force_gimple_operand_gsi (gsi, dest, false, NULL_TREE, true,
                                   GSI_SAME_STMT);
  gimple repl = gimple_build_assign (lhs, dest);
  gsi_replace (gsi, repl, true);
  return true;
}
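
/* Illustrative folds performed above (assuming a 4-byte int and
   sufficiently aligned pointers; not from the original sources):

     memcpy (d, s, 4)    becomes   *(int *) d = *(int *) s;
     memmove (d, s, n)   becomes   memcpy (d, s, n)  when *s and *d
                                   provably cannot overlap;
     mempcpy (d, s, 0)   becomes   d.  */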

/* Fold a function call to builtin memset or bzero at *GSI setting the
   memory of size LEN to the byte value C.  Return whether a
   simplification was made.  */

static bool
gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
{
  gimple stmt = gsi_stmt (*gsi);
  tree etype;
  unsigned HOST_WIDE_INT length, cval;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
      return true;
    }

  if (! tree_fits_uhwi_p (len))
    return false;

  if (TREE_CODE (c) != INTEGER_CST)
    return false;

  tree dest = gimple_call_arg (stmt, 0);
  tree var = dest;
  if (TREE_CODE (var) != ADDR_EXPR)
    return false;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return false;

  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  if (!INTEGRAL_TYPE_P (etype)
      && !POINTER_TYPE_P (etype))
    return false;

  if (! var_decl_component_p (var))
    return false;

  length = tree_to_uhwi (len);
  if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
      || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
    return false;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return false;

  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
        return false;

      cval = TREE_INT_CST_LOW (c);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }

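  /* At this point CVAL holds the single byte C replicated across a host
     wide int, e.g. 0xABABABABABABABAB for C == 0xAB with a 64-bit
     HOST_WIDE_INT.  The final (cval << 31) << 1 is equivalent to
     cval << 32 but avoids an undefined full-width shift when
     HOST_WIDE_INT is only 32 bits wide.  */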
  var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
  gimple store = gimple_build_assign (var, build_int_cst_type (etype, cval));
  gimple_set_vuse (store, gimple_vuse (stmt));
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      gimple_set_vdef (store, gimple_vdef (stmt));
      SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
    }
  gsi_insert_before (gsi, store, GSI_SAME_STMT);
  if (gimple_call_lhs (stmt))
    {
      gimple asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
      gsi_replace (gsi, asgn, true);
    }
  else
    {
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (gsi);
      gsi_remove (&gsi2, true);
    }

  return true;
}


/* Return the string length, maximum string length, or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If *LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine, or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if the string length should be returned, 1 for the maximum
   string length and 2 for the maximum value ARG can have.  */

static bool
get_maxval_strlen (tree arg, tree *length, bitmap *visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
          && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
        {
          tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
          if (TREE_CODE (aop0) == INDIRECT_REF
              && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
            return get_maxval_strlen (TREE_OPERAND (aop0, 0),
                                      length, visited, type);
        }

      if (type == 2)
        {
          val = arg;
          if (TREE_CODE (val) != INTEGER_CST
              || tree_int_cst_sgn (val) < 0)
            return false;
        }
      else
        val = c_strlen (arg, 1);
      if (!val)
        return false;

      if (*length)
        {
          if (type > 0)
            {
              if (TREE_CODE (*length) != INTEGER_CST
                  || TREE_CODE (val) != INTEGER_CST)
                return false;

              if (tree_int_cst_lt (*length, val))
                *length = val;
              return true;
            }
          else if (simple_cst_equal (val, *length) != 1)
            return false;
        }

      *length = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
         constant length or come from another SSA_NAME with a constant
         length.  */
      if (gimple_assign_single_p (def_stmt)
          || gimple_assign_unary_nop_p (def_stmt))
        {
          tree rhs = gimple_assign_rhs1 (def_stmt);
          return get_maxval_strlen (rhs, length, visited, type);
        }
      else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
        {
          tree op2 = gimple_assign_rhs2 (def_stmt);
          tree op3 = gimple_assign_rhs3 (def_stmt);
          return get_maxval_strlen (op2, length, visited, type)
                 && get_maxval_strlen (op3, length, visited, type);
        }
      return false;

    case GIMPLE_PHI:
      {
        /* All the arguments of the PHI node must have the same constant
           length.  */
        unsigned i;

        for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
          {
            tree arg = gimple_phi_arg (def_stmt, i)->def;

            /* If this PHI has itself as an argument, we cannot
               determine the string length of this argument.  However,
               if we can find a constant string length for the other
               PHI args then we can still be sure that this is a
               constant string length.  So be optimistic and just
               continue with the next argument.  */
            if (arg == gimple_phi_result (def_stmt))
              continue;

            if (!get_maxval_strlen (arg, length, visited, type))
              return false;
          }
      }
      return true;

    default:
      return false;
    }
}

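/* Determine and return the string length, maximum string length, or
   maximum value of ARG, according to TYPE as described for the worker
   above; return NULL_TREE if it cannot be determined.  */
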
tree
get_maxval_strlen (tree arg, int type)
{
  bitmap visited = NULL;
  tree len = NULL_TREE;
  if (!get_maxval_strlen (arg, &len, &visited, type))
    len = NULL_TREE;
  if (visited)
    BITMAP_FREE (visited);

  return len;
}


/* Fold a function call to builtin strcpy with arguments DEST and SRC.
   Return false if no simplification can be made.  */

static bool
gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
                            tree dest, tree src)
{
  location_t loc = gimple_location (gsi_stmt (*gsi));
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (optimize_function_for_size_p (cfun))
    return false;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  tree len = get_maxval_strlen (src, 0);
  if (!len)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
  len = force_gimple_operand_gsi (gsi, len, true,
                                  NULL_TREE, true, GSI_SAME_STMT);
  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
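
/* For example (illustrative): when SRC has the known string length 3,
     strcpy (dest, src);
   is rewritten above into
     memcpy (dest, src, 4);
   copying the three characters plus the terminating NUL, unless we are
   optimizing the function for size.  */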

/* Fold a function call to builtin strncpy with arguments DEST, SRC,
   and LEN.  Return false if no simplification can be made.  */

static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
                             tree dest, tree src, tree len)
{
  location_t loc = gimple_location (gsi_stmt (*gsi));
  tree fn;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, 0);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (slen, len))
    return false;

  /* OK, transform into builtin memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
                                  NULL_TREE, true, GSI_SAME_STMT);
  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}

/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   Return false if no simplification was possible; otherwise transform
   the call in place and return true.  When the string length of SRC is
   known and we are optimizing for speed, strcat (DST, SRC) is split
   into a strlen (DST) followed by a memcpy into DST + strlen (DST),
   which can later be expanded by pieces.  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, 0);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  if (gimple_in_ssa_p (cfun))
    newdst = make_ssa_name (size_type_node);
  else
    newdst = create_tmp_reg (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
                        build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
         stmt iterator to the memcpy call.
         ??? We can't use gsi_for_stmt as that doesn't work when the
         CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
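
/* A sketch of the emitted sequence (SSA names assumed for
   illustration): for strcat (dst, src) with known strlen (src) == N,
   the code above produces
     _1 = strlen (dst);
     _2 = dst p+ _1;
     memcpy (_2, src, N + 1);
   so that the copy may later be expanded by pieces.  */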

/* Fold a call to the __strcat_chk builtin.  DEST, SRC, and SIZE
   are the arguments to the call.  */

static bool
gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree size = gimple_call_arg (stmt, 2);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "", return DEST.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
    return false;

  /* If __builtin_strcat_chk is used, assume strcat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRCAT);
  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}

/* Simplify a call to the strncat builtin.  */

static bool
gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dst = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  const char *p = c_getstr (src);

  /* If the requested length is zero, or the src parameter string
     length is zero, return the dst parameter.  */
  if (integer_zerop (len) || (p && *p == '\0'))
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* If the requested len is greater than or equal to the string
     length, call strcat.  */
  if (TREE_CODE (len) == INTEGER_CST && p
      && compare_tree_int (len, strlen (p)) >= 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);

      /* If the replacement _DECL isn't initialized, don't do the
         transformation.  */
      if (!fn)
        return false;

      gcall *repl = gimple_build_call (fn, 2, dst, src);
      replace_call_with_call_and_fold (gsi, repl);
      return true;
    }

  return false;
}

/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
   LEN, and SIZE.  */

static bool
gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);
  tree size = gimple_call_arg (stmt, 3);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "" or if LEN is 0, return DEST.  */
  if ((p && *p == '\0')
      || integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  if (! integer_all_onesp (size))
    {
      tree src_len = c_strlen (src, 1);
      if (src_len
          && tree_fits_uhwi_p (src_len)
          && tree_fits_uhwi_p (len)
          && ! tree_int_cst_lt (len, src_len))
        {
          /* If LEN >= strlen (SRC), optimize into __strcat_chk.  */
          fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
          if (!fn)
            return false;

          gimple repl = gimple_build_call (fn, 3, dest, src, size);
          replace_call_with_call_and_fold (gsi, repl);
          return true;
        }
      return false;
    }

  /* If __builtin_strncat_chk is used, assume strncat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}

/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
   to the call.  UNLOCKED is true if this is actually a call to
   fputs_unlocked.  Return false if no simplification was possible.  */

static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
                           tree arg0, tree arg1,
                           bool unlocked)
{
  gimple stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
                         ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
                         : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
                          ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
                          : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, 0);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely.  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
        const char *p = c_getstr (arg0);
        if (p != NULL)
          {
            if (!fn_fputc)
              return false;

            gimple repl = gimple_build_call (fn_fputc, 2,
                                             build_int_cst
                                               (integer_type_node, p[0]), arg1);
            replace_call_with_call_and_fold (gsi, repl);
            return true;
          }
      }
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
        /* If optimizing for size keep fputs.  */
        if (optimize_function_for_size_p (cfun))
          return false;
        /* New argument list transforming fputs(string, stream) to
           fwrite(string, 1, len, stream).  */
        if (!fn_fwrite)
          return false;

        gimple repl = gimple_build_call (fn_fwrite, 4, arg0,
                                         size_one_node, len, arg1);
        replace_call_with_call_and_fold (gsi, repl);
        return true;
      }
    default:
      gcc_unreachable ();
    }
  return false;
}
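
/* Illustrative folds (assuming the fputs result is unused; not from
   the original sources):
     fputs ("", f)      is deleted entirely;
     fputs ("x", f)     becomes  fputc ('x', f);
     fputs ("abc", f)   becomes  fwrite ("abc", 1, 3, f)  unless we
                        optimize for size.  */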
1805
1806 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
1807 DEST, SRC, LEN, and SIZE are the arguments to the call.
1808 FCODE is the BUILT_IN_* code of the builtin. SIZE is the object
1809 size of DEST as computed by __builtin_object_size, or all ones
1810 if the size is unknown. Return false if no simplification was made. */
1811
1812 static bool
1813 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
1814 tree dest, tree src, tree len, tree size,
1815 enum built_in_function fcode)
1816 {
1817 gimple stmt = gsi_stmt (*gsi);
1818 location_t loc = gimple_location (stmt);
1819 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1820 tree fn;
1821
1822 /* If SRC and DEST are the same (and not volatile), return DEST
1823 (resp. DEST+LEN for __mempcpy_chk). */
1824 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
1825 {
1826 if (fcode != BUILT_IN_MEMPCPY_CHK)
1827 {
1828 replace_call_with_value (gsi, dest);
1829 return true;
1830 }
1831 else
1832 {
1833 tree temp = fold_build_pointer_plus_loc (loc, dest, len);
1834 temp = force_gimple_operand_gsi (gsi, temp,
1835 false, NULL_TREE, true,
1836 GSI_SAME_STMT);
1837 replace_call_with_value (gsi, temp);
1838 return true;
1839 }
1840 }
1841
1842 if (! tree_fits_uhwi_p (size))
1843 return false;
1844
1845 tree maxlen = get_maxval_strlen (len, 2);
1846 if (! integer_all_onesp (size))
1847 {
1848 if (! tree_fits_uhwi_p (len))
1849 {
1850 /* If LEN is not constant, try MAXLEN too.
1851 For MAXLEN only allow optimizing into the non-_chk function
1852 if SIZE is >= MAXLEN; never convert to __chk_fail (). */
1853 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1854 {
1855 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
1856 {
1857 /* (void) __mempcpy_chk () can be optimized into
1858 (void) __memcpy_chk (). */
1859 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
1860 if (!fn)
1861 return false;
1862
1863 gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
1864 replace_call_with_call_and_fold (gsi, repl);
1865 return true;
1866 }
1867 return false;
1868 }
1869 }
1870 else
1871 maxlen = len;
1872
1873 if (tree_int_cst_lt (size, maxlen))
1874 return false;
1875 }
1876
1877 fn = NULL_TREE;
1878 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
1879 mem{cpy,pcpy,move,set} is available. */
1880 switch (fcode)
1881 {
1882 case BUILT_IN_MEMCPY_CHK:
1883 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
1884 break;
1885 case BUILT_IN_MEMPCPY_CHK:
1886 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
1887 break;
1888 case BUILT_IN_MEMMOVE_CHK:
1889 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
1890 break;
1891 case BUILT_IN_MEMSET_CHK:
1892 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
1893 break;
1894 default:
1895 break;
1896 }
1897
1898 if (!fn)
1899 return false;
1900
1901 gimple repl = gimple_build_call (fn, 3, dest, src, len);
1902 replace_call_with_call_and_fold (gsi, repl);
1903 return true;
1904 }
1905
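/* As a sketch for __memcpy_chk, where the last argument is the object
   size of DEST as computed by __builtin_object_size:

     __memcpy_chk (dest, src, 16, 64)   becomes  memcpy (dest, src, 16)
                                        since 64 >= 16;
     __memcpy_chk (dest, src, n, (size_t) -1)
                                        becomes  memcpy (dest, src, n);
     (void) __mempcpy_chk (d, s, n, os) becomes
     (void) __memcpy_chk (d, s, n, os)  when N is not known to fit
                                        in OS.  */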
1906 /* Fold a call to the __st[rp]cpy_chk builtin.
1907 DEST, SRC, and SIZE are the arguments to the call.
1908 FCODE is the BUILT_IN_* code of the builtin. SIZE is the object
1909 size of DEST, or all ones if it is unknown. Return false if no
1910 simplification was made. */
1911
1912 static bool
1913 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
1914 tree dest,
1915 tree src, tree size,
1916 enum built_in_function fcode)
1917 {
1918 gimple stmt = gsi_stmt (*gsi);
1919 location_t loc = gimple_location (stmt);
1920 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
1921 tree len, fn;
1922
1923 /* If SRC and DEST are the same (and not volatile), return DEST. */
1924 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
1925 {
1926 replace_call_with_value (gsi, dest);
1927 return true;
1928 }
1929
1930 if (! tree_fits_uhwi_p (size))
1931 return false;
1932
1933 tree maxlen = get_maxval_strlen (src, 1);
1934 if (! integer_all_onesp (size))
1935 {
1936 len = c_strlen (src, 1);
1937 if (! len || ! tree_fits_uhwi_p (len))
1938 {
1939 /* If LEN is not constant, try MAXLEN too.
1940 For MAXLEN only allow optimizing into the non-_chk function
1941 if SIZE is >= MAXLEN; never convert to __chk_fail (). */
1942 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1943 {
1944 if (fcode == BUILT_IN_STPCPY_CHK)
1945 {
1946 if (! ignore)
1947 return false;
1948
1949 /* If return value of __stpcpy_chk is ignored,
1950 optimize into __strcpy_chk. */
1951 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
1952 if (!fn)
1953 return false;
1954
1955 gimple repl = gimple_build_call (fn, 3, dest, src, size);
1956 replace_call_with_call_and_fold (gsi, repl);
1957 return true;
1958 }
1959
1960 if (! len || TREE_SIDE_EFFECTS (len))
1961 return false;
1962
1963 /* If c_strlen returned something, but not a constant,
1964 transform __strcpy_chk into __memcpy_chk. */
1965 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
1966 if (!fn)
1967 return false;
1968
1969 len = fold_convert_loc (loc, size_type_node, len);
1970 len = size_binop_loc (loc, PLUS_EXPR, len,
1971 build_int_cst (size_type_node, 1));
1972 len = force_gimple_operand_gsi (gsi, len, true, NULL_TREE,
1973 true, GSI_SAME_STMT);
1974 gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
1975 replace_call_with_call_and_fold (gsi, repl);
1976 return true;
1977 }
1978 }
1979 else
1980 maxlen = len;
1981
1982 if (! tree_int_cst_lt (maxlen, size))
1983 return false;
1984 }
1985
1986 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
1987 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
1988 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
1989 if (!fn)
1990 return false;
1991
1992 gimple repl = gimple_build_call (fn, 2, dest, src);
1993 replace_call_with_call_and_fold (gsi, repl);
1994 return true;
1995 }
1996
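/* As a sketch for __strcpy_chk, where 64 is the known object size of
   DEST:

     __strcpy_chk (dest, "abc", 64)  becomes  strcpy (dest, "abc")
                                     since strlen ("abc") < 64;
   and when c_strlen (SRC) is a non-constant expression LEN without
   side effects, the call becomes
     __memcpy_chk (dest, src, LEN + 1, 64).  */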
1997 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
1998 are the arguments to the call; SIZE is the object size of DEST, or
1999 all ones if it is unknown. FCODE is the BUILT_IN_* code of the
2000 builtin. Return false if no simplification was made. */
2001
2002 static bool
2003 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
2004 tree dest, tree src,
2005 tree len, tree size,
2006 enum built_in_function fcode)
2007 {
2008 gimple stmt = gsi_stmt (*gsi);
2009 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2010 tree fn;
2011
2012 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
2013 {
2014 /* If return value of __stpncpy_chk is ignored,
2015 optimize into __strncpy_chk. */
2016 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
2017 if (fn)
2018 {
2019 gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
2020 replace_call_with_call_and_fold (gsi, repl);
2021 return true;
2022 }
2023 }
2024
2025 if (! tree_fits_uhwi_p (size))
2026 return false;
2027
2028 tree maxlen = get_maxval_strlen (len, 2);
2029 if (! integer_all_onesp (size))
2030 {
2031 if (! tree_fits_uhwi_p (len))
2032 {
2033 /* If LEN is not constant, try MAXLEN too.
2034 For MAXLEN only allow optimizing into the non-_chk function
2035 if SIZE is >= MAXLEN; never convert to __chk_fail (). */
2036 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2037 return false;
2038 }
2039 else
2040 maxlen = len;
2041
2042 if (tree_int_cst_lt (size, maxlen))
2043 return false;
2044 }
2045
2046 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2047 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
2048 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
2049 if (!fn)
2050 return false;
2051
2052 gimple repl = gimple_build_call (fn, 3, dest, src, len);
2053 replace_call_with_call_and_fold (gsi, repl);
2054 return true;
2055 }
2056
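/* As a sketch for __strncpy_chk with a known object size:

     __strncpy_chk (dest, src, 16, 64)  becomes  strncpy (dest, src, 16)
                                        since 64 >= 16;
     (void) __stpncpy_chk (d, s, n, os) becomes
     (void) __strncpy_chk (d, s, n, os) when the result is unused.  */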
2057 /* Fold a call to the stpcpy builtin with arguments DEST and SRC.
2058 Return false if no simplification can be made. */
2059
2060 static bool
2061 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
2062 {
2063 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2064 location_t loc = gimple_location (stmt);
2065 tree dest = gimple_call_arg (stmt, 0);
2066 tree src = gimple_call_arg (stmt, 1);
2067 tree fn, len, lenp1;
2068
2069 /* If the result is unused, replace stpcpy with strcpy. */
2070 if (gimple_call_lhs (stmt) == NULL_TREE)
2071 {
2072 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2073 if (!fn)
2074 return false;
2075 gimple_call_set_fndecl (stmt, fn);
2076 fold_stmt (gsi);
2077 return true;
2078 }
2079
2080 len = c_strlen (src, 1);
2081 if (!len
2082 || TREE_CODE (len) != INTEGER_CST)
2083 return false;
2084
2085 if (optimize_function_for_size_p (cfun)
2086 /* If length is zero it's small enough. */
2087 && !integer_zerop (len))
2088 return false;
2089
2090 /* If the source has a known length replace stpcpy with memcpy. */
2091 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2092 if (!fn)
2093 return false;
2094
2095 gimple_seq stmts = NULL;
2096 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
2097 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
2098 tem, build_int_cst (size_type_node, 1));
2099 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2100 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
2101 gimple_set_vuse (repl, gimple_vuse (stmt));
2102 gimple_set_vdef (repl, gimple_vdef (stmt));
2103 if (gimple_vdef (repl)
2104 && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
2105 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
2106 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
2107 /* Replace the result with dest + len. */
2108 stmts = NULL;
2109 tem = gimple_convert (&stmts, loc, sizetype, len);
2110 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2111 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
2112 POINTER_PLUS_EXPR, dest, tem);
2113 gsi_replace (gsi, ret, true);
2114 /* Finally fold the memcpy call. */
2115 gimple_stmt_iterator gsi2 = *gsi;
2116 gsi_prev (&gsi2);
2117 fold_stmt (&gsi2);
2118 return true;
2119 }
2120
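/* As a sketch of the stpcpy folding, assuming strlen (SRC) is known to
   be 3:

     p = stpcpy (dest, src);
   becomes
     memcpy (dest, src, 4);   (3 + 1 for the terminating NUL)
     p = dest + 3;
   while a stpcpy call whose result is unused simply becomes
   strcpy (dest, src).  */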
2121 /* Fold a call to __{,v}snprintf_chk at *GSI. Return false if a
2122 normal call should be emitted rather than transforming the call.
2123 FCODE is either BUILT_IN_SNPRINTF_CHK or
2124 BUILT_IN_VSNPRINTF_CHK. The object size argument is checked
2125 against the length bound before the transformation is done. */
2126
2127 static bool
2128 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
2129 enum built_in_function fcode)
2130 {
2131 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2132 tree dest, size, len, fn, fmt, flag;
2133 const char *fmt_str;
2134
2135 /* Verify the required arguments in the original call. */
2136 if (gimple_call_num_args (stmt) < 5)
2137 return false;
2138
2139 dest = gimple_call_arg (stmt, 0);
2140 len = gimple_call_arg (stmt, 1);
2141 flag = gimple_call_arg (stmt, 2);
2142 size = gimple_call_arg (stmt, 3);
2143 fmt = gimple_call_arg (stmt, 4);
2144
2145 if (! tree_fits_uhwi_p (size))
2146 return false;
2147
2148 if (! integer_all_onesp (size))
2149 {
2150 tree maxlen = get_maxval_strlen (len, 2);
2151 if (! tree_fits_uhwi_p (len))
2152 {
2153 /* If LEN is not constant, try MAXLEN too.
2154 For MAXLEN only allow optimizing into the non-_chk function
2155 if SIZE is >= MAXLEN; never convert to __chk_fail (). */
2156 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2157 return false;
2158 }
2159 else
2160 maxlen = len;
2161
2162 if (tree_int_cst_lt (size, maxlen))
2163 return false;
2164 }
2165
2166 if (!init_target_chars ())
2167 return false;
2168
2169 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2170 or if format doesn't contain % chars or is "%s". */
2171 if (! integer_zerop (flag))
2172 {
2173 fmt_str = c_getstr (fmt);
2174 if (fmt_str == NULL)
2175 return false;
2176 if (strchr (fmt_str, target_percent) != NULL
2177 && strcmp (fmt_str, target_percent_s))
2178 return false;
2179 }
2180
2181 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2182 available. */
2183 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2184 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2185 if (!fn)
2186 return false;
2187
2188 /* Replace the called function and the first 5 arguments by 3,
2189 retaining the trailing varargs. */
2190 gimple_call_set_fndecl (stmt, fn);
2191 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2192 gimple_call_set_arg (stmt, 0, dest);
2193 gimple_call_set_arg (stmt, 1, len);
2194 gimple_call_set_arg (stmt, 2, fmt);
2195 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
2196 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2197 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2198 fold_stmt (gsi);
2199 return true;
2200 }
2201
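/* As a sketch, assuming the object size OS passed to __snprintf_chk is
   known and at least the length bound N:

     __snprintf_chk (buf, n, flag, os, fmt, args...)
   becomes
     snprintf (buf, n, fmt, args...)
   provided FLAG is zero, or the format contains no '%' at all, or is
   exactly "%s".  */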
2202 /* Fold a call to __{,v}sprintf_chk at *GSI. Return false if a
2203 normal call should be emitted rather than transforming the
2204 call. FCODE is either BUILT_IN_SPRINTF_CHK
2205 or BUILT_IN_VSPRINTF_CHK. */
2206
2207 static bool
2208 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2209 enum built_in_function fcode)
2210 {
2211 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2212 tree dest, size, len, fn, fmt, flag;
2213 const char *fmt_str;
2214 unsigned nargs = gimple_call_num_args (stmt);
2215
2216 /* Verify the required arguments in the original call. */
2217 if (nargs < 4)
2218 return false;
2219 dest = gimple_call_arg (stmt, 0);
2220 flag = gimple_call_arg (stmt, 1);
2221 size = gimple_call_arg (stmt, 2);
2222 fmt = gimple_call_arg (stmt, 3);
2223
2224 if (! tree_fits_uhwi_p (size))
2225 return false;
2226
2227 len = NULL_TREE;
2228
2229 if (!init_target_chars ())
2230 return false;
2231
2232 /* Check whether the format is a literal string constant. */
2233 fmt_str = c_getstr (fmt);
2234 if (fmt_str != NULL)
2235 {
2236 /* If the format doesn't contain % args or %%, we know the size. */
2237 if (strchr (fmt_str, target_percent) == 0)
2238 {
2239 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
2240 len = build_int_cstu (size_type_node, strlen (fmt_str));
2241 }
2242 /* If the format is "%s" and first ... argument is a string literal,
2243 we know the size too. */
2244 else if (fcode == BUILT_IN_SPRINTF_CHK
2245 && strcmp (fmt_str, target_percent_s) == 0)
2246 {
2247 tree arg;
2248
2249 if (nargs == 5)
2250 {
2251 arg = gimple_call_arg (stmt, 4);
2252 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2253 {
2254 len = c_strlen (arg, 1);
2255 if (! len || ! tree_fits_uhwi_p (len))
2256 len = NULL_TREE;
2257 }
2258 }
2259 }
2260 }
2261
2262 if (! integer_all_onesp (size))
2263 {
2264 if (! len || ! tree_int_cst_lt (len, size))
2265 return false;
2266 }
2267
2268 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2269 or if format doesn't contain % chars or is "%s". */
2270 if (! integer_zerop (flag))
2271 {
2272 if (fmt_str == NULL)
2273 return false;
2274 if (strchr (fmt_str, target_percent) != NULL
2275 && strcmp (fmt_str, target_percent_s))
2276 return false;
2277 }
2278
2279 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2280 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
2281 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
2282 if (!fn)
2283 return false;
2284
2285 /* Replace the called function and the first 4 arguments by 2,
2286 retaining the trailing varargs. */
2287 gimple_call_set_fndecl (stmt, fn);
2288 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2289 gimple_call_set_arg (stmt, 0, dest);
2290 gimple_call_set_arg (stmt, 1, fmt);
2291 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
2292 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2293 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2294 fold_stmt (gsi);
2295 return true;
2296 }
2297
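/* As a sketch, where 64 is the known object size of the destination:

     __sprintf_chk (buf, 0, 64, "hello")      becomes
       sprintf (buf, "hello")       since strlen ("hello") < 64;
     __sprintf_chk (buf, 0, 64, "%s", "abc")  becomes
       sprintf (buf, "%s", "abc")   since the copied length 3 < 64.  */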
2298 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2299 ORIG may be null if this is a 2-argument call. We don't attempt to
2300 simplify calls with more than 3 arguments.
2301
2302 Return true if the call was simplified, false otherwise. When the
2303 return value of sprintf is used, the resulting length must be
2304 a known constant for the transformation to apply. */
2305
2306 static bool
2307 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
2308 {
2309 gimple stmt = gsi_stmt (*gsi);
2310 tree dest = gimple_call_arg (stmt, 0);
2311 tree fmt = gimple_call_arg (stmt, 1);
2312 tree orig = NULL_TREE;
2313 const char *fmt_str = NULL;
2314
2315 /* Verify the required arguments in the original call. We deal with two
2316 types of sprintf() calls: 'sprintf (dest, fmt)' and
2317 'sprintf (dest, "%s", orig)'. */
2318 if (gimple_call_num_args (stmt) > 3)
2319 return false;
2320
2321 if (gimple_call_num_args (stmt) == 3)
2322 orig = gimple_call_arg (stmt, 2);
2323
2324 /* Check whether the format is a literal string constant. */
2325 fmt_str = c_getstr (fmt);
2326 if (fmt_str == NULL)
2327 return false;
2328
2329 if (!init_target_chars ())
2330 return false;
2331
2332 /* If the format doesn't contain % args or %%, use strcpy. */
2333 if (strchr (fmt_str, target_percent) == NULL)
2334 {
2335 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2336
2337 if (!fn)
2338 return false;
2339
2340 /* Don't optimize sprintf (buf, "abc", ptr++). */
2341 if (orig)
2342 return false;
2343
2344 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
2345 'format' is known to contain no % formats. */
2346 gimple_seq stmts = NULL;
2347 gimple repl = gimple_build_call (fn, 2, dest, fmt);
2348 gimple_seq_add_stmt_without_update (&stmts, repl);
2349 if (gimple_call_lhs (stmt))
2350 {
2351 repl = gimple_build_assign (gimple_call_lhs (stmt),
2352 build_int_cst (integer_type_node,
2353 strlen (fmt_str)));
2354 gimple_seq_add_stmt_without_update (&stmts, repl);
2355 gsi_replace_with_seq_vops (gsi, stmts);
2356 /* gsi now points at the assignment to the lhs, get a
2357 stmt iterator to the strcpy call.
2358 ??? We can't use gsi_for_stmt as that doesn't work when the
2359 CFG isn't built yet. */
2360 gimple_stmt_iterator gsi2 = *gsi;
2361 gsi_prev (&gsi2);
2362 fold_stmt (&gsi2);
2363 }
2364 else
2365 {
2366 gsi_replace_with_seq_vops (gsi, stmts);
2367 fold_stmt (gsi);
2368 }
2369 return true;
2370 }
2371
2372 /* If the format is "%s", use strcpy if the result isn't used. */
2373 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2374 {
2375 tree fn;
2376 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2377
2378 if (!fn)
2379 return false;
2380
2381 /* Don't crash on sprintf (str1, "%s"). */
2382 if (!orig)
2383 return false;
2384
2385 tree orig_len = NULL_TREE;
2386 if (gimple_call_lhs (stmt))
2387 {
2388 orig_len = get_maxval_strlen (orig, 0);
2389 if (!orig_len)
2390 return false;
2391 }
2392
2393 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
2394 gimple_seq stmts = NULL;
2395 gimple repl = gimple_build_call (fn, 2, dest, orig);
2396 gimple_seq_add_stmt_without_update (&stmts, repl);
2397 if (gimple_call_lhs (stmt))
2398 {
2399 if (!useless_type_conversion_p (integer_type_node,
2400 TREE_TYPE (orig_len)))
2401 orig_len = fold_convert (integer_type_node, orig_len);
2402 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2403 gimple_seq_add_stmt_without_update (&stmts, repl);
2404 gsi_replace_with_seq_vops (gsi, stmts);
2405 /* gsi now points at the assignment to the lhs, get a
2406 stmt iterator to the strcpy call.
2407 ??? We can't use gsi_for_stmt as that doesn't work when the
2408 CFG isn't built yet. */
2409 gimple_stmt_iterator gsi2 = *gsi;
2410 gsi_prev (&gsi2);
2411 fold_stmt (&gsi2);
2412 }
2413 else
2414 {
2415 gsi_replace_with_seq_vops (gsi, stmts);
2416 fold_stmt (gsi);
2417 }
2418 return true;
2419 }
2420 return false;
2421 }
2422
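/* As a sketch of the sprintf folding at the source level:

     sprintf (dest, "hello")      becomes  strcpy (dest, "hello");
     n = sprintf (dest, "hello")  becomes  strcpy (dest, "hello"), n = 5;
     sprintf (dest, "%s", src)    becomes  strcpy (dest, src),
   where the last form additionally needs a known strlen (SRC) whenever
   the return value is used.  */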
2423 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
2424 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
2425 attempt to simplify calls with more than 4 arguments.
2426
2427 Return true if the call was simplified, false otherwise. When the
2428 return value of snprintf is used, the resulting length must be
2429 a known constant for the transformation to apply. */
2430
2431 static bool
2432 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
2433 {
2434 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2435 tree dest = gimple_call_arg (stmt, 0);
2436 tree destsize = gimple_call_arg (stmt, 1);
2437 tree fmt = gimple_call_arg (stmt, 2);
2438 tree orig = NULL_TREE;
2439 const char *fmt_str = NULL;
2440
2441 if (gimple_call_num_args (stmt) > 4)
2442 return false;
2443
2444 if (gimple_call_num_args (stmt) == 4)
2445 orig = gimple_call_arg (stmt, 3);
2446
2447 if (!tree_fits_uhwi_p (destsize))
2448 return false;
2449 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
2450
2451 /* Check whether the format is a literal string constant. */
2452 fmt_str = c_getstr (fmt);
2453 if (fmt_str == NULL)
2454 return false;
2455
2456 if (!init_target_chars ())
2457 return false;
2458
2459 /* If the format doesn't contain % args or %%, use strcpy. */
2460 if (strchr (fmt_str, target_percent) == NULL)
2461 {
2462 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2463 if (!fn)
2464 return false;
2465
2466 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
2467 if (orig)
2468 return false;
2469
2470 /* We could expand this as
2471 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
2472 or to
2473 memcpy (str, fmt_with_nul_at_cstm1, cst);
2474 but in the former case that might increase code size
2475 and in the latter case grow .rodata section too much.
2476 So punt for now. */
2477 size_t len = strlen (fmt_str);
2478 if (len >= destlen)
2479 return false;
2480
2481 gimple_seq stmts = NULL;
2482 gimple repl = gimple_build_call (fn, 2, dest, fmt);
2483 gimple_seq_add_stmt_without_update (&stmts, repl);
2484 if (gimple_call_lhs (stmt))
2485 {
2486 repl = gimple_build_assign (gimple_call_lhs (stmt),
2487 build_int_cst (integer_type_node, len));
2488 gimple_seq_add_stmt_without_update (&stmts, repl);
2489 gsi_replace_with_seq_vops (gsi, stmts);
2490 /* gsi now points at the assignment to the lhs, get a
2491 stmt iterator to the strcpy call.
2492 ??? We can't use gsi_for_stmt as that doesn't work when the
2493 CFG isn't built yet. */
2494 gimple_stmt_iterator gsi2 = *gsi;
2495 gsi_prev (&gsi2);
2496 fold_stmt (&gsi2);
2497 }
2498 else
2499 {
2500 gsi_replace_with_seq_vops (gsi, stmts);
2501 fold_stmt (gsi);
2502 }
2503 return true;
2504 }
2505
2506 /* If the format is "%s", use strcpy if the result isn't used. */
2507 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2508 {
2509 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2510 if (!fn)
2511 return false;
2512
2513 /* Don't crash on snprintf (str1, cst, "%s"). */
2514 if (!orig)
2515 return false;
2516
2517 tree orig_len = get_maxval_strlen (orig, 0);
2518 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
2519 return false;
2520
2521 /* We could expand this as
2522 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
2523 or to
2524 memcpy (str1, str2_with_nul_at_cstm1, cst);
2525 but in the former case that might increase code size
2526 and in the latter case grow .rodata section too much.
2527 So punt for now. */
2528 if (compare_tree_int (orig_len, destlen) >= 0)
2529 return false;
2530
2531 /* Convert snprintf (str1, cst, "%s", str2) into
2532 strcpy (str1, str2) if strlen (str2) < cst. */
2533 gimple_seq stmts = NULL;
2534 gimple repl = gimple_build_call (fn, 2, dest, orig);
2535 gimple_seq_add_stmt_without_update (&stmts, repl);
2536 if (gimple_call_lhs (stmt))
2537 {
2538 if (!useless_type_conversion_p (integer_type_node,
2539 TREE_TYPE (orig_len)))
2540 orig_len = fold_convert (integer_type_node, orig_len);
2541 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2542 gimple_seq_add_stmt_without_update (&stmts, repl);
2543 gsi_replace_with_seq_vops (gsi, stmts);
2544 /* gsi now points at the assignment to the lhs, get a
2545 stmt iterator to the strcpy call.
2546 ??? We can't use gsi_for_stmt as that doesn't work when the
2547 CFG isn't built yet. */
2548 gimple_stmt_iterator gsi2 = *gsi;
2549 gsi_prev (&gsi2);
2550 fold_stmt (&gsi2);
2551 }
2552 else
2553 {
2554 gsi_replace_with_seq_vops (gsi, stmts);
2555 fold_stmt (gsi);
2556 }
2557 return true;
2558 }
2559 return false;
2560 }
2561
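/* As a sketch of the snprintf folding, assuming a destination size
   of 32:

     snprintf (dest, 32, "hello")    becomes  strcpy (dest, "hello")
                                     since strlen ("hello") < 32;
     snprintf (dest, 32, "%s", src)  becomes  strcpy (dest, src)
                                     when strlen (SRC) is known to be
                                     smaller than 32.  */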
2562 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
2563 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
2564 more than 3 arguments, and ARG may be null in the 2-argument case.
2565
2566 Return true if simplification was possible, false otherwise.
2567 FCODE is the BUILT_IN_* code of the function to be
2568 simplified. */
2569
2570 static bool
2571 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
2572 tree fp, tree fmt, tree arg,
2573 enum built_in_function fcode)
2574 {
2575 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2576 tree fn_fputc, fn_fputs;
2577 const char *fmt_str = NULL;
2578
2579 /* If the return value is used, don't do the transformation. */
2580 if (gimple_call_lhs (stmt) != NULL_TREE)
2581 return false;
2582
2583 /* Check whether the format is a literal string constant. */
2584 fmt_str = c_getstr (fmt);
2585 if (fmt_str == NULL)
2586 return false;
2587
2588 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
2589 {
2590 /* If we're using an unlocked function, assume the other
2591 unlocked functions exist explicitly. */
2592 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
2593 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
2594 }
2595 else
2596 {
2597 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
2598 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
2599 }
2600
2601 if (!init_target_chars ())
2602 return false;
2603
2604 /* If the format doesn't contain % args or %%, transform fprintf to fputs. */
2605 if (strchr (fmt_str, target_percent) == NULL)
2606 {
2607 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
2608 && arg)
2609 return false;
2610
2611 /* If the format specifier was "", fprintf does nothing. */
2612 if (fmt_str[0] == '\0')
2613 {
2614 replace_call_with_value (gsi, NULL_TREE);
2615 return true;
2616 }
2617
2618 /* When "string" doesn't contain %, replace all cases of
2619 fprintf (fp, string) with fputs (string, fp). The fputs
2620 builtin will take care of special cases like length == 1. */
2621 if (fn_fputs)
2622 {
2623 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
2624 replace_call_with_call_and_fold (gsi, repl);
2625 return true;
2626 }
2627 }
2628
2629 /* The other optimizations can be done only on the non-va_list variants. */
2630 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
2631 return false;
2632
2633 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
2634 else if (strcmp (fmt_str, target_percent_s) == 0)
2635 {
2636 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2637 return false;
2638 if (fn_fputs)
2639 {
2640 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
2641 replace_call_with_call_and_fold (gsi, repl);
2642 return true;
2643 }
2644 }
2645
2646 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
2647 else if (strcmp (fmt_str, target_percent_c) == 0)
2648 {
2649 if (!arg
2650 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
2651 return false;
2652 if (fn_fputc)
2653 {
2654 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
2655 replace_call_with_call_and_fold (gsi, repl);
2656 return true;
2657 }
2658 }
2659
2660 return false;
2661 }
2662
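/* As a sketch of the fprintf folding, with the return value unused:

     fprintf (fp, "")       is deleted entirely;
     fprintf (fp, "hello")  becomes  fputs ("hello", fp);
     fprintf (fp, "%s", s)  becomes  fputs (s, fp);
     fprintf (fp, "%c", c)  becomes  fputc (c, fp).  */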
2663 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
2664 FMT and ARG are the arguments to the call; we don't fold cases with
2665 more than 2 arguments, and ARG may be null if this is a 1-argument case.
2666
2667 Return true if simplification was possible, false otherwise.
2668 FCODE is the BUILT_IN_* code of the function to be
2669 simplified. */
2670
2671 static bool
2672 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
2673 tree arg, enum built_in_function fcode)
2674 {
2675 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2676 tree fn_putchar, fn_puts, newarg;
2677 const char *fmt_str = NULL;
2678
2679 /* If the return value is used, don't do the transformation. */
2680 if (gimple_call_lhs (stmt) != NULL_TREE)
2681 return false;
2682
2683 /* Check whether the format is a literal string constant. */
2684 fmt_str = c_getstr (fmt);
2685 if (fmt_str == NULL)
2686 return false;
2687
2688 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
2689 {
2690 /* If we're using an unlocked function, assume the other
2691 unlocked functions exist explicitly. */
2692 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
2693 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
2694 }
2695 else
2696 {
2697 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
2698 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
2699 }
2700
2701 if (!init_target_chars ())
2702 return false;
2703
2704 if (strcmp (fmt_str, target_percent_s) == 0
2705 || strchr (fmt_str, target_percent) == NULL)
2706 {
2707 const char *str;
2708
2709 if (strcmp (fmt_str, target_percent_s) == 0)
2710 {
2711 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
2712 return false;
2713
2714 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2715 return false;
2716
2717 str = c_getstr (arg);
2718 if (str == NULL)
2719 return false;
2720 }
2721 else
2722 {
2723 /* The format specifier doesn't contain any '%' characters. */
2724 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
2725 && arg)
2726 return false;
2727 str = fmt_str;
2728 }
2729
2730 /* If the string was "", printf does nothing. */
2731 if (str[0] == '\0')
2732 {
2733 replace_call_with_value (gsi, NULL_TREE);
2734 return true;
2735 }
2736
2737 /* If the string has length of 1, call putchar. */
2738 if (str[1] == '\0')
2739 {
2740 /* Given printf ("c") (where c is any one character),
2741 convert "c"[0] to an int and pass that to the replacement
2742 function. */
2743 newarg = build_int_cst (integer_type_node, str[0]);
2744 if (fn_putchar)
2745 {
2746 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
2747 replace_call_with_call_and_fold (gsi, repl);
2748 return true;
2749 }
2750 }
2751 else
2752 {
2753 /* If the string was "string\n", call puts("string"). */
2754 size_t len = strlen (str);
2755 if ((unsigned char)str[len - 1] == target_newline
2756 && (size_t) (int) len == len
2757 && (int) len > 0)
2758 {
2759 char *newstr;
2760 tree offset_node, string_cst;
2761
2762 /* Create a NUL-terminated string that's one char shorter
2763 than the original, stripping off the trailing '\n'. */
2764 newarg = build_string_literal (len, str);
2765 string_cst = string_constant (newarg, &offset_node);
2766 gcc_checking_assert (string_cst
2767 && (TREE_STRING_LENGTH (string_cst)
2768 == (int) len)
2769 && integer_zerop (offset_node)
2770 && (unsigned char)
2771 TREE_STRING_POINTER (string_cst)[len - 1]
2772 == target_newline);
2773 /* build_string_literal creates a new STRING_CST,
2774 modify it in place to avoid double copying. */
2775 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
2776 newstr[len - 1] = '\0';
2777 if (fn_puts)
2778 {
2779 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
2780 replace_call_with_call_and_fold (gsi, repl);
2781 return true;
2782 }
2783 }
2784 else
2785 /* We'd like to arrange to call fputs(string,stdout) here,
2786 but we need stdout and don't have a way to get it yet. */
2787 return false;
2788 }
2789 }
2790
2791 /* The other optimizations can be done only on the non-va_list variants. */
2792 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
2793 return false;
2794
2795 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
2796 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
2797 {
2798 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2799 return false;
2800 if (fn_puts)
2801 {
2802 gcall *repl = gimple_build_call (fn_puts, 1, arg);
2803 replace_call_with_call_and_fold (gsi, repl);
2804 return true;
2805 }
2806 }
2807
2808 /* If the format specifier was "%c", call __builtin_putchar(arg). */
2809 else if (strcmp (fmt_str, target_percent_c) == 0)
2810 {
2811 if (!arg || ! useless_type_conversion_p (integer_type_node,
2812 TREE_TYPE (arg)))
2813 return false;
2814 if (fn_putchar)
2815 {
2816 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
2817 replace_call_with_call_and_fold (gsi, repl);
2818 return true;
2819 }
2820 }
2821
2822 return false;
2823 }
2824
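/* As a sketch of the printf folding, with the return value unused:

     printf ("")          is deleted entirely;
     printf ("x")         becomes  putchar ('x');
     printf ("hello\n")   becomes  puts ("hello");
     printf ("%s\n", s)   becomes  puts (s);
     printf ("%c", c)     becomes  putchar (c);
   while printf ("hello") without a trailing newline is left alone,
   since there is no way to refer to stdout here for fputs.  */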
2825
2826
2827 /* Fold a call to __builtin_strlen when the length of the argument is known. */
2828
2829 static bool
2830 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
2831 {
2832 gimple stmt = gsi_stmt (*gsi);
2833 tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
2834 if (!len)
2835 return false;
2836 len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
2837 replace_call_with_value (gsi, len);
2838 return true;
2839 }
2840
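/* As a sketch: when the maximal string length of the argument is a
   known constant, the call collapses to that constant, e.g.

     n = strlen ("abc");   becomes   n = 3;  */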
2841
2842 /* Fold the non-target builtin at *GSI and return whether any simplification
2843 was made. */
2844
2845 static bool
2846 gimple_fold_builtin (gimple_stmt_iterator *gsi)
2847 {
2848 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
2849 tree callee = gimple_call_fndecl (stmt);
2850
2851 /* Give up for always_inline inline builtins until they are
2852 inlined. */
2853 if (avoid_folding_inline_builtin (callee))
2854 return false;
2855
2856 unsigned n = gimple_call_num_args (stmt);
2857 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2858 switch (fcode)
2859 {
2860 case BUILT_IN_BZERO:
2861 return gimple_fold_builtin_memset (gsi, integer_zero_node,
2862 gimple_call_arg (stmt, 1));
2863 case BUILT_IN_MEMSET:
2864 return gimple_fold_builtin_memset (gsi,
2865 gimple_call_arg (stmt, 1),
2866 gimple_call_arg (stmt, 2));
2867 case BUILT_IN_BCOPY:
2868 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
2869 gimple_call_arg (stmt, 0), 3);
2870 case BUILT_IN_MEMCPY:
2871 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2872 gimple_call_arg (stmt, 1), 0);
2873 case BUILT_IN_MEMPCPY:
2874 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2875 gimple_call_arg (stmt, 1), 1);
2876 case BUILT_IN_MEMMOVE:
2877 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2878 gimple_call_arg (stmt, 1), 3);
2879 case BUILT_IN_SPRINTF_CHK:
2880 case BUILT_IN_VSPRINTF_CHK:
2881 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
2882 case BUILT_IN_STRCAT_CHK:
2883 return gimple_fold_builtin_strcat_chk (gsi);
2884 case BUILT_IN_STRNCAT_CHK:
2885 return gimple_fold_builtin_strncat_chk (gsi);
2886 case BUILT_IN_STRLEN:
2887 return gimple_fold_builtin_strlen (gsi);
2888 case BUILT_IN_STRCPY:
2889 return gimple_fold_builtin_strcpy (gsi,
2890 gimple_call_arg (stmt, 0),
2891 gimple_call_arg (stmt, 1));
2892 case BUILT_IN_STRNCPY:
2893 return gimple_fold_builtin_strncpy (gsi,
2894 gimple_call_arg (stmt, 0),
2895 gimple_call_arg (stmt, 1),
2896 gimple_call_arg (stmt, 2));
2897 case BUILT_IN_STRCAT:
2898 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
2899 gimple_call_arg (stmt, 1));
2900 case BUILT_IN_STRNCAT:
2901 return gimple_fold_builtin_strncat (gsi);
2902 case BUILT_IN_FPUTS:
2903 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2904 gimple_call_arg (stmt, 1), false);
2905 case BUILT_IN_FPUTS_UNLOCKED:
2906 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2907 gimple_call_arg (stmt, 1), true);
2908 case BUILT_IN_MEMCPY_CHK:
2909 case BUILT_IN_MEMPCPY_CHK:
2910 case BUILT_IN_MEMMOVE_CHK:
2911 case BUILT_IN_MEMSET_CHK:
2912 return gimple_fold_builtin_memory_chk (gsi,
2913 gimple_call_arg (stmt, 0),
2914 gimple_call_arg (stmt, 1),
2915 gimple_call_arg (stmt, 2),
2916 gimple_call_arg (stmt, 3),
2917 fcode);
2918 case BUILT_IN_STPCPY:
2919 return gimple_fold_builtin_stpcpy (gsi);
2920 case BUILT_IN_STRCPY_CHK:
2921 case BUILT_IN_STPCPY_CHK:
2922 return gimple_fold_builtin_stxcpy_chk (gsi,
2923 gimple_call_arg (stmt, 0),
2924 gimple_call_arg (stmt, 1),
2925 gimple_call_arg (stmt, 2),
2926 fcode);
2927 case BUILT_IN_STRNCPY_CHK:
2928 case BUILT_IN_STPNCPY_CHK:
2929 return gimple_fold_builtin_stxncpy_chk (gsi,
2930 gimple_call_arg (stmt, 0),
2931 gimple_call_arg (stmt, 1),
2932 gimple_call_arg (stmt, 2),
2933 gimple_call_arg (stmt, 3),
2934 fcode);
2935 case BUILT_IN_SNPRINTF_CHK:
2936 case BUILT_IN_VSNPRINTF_CHK:
2937 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
2938 case BUILT_IN_SNPRINTF:
2939 return gimple_fold_builtin_snprintf (gsi);
2940 case BUILT_IN_SPRINTF:
2941 return gimple_fold_builtin_sprintf (gsi);
2942 case BUILT_IN_FPRINTF:
2943 case BUILT_IN_FPRINTF_UNLOCKED:
2944 case BUILT_IN_VFPRINTF:
2945 if (n == 2 || n == 3)
2946 return gimple_fold_builtin_fprintf (gsi,
2947 gimple_call_arg (stmt, 0),
2948 gimple_call_arg (stmt, 1),
2949 n == 3
2950 ? gimple_call_arg (stmt, 2)
2951 : NULL_TREE,
2952 fcode);
2953 break;
2954 case BUILT_IN_FPRINTF_CHK:
2955 case BUILT_IN_VFPRINTF_CHK:
2956 if (n == 3 || n == 4)
2957 return gimple_fold_builtin_fprintf (gsi,
2958 gimple_call_arg (stmt, 0),
2959 gimple_call_arg (stmt, 2),
2960 n == 4
2961 ? gimple_call_arg (stmt, 3)
2962 : NULL_TREE,
2963 fcode);
2964 break;
2965 case BUILT_IN_PRINTF:
2966 case BUILT_IN_PRINTF_UNLOCKED:
2967 case BUILT_IN_VPRINTF:
2968 if (n == 1 || n == 2)
2969 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
2970 n == 2
2971 ? gimple_call_arg (stmt, 1)
2972 : NULL_TREE, fcode);
2973 break;
2974 case BUILT_IN_PRINTF_CHK:
2975 case BUILT_IN_VPRINTF_CHK:
2976 if (n == 2 || n == 3)
2977 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
2978 n == 3
2979 ? gimple_call_arg (stmt, 2)
2980 : NULL_TREE, fcode);
2981 default:;
2982 }
2983
2984 /* Try the generic builtin folder. */
2985 bool ignore = (gimple_call_lhs (stmt) == NULL);
2986 tree result = fold_call_stmt (stmt, ignore);
2987 if (result)
2988 {
2989 if (ignore)
2990 STRIP_NOPS (result);
2991 else
2992 result = fold_convert (gimple_call_return_type (stmt), result);
2993 if (!update_call_from_tree (gsi, result))
2994 gimplify_and_update_call_from_tree (gsi, result);
2995 return true;
2996 }
2997
2998 return false;
2999 }
3000
3001 /* Return true if ARG0 CODE ARG1, computed in infinite signed
3002 precision, doesn't fit into TYPE. The overflow test is performed
3003 regardless of -fwrapv, and even for unsigned types. */
3004
3005 bool
3006 arith_overflowed_p (enum tree_code code, const_tree type,
3007 const_tree arg0, const_tree arg1)
3008 {
3009 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
3010 typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
3011 widest2_int_cst;
3012 widest2_int warg0 = widest2_int_cst (arg0);
3013 widest2_int warg1 = widest2_int_cst (arg1);
3014 widest2_int wres;
3015 switch (code)
3016 {
3017 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
3018 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
3019 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
3020 default: gcc_unreachable ();
3021 }
3022 signop sign = TYPE_SIGN (type);
3023 if (sign == UNSIGNED && wi::neg_p (wres))
3024 return true;
3025 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
3026 }
3027
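/* A worked example: for TYPE unsigned char (8 bits of precision),
   arith_overflowed_p (PLUS_EXPR, type, 200, 100) computes 300 in the
   double-width intermediate; 300 needs 9 value bits, which exceeds 8,
   so the result is true. Likewise MINUS_EXPR on 1 and 2 yields -1,
   which is negative and therefore overflows the unsigned type.  */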
3028 /* Attempt to fold a call statement referenced by the statement iterator GSI.
3029 The statement may be replaced by another statement, e.g., if the call
3030 simplifies to a constant value. Return true if any changes were made.
3031 It is assumed that the operands have been previously folded. */
3032
3033 static bool
3034 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
3035 {
3036 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3037 tree callee;
3038 bool changed = false;
3039 unsigned i;
3040
3041 /* Fold *& in call arguments. */
3042 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3043 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
3044 {
3045 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
3046 if (tmp)
3047 {
3048 gimple_call_set_arg (stmt, i, tmp);
3049 changed = true;
3050 }
3051 }
3052
3053 /* Check for virtual calls that became direct calls. */
3054 callee = gimple_call_fn (stmt);
3055 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
3056 {
3057 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
3058 {
3059 if (dump_file && virtual_method_call_p (callee)
3060 && !possible_polymorphic_call_target_p
3061 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
3062 (OBJ_TYPE_REF_EXPR (callee)))))
3063 {
3064 fprintf (dump_file,
3065 "Type inheritance inconsistent devirtualization of ");
3066 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
3067 fprintf (dump_file, " to ");
3068 print_generic_expr (dump_file, callee, TDF_SLIM);
3069 fprintf (dump_file, "\n");
3070 }
3071
3072 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
3073 changed = true;
3074 }
3075 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
3076 {
3077 bool final;
3078 vec <cgraph_node *>targets
3079 = possible_polymorphic_call_targets (callee, stmt, &final);
3080 if (final && targets.length () <= 1 && dbg_cnt (devirt))
3081 {
3082 tree lhs = gimple_call_lhs (stmt);
3083 if (dump_enabled_p ())
3084 {
3085 location_t loc = gimple_location_safe (stmt);
3086 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
3087 "folding virtual function call to %s\n",
3088 targets.length () == 1
3089 ? targets[0]->name ()
3090 : "__builtin_unreachable");
3091 }
3092 if (targets.length () == 1)
3093 {
3094 gimple_call_set_fndecl (stmt, targets[0]->decl);
3095 changed = true;
3096 /* If the call becomes noreturn, remove the lhs. */
3097 if (lhs && (gimple_call_flags (stmt) & ECF_NORETURN))
3098 {
3099 if (TREE_CODE (lhs) == SSA_NAME)
3100 {
3101 tree var = create_tmp_var (TREE_TYPE (lhs));
3102 tree def = get_or_create_ssa_default_def (cfun, var);
3103 gimple new_stmt = gimple_build_assign (lhs, def);
3104 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
3105 }
3106 gimple_call_set_lhs (stmt, NULL_TREE);
3107 }
3108 maybe_remove_unused_call_args (cfun, stmt);
3109 }
3110 else
3111 {
3112 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3113 gimple new_stmt = gimple_build_call (fndecl, 0);
3114 gimple_set_location (new_stmt, gimple_location (stmt));
3115 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3116 {
3117 tree var = create_tmp_var (TREE_TYPE (lhs));
3118 tree def = get_or_create_ssa_default_def (cfun, var);
3119
3120 /* To satisfy the condition for
3121 cgraph_update_edges_for_call_stmt_node,
3122 we need to preserve the GIMPLE_CALL statement
3123 at the position of the GSI iterator. */
3124 update_call_from_tree (gsi, def);
3125 gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
3126 }
3127 else
3128 {
3129 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3130 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3131 gsi_replace (gsi, new_stmt, false);
3132 }
3133 return true;
3134 }
3135 }
3136 }
3137 }
3138
3139 /* Check for indirect calls that became direct calls, and then
3140 no longer require a static chain. */
3141 if (gimple_call_chain (stmt))
3142 {
3143 tree fn = gimple_call_fndecl (stmt);
3144 if (fn && !DECL_STATIC_CHAIN (fn))
3145 {
3146 gimple_call_set_chain (stmt, NULL);
3147 changed = true;
3148 }
3149 else
3150 {
3151 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
3152 if (tmp)
3153 {
3154 gimple_call_set_chain (stmt, tmp);
3155 changed = true;
3156 }
3157 }
3158 }
3159
3160 if (inplace)
3161 return changed;
3162
3163 /* Check for builtins that CCP can handle using information not
3164 available in the generic fold routines. */
3165 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
3166 {
3167 if (gimple_fold_builtin (gsi))
3168 changed = true;
3169 }
3170 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
3171 {
3172 changed |= targetm.gimple_fold_builtin (gsi);
3173 }
3174 else if (gimple_call_internal_p (stmt))
3175 {
3176 enum tree_code subcode = ERROR_MARK;
3177 tree result = NULL_TREE;
3178 bool cplx_result = false;
3179 tree overflow = NULL_TREE;
3180 switch (gimple_call_internal_fn (stmt))
3181 {
3182 case IFN_BUILTIN_EXPECT:
3183 result = fold_builtin_expect (gimple_location (stmt),
3184 gimple_call_arg (stmt, 0),
3185 gimple_call_arg (stmt, 1),
3186 gimple_call_arg (stmt, 2));
3187 break;
3188 case IFN_UBSAN_OBJECT_SIZE:
3189 if (integer_all_onesp (gimple_call_arg (stmt, 2))
3190 || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
3191 && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
3192 && tree_int_cst_le (gimple_call_arg (stmt, 1),
3193 gimple_call_arg (stmt, 2))))
3194 {
3195 gsi_replace (gsi, gimple_build_nop (), true);
3196 unlink_stmt_vdef (stmt);
3197 release_defs (stmt);
3198 return true;
3199 }
3200 break;
3201 case IFN_UBSAN_CHECK_ADD:
3202 subcode = PLUS_EXPR;
3203 break;
3204 case IFN_UBSAN_CHECK_SUB:
3205 subcode = MINUS_EXPR;
3206 break;
3207 case IFN_UBSAN_CHECK_MUL:
3208 subcode = MULT_EXPR;
3209 break;
3210 case IFN_ADD_OVERFLOW:
3211 subcode = PLUS_EXPR;
3212 cplx_result = true;
3213 break;
3214 case IFN_SUB_OVERFLOW:
3215 subcode = MINUS_EXPR;
3216 cplx_result = true;
3217 break;
3218 case IFN_MUL_OVERFLOW:
3219 subcode = MULT_EXPR;
3220 cplx_result = true;
3221 break;
3222 default:
3223 break;
3224 }
3225 if (subcode != ERROR_MARK)
3226 {
3227 tree arg0 = gimple_call_arg (stmt, 0);
3228 tree arg1 = gimple_call_arg (stmt, 1);
3229 tree type = TREE_TYPE (arg0);
3230 if (cplx_result)
3231 {
3232 tree lhs = gimple_call_lhs (stmt);
3233 if (lhs == NULL_TREE)
3234 type = NULL_TREE;
3235 else
3236 type = TREE_TYPE (TREE_TYPE (lhs));
3237 }
3238 if (type == NULL_TREE)
3239 ;
3240 /* x = y + 0; x = y - 0; x = y * 0; */
3241 else if (integer_zerop (arg1))
3242 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
3243 /* x = 0 + y; x = 0 * y; */
3244 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
3245 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
3246 /* x = y - y; */
3247 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
3248 result = integer_zero_node;
3249 /* x = y * 1; x = 1 * y; */
3250 else if (subcode == MULT_EXPR && integer_onep (arg1))
3251 result = arg0;
3252 else if (subcode == MULT_EXPR && integer_onep (arg0))
3253 result = arg1;
3254 else if (TREE_CODE (arg0) == INTEGER_CST
3255 && TREE_CODE (arg1) == INTEGER_CST)
3256 {
3257 if (cplx_result)
3258 result = int_const_binop (subcode, fold_convert (type, arg0),
3259 fold_convert (type, arg1));
3260 else
3261 result = int_const_binop (subcode, arg0, arg1);
3262 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
3263 {
3264 if (cplx_result)
3265 overflow = build_one_cst (type);
3266 else
3267 result = NULL_TREE;
3268 }
3269 }
3270 if (result)
3271 {
3272 if (result == integer_zero_node)
3273 result = build_zero_cst (type);
3274 else if (cplx_result && TREE_TYPE (result) != type)
3275 {
3276 if (TREE_CODE (result) == INTEGER_CST)
3277 {
3278 if (arith_overflowed_p (PLUS_EXPR, type, result,
3279 integer_zero_node))
3280 overflow = build_one_cst (type);
3281 }
3282 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
3283 && TYPE_UNSIGNED (type))
3284 || (TYPE_PRECISION (type)
3285 < (TYPE_PRECISION (TREE_TYPE (result))
3286 + (TYPE_UNSIGNED (TREE_TYPE (result))
3287 && !TYPE_UNSIGNED (type)))))
3288 result = NULL_TREE;
3289 if (result)
3290 result = fold_convert (type, result);
3291 }
3292 }
3293 }
3294
3295 if (result)
3296 {
3297 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
3298 result = drop_tree_overflow (result);
3299 if (cplx_result)
3300 {
3301 if (overflow == NULL_TREE)
3302 overflow = build_zero_cst (TREE_TYPE (result));
3303 tree ctype = build_complex_type (TREE_TYPE (result));
3304 if (TREE_CODE (result) == INTEGER_CST
3305 && TREE_CODE (overflow) == INTEGER_CST)
3306 result = build_complex (ctype, result, overflow);
3307 else
3308 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
3309 ctype, result, overflow);
3310 }
3311 if (!update_call_from_tree (gsi, result))
3312 gimplify_and_update_call_from_tree (gsi, result);
3313 changed = true;
3314 }
3315 }
3316
3317 return changed;
3318 }
3319
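/* As a sketch of the devirtualization done in gimple_fold_call above,
   assuming type analysis proves there is a single possible target
   D::foo for a virtual call through an OBJ_TYPE_REF: the indirect
   call  OBJ_TYPE_REF (...) (p)  becomes the direct call  D::foo (p),
   and when the target set is provably empty, the call is replaced by
   __builtin_unreachable ().  */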
3320
3321 /* Worker for fold_stmt_1 dispatch to pattern based folding with
3322 gimple_simplify.
3323
3324 Replaces *GSI with the simplification result in RCODE and OPS
3325 and the associated statements in *SEQ. Does the replacement
3326 according to INPLACE and returns true if the operation succeeded. */
3327
3328 static bool
3329 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
3330 code_helper rcode, tree *ops,
3331 gimple_seq *seq, bool inplace)
3332 {
3333 gimple stmt = gsi_stmt (*gsi);
3334
3335 /* Play safe and do not allow abnormals to be mentioned in
3336 newly created statements. See also maybe_push_res_to_seq. */
3337 if ((TREE_CODE (ops[0]) == SSA_NAME
3338 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0]))
3339 || (ops[1]
3340 && TREE_CODE (ops[1]) == SSA_NAME
3341 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1]))
3342 || (ops[2]
3343 && TREE_CODE (ops[2]) == SSA_NAME
3344 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])))
3345 return false;
3346
3347 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
3348 {
3349 gcc_assert (rcode.is_tree_code ());
3350 if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
3351 /* A GIMPLE_COND's condition may not throw. */
3352 && (!flag_exceptions
3353 || !cfun->can_throw_non_call_exceptions
3354 || !operation_could_trap_p (rcode,
3355 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
3356 false, NULL_TREE)))
3357 gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
3358 else if (rcode == SSA_NAME)
3359 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
3360 build_zero_cst (TREE_TYPE (ops[0])));
3361 else if (rcode == INTEGER_CST)
3362 {
3363 if (integer_zerop (ops[0]))
3364 gimple_cond_make_false (cond_stmt);
3365 else
3366 gimple_cond_make_true (cond_stmt);
3367 }
3368 else if (!inplace)
3369 {
3370 tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
3371 ops, seq);
3372 if (!res)
3373 return false;
3374 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
3375 build_zero_cst (TREE_TYPE (res)));
3376 }
3377 else
3378 return false;
3379 if (dump_file && (dump_flags & TDF_DETAILS))
3380 {
3381 fprintf (dump_file, "gimple_simplified to ");
3382 if (!gimple_seq_empty_p (*seq))
3383 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3384 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
3385 0, TDF_SLIM);
3386 }
3387 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
3388 return true;
3389 }
3390 else if (is_gimple_assign (stmt)
3391 && rcode.is_tree_code ())
3392 {
3393 if (!inplace
3394 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
3395 {
3396 maybe_build_generic_op (rcode,
3397 TREE_TYPE (gimple_assign_lhs (stmt)),
3398 &ops[0], ops[1], ops[2]);
3399 gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
3400 if (dump_file && (dump_flags & TDF_DETAILS))
3401 {
3402 fprintf (dump_file, "gimple_simplified to ");
3403 if (!gimple_seq_empty_p (*seq))
3404 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3405 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
3406 0, TDF_SLIM);
3407 }
3408 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
3409 return true;
3410 }
3411 }
3412 else if (!inplace)
3413 {
3414 if (gimple_has_lhs (stmt))
3415 {
3416 tree lhs = gimple_get_lhs (stmt);
3417 if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
3418 ops, seq, lhs))
3419 return false;
3420 if (dump_file && (dump_flags & TDF_DETAILS))
3421 {
3422 fprintf (dump_file, "gimple_simplified to ");
3423 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
3424 }
3425 gsi_replace_with_seq_vops (gsi, *seq);
3426 return true;
3427 }
3428 else
3429 gcc_unreachable ();
3430 }
3431
3432 return false;
3433 }
3434
3435 /* Canonicalize MEM_REFs invariant address operand after propagation. */
3436
3437 static bool
3438 maybe_canonicalize_mem_ref_addr (tree *t)
3439 {
3440 bool res = false;
3441
3442 if (TREE_CODE (*t) == ADDR_EXPR)
3443 t = &TREE_OPERAND (*t, 0);
3444
3445 while (handled_component_p (*t))
3446 t = &TREE_OPERAND (*t, 0);
3447
3448 /* Canonicalize MEM [&foo.bar, 0] which appears after propagation
3449 of invariant addresses into an SSA name MEM_REF address. */
3450 if (TREE_CODE (*t) == MEM_REF
3451 || TREE_CODE (*t) == TARGET_MEM_REF)
3452 {
3453 tree addr = TREE_OPERAND (*t, 0);
3454 if (TREE_CODE (addr) == ADDR_EXPR
3455 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
3456 || handled_component_p (TREE_OPERAND (addr, 0))))
3457 {
3458 tree base;
3459 HOST_WIDE_INT coffset;
3460 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
3461 &coffset);
3462 if (!base)
3463 gcc_unreachable ();
3464
3465 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
3466 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
3467 TREE_OPERAND (*t, 1),
3468 size_int (coffset));
3469 res = true;
3470 }
3471 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
3472 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
3473 }
3474
3475 /* Canonicalize back MEM_REFs to plain reference trees if the object
3476 accessed is a decl that has the same access semantics as the MEM_REF. */
3477 if (TREE_CODE (*t) == MEM_REF
3478 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
3479 && integer_zerop (TREE_OPERAND (*t, 1))
3480 && MR_DEPENDENCE_CLIQUE (*t) == 0)
3481 {
3482 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
3483 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
3484 if (/* Same volatile qualification. */
3485 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
3486 /* Same TBAA behavior with -fstrict-aliasing. */
3487 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
3488 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
3489 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
3490 /* Same alignment. */
3491 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
3492 /* We have to be careful here not to drop a required conversion
3493 from the rhs to the lhs if *t appears on the lhs, or vice versa
3494 if it appears on the rhs. Thus require strict type
3495 compatibility. */
3496 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
3497 {
3498 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
3499 res = true;
3500 }
3501 }
3502
3503 /* Canonicalize TARGET_MEM_REF in particular with respect to
3504 the indexes becoming constant. */
3505 else if (TREE_CODE (*t) == TARGET_MEM_REF)
3506 {
3507 tree tem = maybe_fold_tmr (*t);
3508 if (tem)
3509 {
3510 *t = tem;
3511 res = true;
3512 }
3513 }
3514
3515 return res;
3516 }
3517
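/* As a sketch, assuming  struct S { int i; int j; } s;  where J sits
   at byte offset 4:

     MEM[&s.j, 0]  becomes  MEM[&s, 4]   (the component offset is
                                          folded into the constant
                                          offset operand);
     MEM[&s, 0]    becomes  s            when type, volatility and
                                          alignment match the decl.  */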
3518 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
3519 distinguishes both cases. */
3520
3521 static bool
3522 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
3523 {
3524 bool changed = false;
3525 gimple stmt = gsi_stmt (*gsi);
3526 unsigned i;
3527
3528 /* First do required canonicalization of [TARGET_]MEM_REF addresses
3529 after propagation.
3530 ??? This shouldn't be done in generic folding but in the
3531 propagation helpers which also know whether an address was
3532 propagated. */
3533 switch (gimple_code (stmt))
3534 {
3535 case GIMPLE_ASSIGN:
3536 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
3537 {
3538 tree *rhs = gimple_assign_rhs1_ptr (stmt);
3539 if ((REFERENCE_CLASS_P (*rhs)
3540 || TREE_CODE (*rhs) == ADDR_EXPR)
3541 && maybe_canonicalize_mem_ref_addr (rhs))
3542 changed = true;
3543 tree *lhs = gimple_assign_lhs_ptr (stmt);
3544 if (REFERENCE_CLASS_P (*lhs)
3545 && maybe_canonicalize_mem_ref_addr (lhs))
3546 changed = true;
3547 }
3548 break;
3549 case GIMPLE_CALL:
3550 {
3551 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3552 {
3553 tree *arg = gimple_call_arg_ptr (stmt, i);
3554 if (REFERENCE_CLASS_P (*arg)
3555 && maybe_canonicalize_mem_ref_addr (arg))
3556 changed = true;
3557 }
3558 tree *lhs = gimple_call_lhs_ptr (stmt);
3559 if (*lhs
3560 && REFERENCE_CLASS_P (*lhs)
3561 && maybe_canonicalize_mem_ref_addr (lhs))
3562 changed = true;
3563 break;
3564 }
3565 case GIMPLE_ASM:
3566 {
3567 gasm *asm_stmt = as_a <gasm *> (stmt);
3568 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
3569 {
3570 tree link = gimple_asm_output_op (asm_stmt, i);
3571 tree op = TREE_VALUE (link);
3572 if (REFERENCE_CLASS_P (op)
3573 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
3574 changed = true;
3575 }
3576 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
3577 {
3578 tree link = gimple_asm_input_op (asm_stmt, i);
3579 tree op = TREE_VALUE (link);
3580 if ((REFERENCE_CLASS_P (op)
3581 || TREE_CODE (op) == ADDR_EXPR)
3582 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
3583 changed = true;
3584 }
3585 }
3586 break;
3587 case GIMPLE_DEBUG:
3588 if (gimple_debug_bind_p (stmt))
3589 {
3590 tree *val = gimple_debug_bind_get_value_ptr (stmt);
3591 if (*val
3592 && (REFERENCE_CLASS_P (*val)
3593 || TREE_CODE (*val) == ADDR_EXPR)
3594 && maybe_canonicalize_mem_ref_addr (val))
3595 changed = true;
3596 }
3597 break;
3598 default:;
3599 }
3600
3601 /* Dispatch to pattern-based folding. */
3602 if (!inplace
3603 || is_gimple_assign (stmt)
3604 || gimple_code (stmt) == GIMPLE_COND)
3605 {
3606 gimple_seq seq = NULL;
3607 code_helper rcode;
3608 tree ops[3] = {};
3609 if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq,
3610 valueize, valueize))
3611 {
3612 if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
3613 changed = true;
3614 else
3615 gimple_seq_discard (seq);
3616 }
3617 }
3618
3619 stmt = gsi_stmt (*gsi);
3620
3621 /* Fold the main computation performed by the statement. */
3622 switch (gimple_code (stmt))
3623 {
3624 case GIMPLE_ASSIGN:
3625 {
3626 unsigned old_num_ops = gimple_num_ops (stmt);
3627 enum tree_code subcode = gimple_assign_rhs_code (stmt);
3628 tree lhs = gimple_assign_lhs (stmt);
3629 tree new_rhs;
3630 /* First canonicalize operand order. This avoids building new
3631 trees if this is the only thing fold would later do. */
3632 if ((commutative_tree_code (subcode)
3633 || commutative_ternary_tree_code (subcode))
3634 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
3635 gimple_assign_rhs2 (stmt), false))
3636 {
3637 tree tem = gimple_assign_rhs1 (stmt);
3638 gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
3639 gimple_assign_set_rhs2 (stmt, tem);
3640 changed = true;
3641 }
3642 new_rhs = fold_gimple_assign (gsi);
3643 if (new_rhs
3644 && !useless_type_conversion_p (TREE_TYPE (lhs),
3645 TREE_TYPE (new_rhs)))
3646 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
3647 if (new_rhs
3648 && (!inplace
3649 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
3650 {
3651 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
3652 changed = true;
3653 }
3654 break;
3655 }
3656
3657 case GIMPLE_COND:
3658 changed |= fold_gimple_cond (as_a <gcond *> (stmt));
3659 break;
3660
3661 case GIMPLE_CALL:
3662 changed |= gimple_fold_call (gsi, inplace);
3663 break;
3664
3665 case GIMPLE_ASM:
3666 /* Fold *& in asm operands. */
3667 {
3668 gasm *asm_stmt = as_a <gasm *> (stmt);
3669 size_t noutputs;
3670 const char **oconstraints;
3671 const char *constraint;
3672 bool allows_mem, allows_reg;
3673
3674 noutputs = gimple_asm_noutputs (asm_stmt);
3675 oconstraints = XALLOCAVEC (const char *, noutputs);
3676
3677 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
3678 {
3679 tree link = gimple_asm_output_op (asm_stmt, i);
3680 tree op = TREE_VALUE (link);
3681 oconstraints[i]
3682 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3683 if (REFERENCE_CLASS_P (op)
3684 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
3685 {
3686 TREE_VALUE (link) = op;
3687 changed = true;
3688 }
3689 }
3690 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
3691 {
3692 tree link = gimple_asm_input_op (asm_stmt, i);
3693 tree op = TREE_VALUE (link);
3694 constraint
3695 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3696 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
3697 oconstraints, &allows_mem, &allows_reg);
3698 if (REFERENCE_CLASS_P (op)
3699 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
3700 != NULL_TREE)
3701 {
3702 TREE_VALUE (link) = op;
3703 changed = true;
3704 }
3705 }
3706 }
3707 break;
3708
3709 case GIMPLE_DEBUG:
3710 if (gimple_debug_bind_p (stmt))
3711 {
3712 tree val = gimple_debug_bind_get_value (stmt);
3713 if (val
3714 && REFERENCE_CLASS_P (val))
3715 {
3716 tree tem = maybe_fold_reference (val, false);
3717 if (tem)
3718 {
3719 gimple_debug_bind_set_value (stmt, tem);
3720 changed = true;
3721 }
3722 }
3723 else if (val
3724 && TREE_CODE (val) == ADDR_EXPR)
3725 {
3726 tree ref = TREE_OPERAND (val, 0);
3727 tree tem = maybe_fold_reference (ref, false);
3728 if (tem)
3729 {
3730 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
3731 gimple_debug_bind_set_value (stmt, tem);
3732 changed = true;
3733 }
3734 }
3735 }
3736 break;
3737
3738 default:;
3739 }
3740
3741 stmt = gsi_stmt (*gsi);
3742
3743 /* Fold *& on the lhs. */
3744 if (gimple_has_lhs (stmt))
3745 {
3746 tree lhs = gimple_get_lhs (stmt);
3747 if (lhs && REFERENCE_CLASS_P (lhs))
3748 {
3749 tree new_lhs = maybe_fold_reference (lhs, true);
3750 if (new_lhs)
3751 {
3752 gimple_set_lhs (stmt, new_lhs);
3753 changed = true;
3754 }
3755 }
3756 }
3757
3758 return changed;
3759 }
3760
3761 /* Valueization callback that ends up not following SSA edges. */
3762
3763 tree
3764 no_follow_ssa_edges (tree)
3765 {
3766 return NULL_TREE;
3767 }
3768
3769 /* Valueization callback that ends up following single-use SSA edges only. */
3770
3771 tree
3772 follow_single_use_edges (tree val)
3773 {
3774 if (TREE_CODE (val) == SSA_NAME
3775 && !has_single_use (val))
3776 return NULL_TREE;
3777 return val;
3778 }
3779
3780 /* Fold the statement pointed to by GSI. In some cases, this function may
3781 replace the whole statement with a new one. Returns true iff folding
3782 makes any changes.
3783 The statement pointed to by GSI should be in valid gimple form but may
3784 be in an unfolded state resulting from, for example, constant propagation,
3785 which can produce *&x = 0. */
3786
3787 bool
3788 fold_stmt (gimple_stmt_iterator *gsi)
3789 {
3790 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
3791 }
3792
3793 bool
3794 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
3795 {
3796 return fold_stmt_1 (gsi, false, valueize);
3797 }
3798
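/* A minimal usage sketch (hypothetical helper, illustrative only):
   a pass typically walks each basic block and folds every statement,
   updating the statement afterwards.  Passing follow_single_use_edges
   lets the match-and-simplify machinery look through single-use SSA
   defs only; the first overload defaults to no_follow_ssa_edges.  */

static void ATTRIBUTE_UNUSED
fold_all_stmts_sketch (function *fun)
{
  basic_block bb;
  FOR_EACH_BB_FN (bb, fun)
    for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
         gsi_next (&gsi))
      if (fold_stmt (&gsi, follow_single_use_edges))
        update_stmt (gsi_stmt (&gsi));
}
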
3799 /* Perform the minimal folding on statement *GSI. Only operations like
3800 *&x created by constant propagation are handled. The statement cannot
3801 be replaced with a new one. Return true if the statement was
3802 changed, false otherwise.
3803 The statement *GSI should be in valid gimple form but may
3804 be in an unfolded state resulting from, for example, constant propagation,
3805 which can produce *&x = 0. */
3806
3807 bool
3808 fold_stmt_inplace (gimple_stmt_iterator *gsi)
3809 {
3810 gimple stmt = gsi_stmt (*gsi);
3811 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
3812 gcc_assert (gsi_stmt (*gsi) == stmt);
3813 return changed;
3814 }
3815
3816 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
3817 if EXPR is null or we don't know how.
3818 If non-null, the result always has boolean type. */
3819
3820 static tree
3821 canonicalize_bool (tree expr, bool invert)
3822 {
3823 if (!expr)
3824 return NULL_TREE;
3825 else if (invert)
3826 {
3827 if (integer_nonzerop (expr))
3828 return boolean_false_node;
3829 else if (integer_zerop (expr))
3830 return boolean_true_node;
3831 else if (TREE_CODE (expr) == SSA_NAME)
3832 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
3833 build_int_cst (TREE_TYPE (expr), 0));
3834 else if (COMPARISON_CLASS_P (expr))
3835 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
3836 boolean_type_node,
3837 TREE_OPERAND (expr, 0),
3838 TREE_OPERAND (expr, 1));
3839 else
3840 return NULL_TREE;
3841 }
3842 else
3843 {
3844 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
3845 return expr;
3846 if (integer_nonzerop (expr))
3847 return boolean_true_node;
3848 else if (integer_zerop (expr))
3849 return boolean_false_node;
3850 else if (TREE_CODE (expr) == SSA_NAME)
3851 return fold_build2 (NE_EXPR, boolean_type_node, expr,
3852 build_int_cst (TREE_TYPE (expr), 0));
3853 else if (COMPARISON_CLASS_P (expr))
3854 return fold_build2 (TREE_CODE (expr),
3855 boolean_type_node,
3856 TREE_OPERAND (expr, 0),
3857 TREE_OPERAND (expr, 1));
3858 else
3859 return NULL_TREE;
3860 }
3861 }
3862
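/* For illustration, given a boolean SSA name b_1 and operands a, c:
     canonicalize_bool (b_1, false)   => b_1
     canonicalize_bool (b_1, true)    => b_1 == 0
     canonicalize_bool (a < c, true)  => a >= c
     canonicalize_bool (1, true)      => boolean_false_node
   The inversion uses invert_tree_comparison with honor_nans == false,
   so callers must not rely on it for NaN-aware floating-point
   comparisons.  */
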
3863 /* Check to see if a boolean expression EXPR is logically equivalent to the
3864 comparison (OP1 CODE OP2). Check for various identities involving
3865 SSA_NAMEs. */
3866
3867 static bool
3868 same_bool_comparison_p (const_tree expr, enum tree_code code,
3869 const_tree op1, const_tree op2)
3870 {
3871 gimple s;
3872
3873 /* The obvious case. */
3874 if (TREE_CODE (expr) == code
3875 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
3876 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
3877 return true;
3878
3879 /* Check for EXPR being the NAME itself when the comparison is (name != 0)
3880 or (name == 1), and for EXPR being an SSA_NAME whose definition matches the comparison. */
3881 if (TREE_CODE (expr) == SSA_NAME
3882 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
3883 {
3884 if (operand_equal_p (expr, op1, 0))
3885 return ((code == NE_EXPR && integer_zerop (op2))
3886 || (code == EQ_EXPR && integer_nonzerop (op2)));
3887 s = SSA_NAME_DEF_STMT (expr);
3888 if (is_gimple_assign (s)
3889 && gimple_assign_rhs_code (s) == code
3890 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
3891 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
3892 return true;
3893 }
3894
3895 /* If op1 is of the form (name != 0) or (name == 0), and the definition
3896 of name is a comparison, recurse. */
3897 if (TREE_CODE (op1) == SSA_NAME
3898 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
3899 {
3900 s = SSA_NAME_DEF_STMT (op1);
3901 if (is_gimple_assign (s)
3902 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
3903 {
3904 enum tree_code c = gimple_assign_rhs_code (s);
3905 if ((code == NE_EXPR && integer_zerop (op2))
3906 || (code == EQ_EXPR && integer_nonzerop (op2)))
3907 return same_bool_comparison_p (expr, c,
3908 gimple_assign_rhs1 (s),
3909 gimple_assign_rhs2 (s));
3910 if ((code == EQ_EXPR && integer_zerop (op2))
3911 || (code == NE_EXPR && integer_nonzerop (op2)))
3912 return same_bool_comparison_p (expr,
3913 invert_tree_comparison (c, false),
3914 gimple_assign_rhs1 (s),
3915 gimple_assign_rhs2 (s));
3916 }
3917 }
3918 return false;
3919 }
3920
3921 /* Check to see if two boolean expressions OP1 and OP2 are logically
3922 equivalent. */
3923
3924 static bool
3925 same_bool_result_p (const_tree op1, const_tree op2)
3926 {
3927 /* Simple cases first. */
3928 if (operand_equal_p (op1, op2, 0))
3929 return true;
3930
3931 /* Check the cases where at least one of the operands is a comparison.
3932 These are a bit smarter than operand_equal_p in that they apply some
3933 identities on SSA_NAMEs. */
3934 if (COMPARISON_CLASS_P (op2)
3935 && same_bool_comparison_p (op1, TREE_CODE (op2),
3936 TREE_OPERAND (op2, 0),
3937 TREE_OPERAND (op2, 1)))
3938 return true;
3939 if (COMPARISON_CLASS_P (op1)
3940 && same_bool_comparison_p (op2, TREE_CODE (op1),
3941 TREE_OPERAND (op1, 0),
3942 TREE_OPERAND (op1, 1)))
3943 return true;
3944
3945 /* Default case. */
3946 return false;
3947 }
3948
3949 /* Forward declarations for some mutually recursive functions. */
3950
3951 static tree
3952 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
3953 enum tree_code code2, tree op2a, tree op2b);
3954 static tree
3955 and_var_with_comparison (tree var, bool invert,
3956 enum tree_code code2, tree op2a, tree op2b);
3957 static tree
3958 and_var_with_comparison_1 (gimple stmt,
3959 enum tree_code code2, tree op2a, tree op2b);
3960 static tree
3961 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
3962 enum tree_code code2, tree op2a, tree op2b);
3963 static tree
3964 or_var_with_comparison (tree var, bool invert,
3965 enum tree_code code2, tree op2a, tree op2b);
3966 static tree
3967 or_var_with_comparison_1 (gimple stmt,
3968 enum tree_code code2, tree op2a, tree op2b);
3969
3970 /* Helper function for and_comparisons_1: try to simplify the AND of the
3971 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
3972 If INVERT is true, invert the value of the VAR before doing the AND.
3973 Return NULL_TREE if we can't simplify this to a single expression. */
3974
3975 static tree
3976 and_var_with_comparison (tree var, bool invert,
3977 enum tree_code code2, tree op2a, tree op2b)
3978 {
3979 tree t;
3980 gimple stmt = SSA_NAME_DEF_STMT (var);
3981
3982 /* We can only deal with variables whose definitions are assignments. */
3983 if (!is_gimple_assign (stmt))
3984 return NULL_TREE;
3985
3986 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
3987 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
3988 Then we only have to consider the simpler non-inverted cases. */
3989 if (invert)
3990 t = or_var_with_comparison_1 (stmt,
3991 invert_tree_comparison (code2, false),
3992 op2a, op2b);
3993 else
3994 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
3995 return canonicalize_bool (t, invert);
3996 }
3997
3998 /* Try to simplify the AND of the ssa variable defined by the assignment
3999 STMT with the comparison specified by (OP2A CODE2 OP2B).
4000 Return NULL_TREE if we can't simplify this to a single expression. */
4001
4002 static tree
4003 and_var_with_comparison_1 (gimple stmt,
4004 enum tree_code code2, tree op2a, tree op2b)
4005 {
4006 tree var = gimple_assign_lhs (stmt);
4007 tree true_test_var = NULL_TREE;
4008 tree false_test_var = NULL_TREE;
4009 enum tree_code innercode = gimple_assign_rhs_code (stmt);
4010
4011 /* Check for identities like (var AND (var == 0)) => false. */
4012 if (TREE_CODE (op2a) == SSA_NAME
4013 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
4014 {
4015 if ((code2 == NE_EXPR && integer_zerop (op2b))
4016 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
4017 {
4018 true_test_var = op2a;
4019 if (var == true_test_var)
4020 return var;
4021 }
4022 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
4023 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
4024 {
4025 false_test_var = op2a;
4026 if (var == false_test_var)
4027 return boolean_false_node;
4028 }
4029 }
4030
4031 /* If the definition is a comparison, recurse on it. */
4032 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
4033 {
4034 tree t = and_comparisons_1 (innercode,
4035 gimple_assign_rhs1 (stmt),
4036 gimple_assign_rhs2 (stmt),
4037 code2,
4038 op2a,
4039 op2b);
4040 if (t)
4041 return t;
4042 }
4043
4044 /* If the definition is an AND or OR expression, we may be able to
4045 simplify by reassociating. */
4046 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
4047 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
4048 {
4049 tree inner1 = gimple_assign_rhs1 (stmt);
4050 tree inner2 = gimple_assign_rhs2 (stmt);
4051 gimple s;
4052 tree t;
4053 tree partial = NULL_TREE;
4054 bool is_and = (innercode == BIT_AND_EXPR);
4055
4056 /* Check for boolean identities that don't require recursive examination
4057 of inner1/inner2:
4058 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
4059 inner1 AND (inner1 OR inner2) => inner1
4060 !inner1 AND (inner1 AND inner2) => false
4061 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
4062 Likewise for similar cases involving inner2. */
4063 if (inner1 == true_test_var)
4064 return (is_and ? var : inner1);
4065 else if (inner2 == true_test_var)
4066 return (is_and ? var : inner2);
4067 else if (inner1 == false_test_var)
4068 return (is_and
4069 ? boolean_false_node
4070 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
4071 else if (inner2 == false_test_var)
4072 return (is_and
4073 ? boolean_false_node
4074 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
4075
4076 /* Next, redistribute/reassociate the AND across the inner tests.
4077 Compute the first partial result, (inner1 AND (op2a code op2b)) */
4078 if (TREE_CODE (inner1) == SSA_NAME
4079 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
4080 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4081 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
4082 gimple_assign_rhs1 (s),
4083 gimple_assign_rhs2 (s),
4084 code2, op2a, op2b)))
4085 {
4086 /* Handle the AND case, where we are reassociating:
4087 (inner1 AND inner2) AND (op2a code2 op2b)
4088 => (t AND inner2)
4089 If the partial result t is a constant, we win. Otherwise
4090 continue on to try reassociating with the other inner test. */
4091 if (is_and)
4092 {
4093 if (integer_onep (t))
4094 return inner2;
4095 else if (integer_zerop (t))
4096 return boolean_false_node;
4097 }
4098
4099 /* Handle the OR case, where we are redistributing:
4100 (inner1 OR inner2) AND (op2a code2 op2b)
4101 => (t OR (inner2 AND (op2a code2 op2b))) */
4102 else if (integer_onep (t))
4103 return boolean_true_node;
4104
4105 /* Save partial result for later. */
4106 partial = t;
4107 }
4108
4109 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
4110 if (TREE_CODE (inner2) == SSA_NAME
4111 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
4112 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4113 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
4114 gimple_assign_rhs1 (s),
4115 gimple_assign_rhs2 (s),
4116 code2, op2a, op2b)))
4117 {
4118 /* Handle the AND case, where we are reassociating:
4119 (inner1 AND inner2) AND (op2a code2 op2b)
4120 => (inner1 AND t) */
4121 if (is_and)
4122 {
4123 if (integer_onep (t))
4124 return inner1;
4125 else if (integer_zerop (t))
4126 return boolean_false_node;
4127 /* If both are the same, we can apply the identity
4128 (x AND x) == x. */
4129 else if (partial && same_bool_result_p (t, partial))
4130 return t;
4131 }
4132
4133 /* Handle the OR case, where we are redistributing:
4134 (inner1 OR inner2) AND (op2a code2 op2b)
4135 => (t OR (inner1 AND (op2a code2 op2b)))
4136 => (t OR partial) */
4137 else
4138 {
4139 if (integer_onep (t))
4140 return boolean_true_node;
4141 else if (partial)
4142 {
4143 /* We already got a simplification for the other
4144 operand to the redistributed OR expression. The
4145 interesting case is when at least one is false.
4146 Or, if both are the same, we can apply the identity
4147 (x OR x) == x. */
4148 if (integer_zerop (partial))
4149 return t;
4150 else if (integer_zerop (t))
4151 return partial;
4152 else if (same_bool_result_p (t, partial))
4153 return t;
4154 }
4155 }
4156 }
4157 }
4158 return NULL_TREE;
4159 }
4160
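/* Worked example: if VAR is defined by "var_3 = a_1 < b_2" and the
   caller asks for var_3 AND (a_1 >= b_2), the definition is a
   comparison, so and_comparisons_1 combines (a_1 < b_2) AND
   (a_1 >= b_2); combine_comparisons reduces that to constant false
   and the result canonicalizes to boolean_false_node.  */
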
4161 /* Try to simplify the AND of two comparisons defined by
4162 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4163 If this can be done without constructing an intermediate value,
4164 return the resulting tree; otherwise NULL_TREE is returned.
4165 This function is deliberately asymmetric as it recurses on SSA_DEFs
4166 in the first comparison but not the second. */
4167
4168 static tree
4169 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4170 enum tree_code code2, tree op2a, tree op2b)
4171 {
4172 tree truth_type = truth_type_for (TREE_TYPE (op1a));
4173
4174 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
4175 if (operand_equal_p (op1a, op2a, 0)
4176 && operand_equal_p (op1b, op2b, 0))
4177 {
4178 /* Result will be either NULL_TREE, or a combined comparison. */
4179 tree t = combine_comparisons (UNKNOWN_LOCATION,
4180 TRUTH_ANDIF_EXPR, code1, code2,
4181 truth_type, op1a, op1b);
4182 if (t)
4183 return t;
4184 }
4185
4186 /* Likewise the swapped case of the above. */
4187 if (operand_equal_p (op1a, op2b, 0)
4188 && operand_equal_p (op1b, op2a, 0))
4189 {
4190 /* Result will be either NULL_TREE, or a combined comparison. */
4191 tree t = combine_comparisons (UNKNOWN_LOCATION,
4192 TRUTH_ANDIF_EXPR, code1,
4193 swap_tree_comparison (code2),
4194 truth_type, op1a, op1b);
4195 if (t)
4196 return t;
4197 }
4198
4199 /* If both comparisons are of the same value against constants, we might
4200 be able to merge them. */
4201 if (operand_equal_p (op1a, op2a, 0)
4202 && TREE_CODE (op1b) == INTEGER_CST
4203 && TREE_CODE (op2b) == INTEGER_CST)
4204 {
4205 int cmp = tree_int_cst_compare (op1b, op2b);
4206
4207 /* If we have (op1a == op1b), we should either be able to
4208 return that or FALSE, depending on whether the constant op1b
4209 also satisfies the other comparison against op2b. */
4210 if (code1 == EQ_EXPR)
4211 {
4212 bool done = true;
4213 bool val;
4214 switch (code2)
4215 {
4216 case EQ_EXPR: val = (cmp == 0); break;
4217 case NE_EXPR: val = (cmp != 0); break;
4218 case LT_EXPR: val = (cmp < 0); break;
4219 case GT_EXPR: val = (cmp > 0); break;
4220 case LE_EXPR: val = (cmp <= 0); break;
4221 case GE_EXPR: val = (cmp >= 0); break;
4222 default: done = false;
4223 }
4224 if (done)
4225 {
4226 if (val)
4227 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4228 else
4229 return boolean_false_node;
4230 }
4231 }
4232 /* Likewise if the second comparison is an == comparison. */
4233 else if (code2 == EQ_EXPR)
4234 {
4235 bool done = true;
4236 bool val;
4237 switch (code1)
4238 {
4239 case EQ_EXPR: val = (cmp == 0); break;
4240 case NE_EXPR: val = (cmp != 0); break;
4241 case LT_EXPR: val = (cmp > 0); break;
4242 case GT_EXPR: val = (cmp < 0); break;
4243 case LE_EXPR: val = (cmp >= 0); break;
4244 case GE_EXPR: val = (cmp <= 0); break;
4245 default: done = false;
4246 }
4247 if (done)
4248 {
4249 if (val)
4250 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4251 else
4252 return boolean_false_node;
4253 }
4254 }
4255
4256 /* Same business with inequality tests. */
4257 else if (code1 == NE_EXPR)
4258 {
4259 bool val;
4260 switch (code2)
4261 {
4262 case EQ_EXPR: val = (cmp != 0); break;
4263 case NE_EXPR: val = (cmp == 0); break;
4264 case LT_EXPR: val = (cmp >= 0); break;
4265 case GT_EXPR: val = (cmp <= 0); break;
4266 case LE_EXPR: val = (cmp > 0); break;
4267 case GE_EXPR: val = (cmp < 0); break;
4268 default:
4269 val = false;
4270 }
4271 if (val)
4272 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4273 }
4274 else if (code2 == NE_EXPR)
4275 {
4276 bool val;
4277 switch (code1)
4278 {
4279 case EQ_EXPR: val = (cmp == 0); break;
4280 case NE_EXPR: val = (cmp != 0); break;
4281 case LT_EXPR: val = (cmp <= 0); break;
4282 case GT_EXPR: val = (cmp >= 0); break;
4283 case LE_EXPR: val = (cmp < 0); break;
4284 case GE_EXPR: val = (cmp > 0); break;
4285 default:
4286 val = false;
4287 }
4288 if (val)
4289 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4290 }
4291
4292 /* Choose the more restrictive of two < or <= comparisons. */
4293 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
4294 && (code2 == LT_EXPR || code2 == LE_EXPR))
4295 {
4296 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
4297 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4298 else
4299 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4300 }
4301
4302 /* Likewise choose the more restrictive of two > or >= comparisons. */
4303 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
4304 && (code2 == GT_EXPR || code2 == GE_EXPR))
4305 {
4306 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
4307 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4308 else
4309 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4310 }
4311
4312 /* Check for singleton ranges. */
4313 else if (cmp == 0
4314 && ((code1 == LE_EXPR && code2 == GE_EXPR)
4315 || (code1 == GE_EXPR && code2 == LE_EXPR)))
4316 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
4317
4318 /* Check for disjoint ranges. */
4319 else if (cmp <= 0
4320 && (code1 == LT_EXPR || code1 == LE_EXPR)
4321 && (code2 == GT_EXPR || code2 == GE_EXPR))
4322 return boolean_false_node;
4323 else if (cmp >= 0
4324 && (code1 == GT_EXPR || code1 == GE_EXPR)
4325 && (code2 == LT_EXPR || code2 == LE_EXPR))
4326 return boolean_false_node;
4327 }
4328
4329 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4330 NAME's definition is a truth value. See if there are any simplifications
4331 that can be done against the NAME's definition. */
4332 if (TREE_CODE (op1a) == SSA_NAME
4333 && (code1 == NE_EXPR || code1 == EQ_EXPR)
4334 && (integer_zerop (op1b) || integer_onep (op1b)))
4335 {
4336 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
4337 || (code1 == NE_EXPR && integer_onep (op1b)));
4338 gimple stmt = SSA_NAME_DEF_STMT (op1a);
4339 switch (gimple_code (stmt))
4340 {
4341 case GIMPLE_ASSIGN:
4342 /* Try to simplify by copy-propagating the definition. */
4343 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
4344
4345 case GIMPLE_PHI:
4346 /* If every argument to the PHI produces the same result when
4347 ANDed with the second comparison, we win.
4348 Do not do this unless the type is bool since we need a bool
4349 result here anyway. */
4350 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
4351 {
4352 tree result = NULL_TREE;
4353 unsigned i;
4354 for (i = 0; i < gimple_phi_num_args (stmt); i++)
4355 {
4356 tree arg = gimple_phi_arg_def (stmt, i);
4357
4358 /* If this PHI has itself as an argument, ignore it.
4359 If all the other args produce the same result,
4360 we're still OK. */
4361 if (arg == gimple_phi_result (stmt))
4362 continue;
4363 else if (TREE_CODE (arg) == INTEGER_CST)
4364 {
4365 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
4366 {
4367 if (!result)
4368 result = boolean_false_node;
4369 else if (!integer_zerop (result))
4370 return NULL_TREE;
4371 }
4372 else if (!result)
4373 result = fold_build2 (code2, boolean_type_node,
4374 op2a, op2b);
4375 else if (!same_bool_comparison_p (result,
4376 code2, op2a, op2b))
4377 return NULL_TREE;
4378 }
4379 else if (TREE_CODE (arg) == SSA_NAME
4380 && !SSA_NAME_IS_DEFAULT_DEF (arg))
4381 {
4382 tree temp;
4383 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
4384 /* In simple cases we can look through PHI nodes,
4385 but we have to be careful with loops.
4386 See PR49073. */
4387 if (! dom_info_available_p (CDI_DOMINATORS)
4388 || gimple_bb (def_stmt) == gimple_bb (stmt)
4389 || dominated_by_p (CDI_DOMINATORS,
4390 gimple_bb (def_stmt),
4391 gimple_bb (stmt)))
4392 return NULL_TREE;
4393 temp = and_var_with_comparison (arg, invert, code2,
4394 op2a, op2b);
4395 if (!temp)
4396 return NULL_TREE;
4397 else if (!result)
4398 result = temp;
4399 else if (!same_bool_result_p (result, temp))
4400 return NULL_TREE;
4401 }
4402 else
4403 return NULL_TREE;
4404 }
4405 return result;
4406 }
4407
4408 default:
4409 break;
4410 }
4411 }
4412 return NULL_TREE;
4413 }
4414
4415 /* Try to simplify the AND of two comparisons, specified by
4416 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4417 If this can be simplified to a single expression (without requiring
4418 introducing more SSA variables to hold intermediate values),
4419 return the resulting tree. Otherwise return NULL_TREE.
4420 If the result expression is non-null, it has boolean type. */
4421
4422 tree
4423 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
4424 enum tree_code code2, tree op2a, tree op2b)
4425 {
4426 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4427 if (t)
4428 return t;
4429 else
4430 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4431 }
4432
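/* A minimal usage sketch (hypothetical helper, illustrative only):
   merging two compares of the same SSA name X against INTEGER_CSTs,
   as an ifcombine-style transform might do.  With C1 == 10 and
   C2 == 5 the call below yields (x < 5); a disjoint pair such as
   (x < 5) AND (x > 10) would instead yield boolean_false_node.  */

static tree ATTRIBUTE_UNUSED
merge_and_range_checks_sketch (tree x, tree c1, tree c2)
{
  return maybe_fold_and_comparisons (LE_EXPR, x, c1, LT_EXPR, x, c2);
}
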
4433 /* Helper function for or_comparisons_1: try to simplify the OR of the
4434 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4435 If INVERT is true, invert the value of VAR before doing the OR.
4436 Return NULL_TREE if we can't simplify this to a single expression. */
4437
4438 static tree
4439 or_var_with_comparison (tree var, bool invert,
4440 enum tree_code code2, tree op2a, tree op2b)
4441 {
4442 tree t;
4443 gimple stmt = SSA_NAME_DEF_STMT (var);
4444
4445 /* We can only deal with variables whose definitions are assignments. */
4446 if (!is_gimple_assign (stmt))
4447 return NULL_TREE;
4448
4449 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4450 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
4451 Then we only have to consider the simpler non-inverted cases. */
4452 if (invert)
4453 t = and_var_with_comparison_1 (stmt,
4454 invert_tree_comparison (code2, false),
4455 op2a, op2b);
4456 else
4457 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
4458 return canonicalize_bool (t, invert);
4459 }
4460
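/* For illustration of the DeMorgan rewrite above: with INVERT set,
   !var OR (op2a code2 op2b) is computed by simplifying
   var AND !(op2a code2 op2b) instead; and_var_with_comparison_1 is
   called with the inverted comparison code and canonicalize_bool then
   inverts the simplified result.  */
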
4461 /* Try to simplify the OR of the ssa variable defined by the assignment
4462 STMT with the comparison specified by (OP2A CODE2 OP2B).
4463 Return NULL_TREE if we can't simplify this to a single expression. */
4464
4465 static tree
4466 or_var_with_comparison_1 (gimple stmt,
4467 enum tree_code code2, tree op2a, tree op2b)
4468 {
4469 tree var = gimple_assign_lhs (stmt);
4470 tree true_test_var = NULL_TREE;
4471 tree false_test_var = NULL_TREE;
4472 enum tree_code innercode = gimple_assign_rhs_code (stmt);
4473
4474 /* Check for identities like (var OR (var != 0)) => true. */
4475 if (TREE_CODE (op2a) == SSA_NAME
4476 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
4477 {
4478 if ((code2 == NE_EXPR && integer_zerop (op2b))
4479 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
4480 {
4481 true_test_var = op2a;
4482 if (var == true_test_var)
4483 return var;
4484 }
4485 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
4486 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
4487 {
4488 false_test_var = op2a;
4489 if (var == false_test_var)
4490 return boolean_true_node;
4491 }
4492 }
4493
4494 /* If the definition is a comparison, recurse on it. */
4495 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
4496 {
4497 tree t = or_comparisons_1 (innercode,
4498 gimple_assign_rhs1 (stmt),
4499 gimple_assign_rhs2 (stmt),
4500 code2,
4501 op2a,
4502 op2b);
4503 if (t)
4504 return t;
4505 }
4506
4507 /* If the definition is an AND or OR expression, we may be able to
4508 simplify by reassociating. */
4509 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
4510 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
4511 {
4512 tree inner1 = gimple_assign_rhs1 (stmt);
4513 tree inner2 = gimple_assign_rhs2 (stmt);
4514 gimple s;
4515 tree t;
4516 tree partial = NULL_TREE;
4517 bool is_or = (innercode == BIT_IOR_EXPR);
4518
4519 /* Check for boolean identities that don't require recursive examination
4520 of inner1/inner2:
4521 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
4522 inner1 OR (inner1 AND inner2) => inner1
4523 !inner1 OR (inner1 OR inner2) => true
4524 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
4525 */
4526 if (inner1 == true_test_var)
4527 return (is_or ? var : inner1);
4528 else if (inner2 == true_test_var)
4529 return (is_or ? var : inner2);
4530 else if (inner1 == false_test_var)
4531 return (is_or
4532 ? boolean_true_node
4533 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
4534 else if (inner2 == false_test_var)
4535 return (is_or
4536 ? boolean_true_node
4537 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
4538
4539 /* Next, redistribute/reassociate the OR across the inner tests.
4540 Compute the first partial result, (inner1 OR (op2a code op2b)) */
4541 if (TREE_CODE (inner1) == SSA_NAME
4542 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
4543 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4544 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
4545 gimple_assign_rhs1 (s),
4546 gimple_assign_rhs2 (s),
4547 code2, op2a, op2b)))
4548 {
4549 /* Handle the OR case, where we are reassociating:
4550 (inner1 OR inner2) OR (op2a code2 op2b)
4551 => (t OR inner2)
4552 If the partial result t is a constant, we win. Otherwise
4553 continue on to try reassociating with the other inner test. */
4554 if (is_or)
4555 {
4556 if (integer_onep (t))
4557 return boolean_true_node;
4558 else if (integer_zerop (t))
4559 return inner2;
4560 }
4561
4562 /* Handle the AND case, where we are redistributing:
4563 (inner1 AND inner2) OR (op2a code2 op2b)
4564 => (t AND (inner2 OR (op2a code op2b))) */
4565 else if (integer_zerop (t))
4566 return boolean_false_node;
4567
4568 /* Save partial result for later. */
4569 partial = t;
4570 }
4571
4572 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
4573 if (TREE_CODE (inner2) == SSA_NAME
4574 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
4575 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4576 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
4577 gimple_assign_rhs1 (s),
4578 gimple_assign_rhs2 (s),
4579 code2, op2a, op2b)))
4580 {
4581 /* Handle the OR case, where we are reassociating:
4582 (inner1 OR inner2) OR (op2a code2 op2b)
4583 => (inner1 OR t)
4584 => (t OR partial) */
4585 if (is_or)
4586 {
4587 if (integer_zerop (t))
4588 return inner1;
4589 else if (integer_onep (t))
4590 return boolean_true_node;
4591 /* If both are the same, we can apply the identity
4592 (x OR x) == x. */
4593 else if (partial && same_bool_result_p (t, partial))
4594 return t;
4595 }
4596
4597 /* Handle the AND case, where we are redistributing:
4598 (inner1 AND inner2) OR (op2a code2 op2b)
4599 => (t AND (inner1 OR (op2a code2 op2b)))
4600 => (t AND partial) */
4601 else
4602 {
4603 if (integer_zerop (t))
4604 return boolean_false_node;
4605 else if (partial)
4606 {
4607 /* We already got a simplification for the other
4608 operand to the redistributed AND expression. The
4609 interesting case is when at least one is true.
4610 Or, if both are the same, we can apply the identity
4611 (x AND x) == x. */
4612 if (integer_onep (partial))
4613 return t;
4614 else if (integer_onep (t))
4615 return partial;
4616 else if (same_bool_result_p (t, partial))
4617 return t;
4618 }
4619 }
4620 }
4621 }
4622 return NULL_TREE;
4623 }
4624
4625 /* Try to simplify the OR of two comparisons defined by
4626 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4627 If this can be done without constructing an intermediate value,
4628 return the resulting tree; otherwise NULL_TREE is returned.
4629 This function is deliberately asymmetric as it recurses on SSA_DEFs
4630 in the first comparison but not the second. */
4631
4632 static tree
4633 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4634 enum tree_code code2, tree op2a, tree op2b)
4635 {
4636 tree truth_type = truth_type_for (TREE_TYPE (op1a));
4637
4638 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
4639 if (operand_equal_p (op1a, op2a, 0)
4640 && operand_equal_p (op1b, op2b, 0))
4641 {
4642 /* Result will be either NULL_TREE, or a combined comparison. */
4643 tree t = combine_comparisons (UNKNOWN_LOCATION,
4644 TRUTH_ORIF_EXPR, code1, code2,
4645 truth_type, op1a, op1b);
4646 if (t)
4647 return t;
4648 }
4649
4650 /* Likewise the swapped case of the above. */
4651 if (operand_equal_p (op1a, op2b, 0)
4652 && operand_equal_p (op1b, op2a, 0))
4653 {
4654 /* Result will be either NULL_TREE, or a combined comparison. */
4655 tree t = combine_comparisons (UNKNOWN_LOCATION,
4656 TRUTH_ORIF_EXPR, code1,
4657 swap_tree_comparison (code2),
4658 truth_type, op1a, op1b);
4659 if (t)
4660 return t;
4661 }
4662
4663 /* If both comparisons are of the same value against constants, we might
4664 be able to merge them. */
4665 if (operand_equal_p (op1a, op2a, 0)
4666 && TREE_CODE (op1b) == INTEGER_CST
4667 && TREE_CODE (op2b) == INTEGER_CST)
4668 {
4669 int cmp = tree_int_cst_compare (op1b, op2b);
4670
4671 /* If we have (op1a != op1b), we should either be able to
4672 return that or TRUE, depending on whether the constant op1b
4673 also satisfies the other comparison against op2b. */
4674 if (code1 == NE_EXPR)
4675 {
4676 bool done = true;
4677 bool val;
4678 switch (code2)
4679 {
4680 case EQ_EXPR: val = (cmp == 0); break;
4681 case NE_EXPR: val = (cmp != 0); break;
4682 case LT_EXPR: val = (cmp < 0); break;
4683 case GT_EXPR: val = (cmp > 0); break;
4684 case LE_EXPR: val = (cmp <= 0); break;
4685 case GE_EXPR: val = (cmp >= 0); break;
4686 default: done = false;
4687 }
4688 if (done)
4689 {
4690 if (val)
4691 return boolean_true_node;
4692 else
4693 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4694 }
4695 }
4696 /* Likewise if the second comparison is a != comparison. */
4697 else if (code2 == NE_EXPR)
4698 {
4699 bool done = true;
4700 bool val;
4701 switch (code1)
4702 {
4703 case EQ_EXPR: val = (cmp == 0); break;
4704 case NE_EXPR: val = (cmp != 0); break;
4705 case LT_EXPR: val = (cmp > 0); break;
4706 case GT_EXPR: val = (cmp < 0); break;
4707 case LE_EXPR: val = (cmp >= 0); break;
4708 case GE_EXPR: val = (cmp <= 0); break;
4709 default: done = false;
4710 }
4711 if (done)
4712 {
4713 if (val)
4714 return boolean_true_node;
4715 else
4716 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4717 }
4718 }
4719
4720 /* See if an equality test is redundant with the other comparison. */
4721 else if (code1 == EQ_EXPR)
4722 {
4723 bool val;
4724 switch (code2)
4725 {
4726 case EQ_EXPR: val = (cmp == 0); break;
4727 case NE_EXPR: val = (cmp != 0); break;
4728 case LT_EXPR: val = (cmp < 0); break;
4729 case GT_EXPR: val = (cmp > 0); break;
4730 case LE_EXPR: val = (cmp <= 0); break;
4731 case GE_EXPR: val = (cmp >= 0); break;
4732 default:
4733 val = false;
4734 }
4735 if (val)
4736 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4737 }
4738 else if (code2 == EQ_EXPR)
4739 {
4740 bool val;
4741 switch (code1)
4742 {
4743 case EQ_EXPR: val = (cmp == 0); break;
4744 case NE_EXPR: val = (cmp != 0); break;
4745 case LT_EXPR: val = (cmp > 0); break;
4746 case GT_EXPR: val = (cmp < 0); break;
4747 case LE_EXPR: val = (cmp >= 0); break;
4748 case GE_EXPR: val = (cmp <= 0); break;
4749 default:
4750 val = false;
4751 }
4752 if (val)
4753 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4754 }
4755
4756 /* Choose the less restrictive of two < or <= comparisons. */
4757 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
4758 && (code2 == LT_EXPR || code2 == LE_EXPR))
4759 {
4760 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
4761 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4762 else
4763 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4764 }
4765
4766 /* Likewise choose the less restrictive of two > or >= comparisons. */
4767 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
4768 && (code2 == GT_EXPR || code2 == GE_EXPR))
4769 {
4770 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
4771 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4772 else
4773 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4774 }
4775
4776 /* Check for singleton ranges. */
4777 else if (cmp == 0
4778 && ((code1 == LT_EXPR && code2 == GT_EXPR)
4779 || (code1 == GT_EXPR && code2 == LT_EXPR)))
4780 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
4781
4782 /* Check for less/greater pairs that don't restrict the range at all. */
4783 else if (cmp >= 0
4784 && (code1 == LT_EXPR || code1 == LE_EXPR)
4785 && (code2 == GT_EXPR || code2 == GE_EXPR))
4786 return boolean_true_node;
4787 else if (cmp <= 0
4788 && (code1 == GT_EXPR || code1 == GE_EXPR)
4789 && (code2 == LT_EXPR || code2 == LE_EXPR))
4790 return boolean_true_node;
4791 }
4792
4793 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4794 NAME's definition is a truth value. See if there are any simplifications
4795 that can be done against the NAME's definition. */
4796 if (TREE_CODE (op1a) == SSA_NAME
4797 && (code1 == NE_EXPR || code1 == EQ_EXPR)
4798 && (integer_zerop (op1b) || integer_onep (op1b)))
4799 {
4800 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
4801 || (code1 == NE_EXPR && integer_onep (op1b)));
4802 gimple stmt = SSA_NAME_DEF_STMT (op1a);
4803 switch (gimple_code (stmt))
4804 {
4805 case GIMPLE_ASSIGN:
4806 /* Try to simplify by copy-propagating the definition. */
4807 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
4808
4809 case GIMPLE_PHI:
4810 /* If every argument to the PHI produces the same result when
4811 ORed with the second comparison, we win.
4812 Do not do this unless the type is bool since we need a bool
4813 result here anyway. */
4814 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
4815 {
4816 tree result = NULL_TREE;
4817 unsigned i;
4818 for (i = 0; i < gimple_phi_num_args (stmt); i++)
4819 {
4820 tree arg = gimple_phi_arg_def (stmt, i);
4821
4822 /* If this PHI has itself as an argument, ignore it.
4823 If all the other args produce the same result,
4824 we're still OK. */
4825 if (arg == gimple_phi_result (stmt))
4826 continue;
4827 else if (TREE_CODE (arg) == INTEGER_CST)
4828 {
4829 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
4830 {
4831 if (!result)
4832 result = boolean_true_node;
4833 else if (!integer_onep (result))
4834 return NULL_TREE;
4835 }
4836 else if (!result)
4837 result = fold_build2 (code2, boolean_type_node,
4838 op2a, op2b);
4839 else if (!same_bool_comparison_p (result,
4840 code2, op2a, op2b))
4841 return NULL_TREE;
4842 }
4843 else if (TREE_CODE (arg) == SSA_NAME
4844 && !SSA_NAME_IS_DEFAULT_DEF (arg))
4845 {
4846 tree temp;
4847 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
4848 /* In simple cases we can look through PHI nodes,
4849 but we have to be careful with loops.
4850 See PR49073. */
4851 if (! dom_info_available_p (CDI_DOMINATORS)
4852 || gimple_bb (def_stmt) == gimple_bb (stmt)
4853 || dominated_by_p (CDI_DOMINATORS,
4854 gimple_bb (def_stmt),
4855 gimple_bb (stmt)))
4856 return NULL_TREE;
4857 temp = or_var_with_comparison (arg, invert, code2,
4858 op2a, op2b);
4859 if (!temp)
4860 return NULL_TREE;
4861 else if (!result)
4862 result = temp;
4863 else if (!same_bool_result_p (result, temp))
4864 return NULL_TREE;
4865 }
4866 else
4867 return NULL_TREE;
4868 }
4869 return result;
4870 }
4871
4872 default:
4873 break;
4874 }
4875 }
4876 return NULL_TREE;
4877 }
4878
4879 /* Try to simplify the OR of two comparisons, specified by
4880 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4881 If this can be simplified to a single expression (without requiring
4882 introducing more SSA variables to hold intermediate values),
4883 return the resulting tree. Otherwise return NULL_TREE.
4884 If the result expression is non-null, it has boolean type. */
4885
4886 tree
4887 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
4888 enum tree_code code2, tree op2a, tree op2b)
4889 {
4890 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4891 if (t)
4892 return t;
4893 else
4894 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4895 }
4896
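/* A minimal usage sketch (hypothetical helper, illustrative only):
   the OR counterpart.  For equal bounds C, (x < C) OR (x > C) folds
   to (x != C) through the singleton-range check above, while a pair
   of half-ranges covering everything, e.g. (x <= 7) OR (x >= 3),
   folds to boolean_true_node.  */

static tree ATTRIBUTE_UNUSED
merge_or_range_checks_sketch (tree x, tree c)
{
  return maybe_fold_or_comparisons (LT_EXPR, x, c, GT_EXPR, x, c);
}
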
4897
4898 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
4899
4900 Either NULL_TREE, a simplified but non-constant expression, or a constant
4901 is returned.
4902
4903 ??? This should go into a gimple-fold-inline.h file to be eventually
4904 privatized with the single valueize function used in the various TUs
4905 to avoid the indirect function call overhead. */
4906
4907 tree
4908 gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree),
4909 tree (*gvalueize) (tree))
4910 {
4911 code_helper rcode;
4912 tree ops[3] = {};
4913 /* ??? The SSA propagators do not correctly deal with following SSA use-def
4914 edges if there are intermediate VARYING defs. For this reason
4915 do not follow SSA edges here even though SCCVN can technically
4916 handle that just fine. */
4917 if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize)
4918 && rcode.is_tree_code ()
4919 && (TREE_CODE_LENGTH ((tree_code) rcode) == 0
4920 || ((tree_code) rcode) == ADDR_EXPR)
4921 && is_gimple_val (ops[0]))
4922 {
4923 tree res = ops[0];
4924 if (dump_file && dump_flags & TDF_DETAILS)
4925 {
4926 fprintf (dump_file, "Match-and-simplified ");
4927 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
4928 fprintf (dump_file, " to ");
4929 print_generic_expr (dump_file, res, 0);
4930 fprintf (dump_file, "\n");
4931 }
4932 return res;
4933 }
4934
4935 location_t loc = gimple_location (stmt);
4936 switch (gimple_code (stmt))
4937 {
4938 case GIMPLE_ASSIGN:
4939 {
4940 enum tree_code subcode = gimple_assign_rhs_code (stmt);
4941
4942 switch (get_gimple_rhs_class (subcode))
4943 {
4944 case GIMPLE_SINGLE_RHS:
4945 {
4946 tree rhs = gimple_assign_rhs1 (stmt);
4947 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
4948
4949 if (TREE_CODE (rhs) == SSA_NAME)
4950 {
4951 /* If the RHS is an SSA_NAME, return its known constant value,
4952 if any. */
4953 return (*valueize) (rhs);
4954 }
4955 /* Handle propagating invariant addresses into address
4956 operations. */
4957 else if (TREE_CODE (rhs) == ADDR_EXPR
4958 && !is_gimple_min_invariant (rhs))
4959 {
4960 HOST_WIDE_INT offset = 0;
4961 tree base;
4962 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
4963 &offset,
4964 valueize);
4965 if (base
4966 && (CONSTANT_CLASS_P (base)
4967 || decl_address_invariant_p (base)))
4968 return build_invariant_address (TREE_TYPE (rhs),
4969 base, offset);
4970 }
4971 else if (TREE_CODE (rhs) == CONSTRUCTOR
4972 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
4973 && (CONSTRUCTOR_NELTS (rhs)
4974 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
4975 {
4976 unsigned i;
4977 tree val, *vec;
4978
4979 vec = XALLOCAVEC (tree,
4980 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
4981 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
4982 {
4983 val = (*valueize) (val);
4984 if (TREE_CODE (val) == INTEGER_CST
4985 || TREE_CODE (val) == REAL_CST
4986 || TREE_CODE (val) == FIXED_CST)
4987 vec[i] = val;
4988 else
4989 return NULL_TREE;
4990 }
4991
4992 return build_vector (TREE_TYPE (rhs), vec);
4993 }
4994 if (subcode == OBJ_TYPE_REF)
4995 {
4996 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
4997 /* If callee is constant, we can fold away the wrapper. */
4998 if (is_gimple_min_invariant (val))
4999 return val;
5000 }
5001
5002 if (kind == tcc_reference)
5003 {
5004 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
5005 || TREE_CODE (rhs) == REALPART_EXPR
5006 || TREE_CODE (rhs) == IMAGPART_EXPR)
5007 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5008 {
5009 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5010 return fold_unary_loc (EXPR_LOCATION (rhs),
5011 TREE_CODE (rhs),
5012 TREE_TYPE (rhs), val);
5013 }
5014 else if (TREE_CODE (rhs) == BIT_FIELD_REF
5015 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5016 {
5017 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5018 return fold_ternary_loc (EXPR_LOCATION (rhs),
5019 TREE_CODE (rhs),
5020 TREE_TYPE (rhs), val,
5021 TREE_OPERAND (rhs, 1),
5022 TREE_OPERAND (rhs, 2));
5023 }
5024 else if (TREE_CODE (rhs) == MEM_REF
5025 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5026 {
5027 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5028 if (TREE_CODE (val) == ADDR_EXPR
5029 && is_gimple_min_invariant (val))
5030 {
5031 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
5032 unshare_expr (val),
5033 TREE_OPERAND (rhs, 1));
5034 if (tem)
5035 rhs = tem;
5036 }
5037 }
5038 return fold_const_aggregate_ref_1 (rhs, valueize);
5039 }
5040 else if (kind == tcc_declaration)
5041 return get_symbol_constant_value (rhs);
5042 return rhs;
5043 }
5044
5045 case GIMPLE_UNARY_RHS:
5046 return NULL_TREE;
5047
5048 case GIMPLE_BINARY_RHS:
5049 {
5050 /* Handle binary operators that can appear in GIMPLE form. */
5051 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
5052 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5053
5054 /* Translate &x + CST into an invariant form suitable for
5055 further propagation. */
5056 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
5057 && TREE_CODE (op0) == ADDR_EXPR
5058 && TREE_CODE (op1) == INTEGER_CST)
5059 {
5060 tree off = fold_convert (ptr_type_node, op1);
5061 return build_fold_addr_expr_loc
5062 (loc,
5063 fold_build2 (MEM_REF,
5064 TREE_TYPE (TREE_TYPE (op0)),
5065 unshare_expr (op0), off));
5066 }
5067
5068 return fold_binary_loc (loc, subcode,
5069 gimple_expr_type (stmt), op0, op1);
5070 }
5071
5072 case GIMPLE_TERNARY_RHS:
5073 {
5074 /* Handle ternary operators that can appear in GIMPLE form. */
5075 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
5076 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5077 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
5078
5079 /* Fold embedded expressions in ternary codes. */
5080 if ((subcode == COND_EXPR
5081 || subcode == VEC_COND_EXPR)
5082 && COMPARISON_CLASS_P (op0))
5083 {
5084 tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
5085 tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
5086 tree tem = fold_binary_loc (loc, TREE_CODE (op0),
5087 TREE_TYPE (op0), op00, op01);
5088 if (tem)
5089 op0 = tem;
5090 }
5091
5092 return fold_ternary_loc (loc, subcode,
5093 gimple_expr_type (stmt), op0, op1, op2);
5094 }
5095
5096 default:
5097 gcc_unreachable ();
5098 }
5099 }
5100
5101 case GIMPLE_CALL:
5102 {
5103 tree fn;
5104 gcall *call_stmt = as_a <gcall *> (stmt);
5105
5106 if (gimple_call_internal_p (stmt))
5107 {
5108 enum tree_code subcode = ERROR_MARK;
5109 switch (gimple_call_internal_fn (stmt))
5110 {
5111 case IFN_UBSAN_CHECK_ADD:
5112 subcode = PLUS_EXPR;
5113 break;
5114 case IFN_UBSAN_CHECK_SUB:
5115 subcode = MINUS_EXPR;
5116 break;
5117 case IFN_UBSAN_CHECK_MUL:
5118 subcode = MULT_EXPR;
5119 break;
5120 default:
5121 return NULL_TREE;
5122 }
5123 tree arg0 = gimple_call_arg (stmt, 0);
5124 tree arg1 = gimple_call_arg (stmt, 1);
5125 tree op0 = (*valueize) (arg0);
5126 tree op1 = (*valueize) (arg1);
5127
5128 if (TREE_CODE (op0) != INTEGER_CST
5129 || TREE_CODE (op1) != INTEGER_CST)
5130 {
5131 switch (subcode)
5132 {
5133 case MULT_EXPR:
5134 /* x * 0 = 0 * x = 0 without overflow. */
5135 if (integer_zerop (op0) || integer_zerop (op1))
5136 return build_zero_cst (TREE_TYPE (arg0));
5137 break;
5138 case MINUS_EXPR:
5139 /* y - y = 0 without overflow. */
5140 if (operand_equal_p (op0, op1, 0))
5141 return build_zero_cst (TREE_TYPE (arg0));
5142 break;
5143 default:
5144 break;
5145 }
5146 }
5147 tree res
5148 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
5149 if (res
5150 && TREE_CODE (res) == INTEGER_CST
5151 && !TREE_OVERFLOW (res))
5152 return res;
5153 return NULL_TREE;
5154 }
5155
5156 fn = (*valueize) (gimple_call_fn (stmt));
5157 if (TREE_CODE (fn) == ADDR_EXPR
5158 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
5159 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
5160 && gimple_builtin_call_types_compatible_p (stmt,
5161 TREE_OPERAND (fn, 0)))
5162 {
5163 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
5164 tree retval;
5165 unsigned i;
5166 for (i = 0; i < gimple_call_num_args (stmt); ++i)
5167 args[i] = (*valueize) (gimple_call_arg (stmt, i));
5168 retval = fold_builtin_call_array (loc,
5169 gimple_call_return_type (call_stmt),
5170 fn, gimple_call_num_args (stmt), args);
5171 if (retval)
5172 {
5173 /* fold_call_expr wraps the result inside a NOP_EXPR. */
5174 STRIP_NOPS (retval);
5175 retval = fold_convert (gimple_call_return_type (call_stmt),
5176 retval);
5177 }
5178 return retval;
5179 }
5180 return NULL_TREE;
5181 }
5182
5183 default:
5184 return NULL_TREE;
5185 }
5186 }
5187
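/* Worked example for the POINTER_PLUS translation above: if OP0
   valueizes to &a and OP1 to 4, the addition folds to the invariant
   address &MEM_REF (&a, 4), which later propagation can consume
   directly.  */
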
5188 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5189 Returns NULL_TREE if folding to a constant is not possible, otherwise
5190 returns a constant according to is_gimple_min_invariant. */
5191
5192 tree
5193 gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
5194 {
5195 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
5196 if (res && is_gimple_min_invariant (res))
5197 return res;
5198 return NULL_TREE;
5199 }
5200
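/* A minimal usage sketch (hypothetical helpers, illustrative only):
   the VALUEIZE callback must return a usable tree for every operand.
   A pass with a constant lattice (like CCP) returns the lattice
   constant when one is known; the identity fallback below uses
   operands as-is, so only statements whose operands are already
   constant fold to a constant.  */

static tree ATTRIBUTE_UNUSED
valueize_identity_sketch (tree op)
{
  /* A real pass would look up OP in its lattice here.  */
  return op;
}

static tree ATTRIBUTE_UNUSED
fold_to_constant_sketch (gimple stmt)
{
  return gimple_fold_stmt_to_constant (stmt, valueize_identity_sketch);
}
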
5201
5202 /* The functions below fold references using their
5203 constant initializers. */
5204
5205 /* See if we can find a constructor defining the value of BASE.
5206 When we know the constructor with a constant offset (such as when
5207 BASE is array[40] and we know the constructor of the array), then
5208 BIT_OFFSET is adjusted accordingly.
5209
5210 As a special case, return error_mark_node when constructor
5211 is not explicitly available, but it is known to be zero
5212 such as 'static const int a;'. */
5213 static tree
5214 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
5215 tree (*valueize)(tree))
5216 {
5217 HOST_WIDE_INT bit_offset2, size, max_size;
5218 if (TREE_CODE (base) == MEM_REF)
5219 {
5220 if (!integer_zerop (TREE_OPERAND (base, 1)))
5221 {
5222 if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
5223 return NULL_TREE;
5224 *bit_offset += (mem_ref_offset (base).to_short_addr ()
5225 * BITS_PER_UNIT);
5226 }
5227
5228 if (valueize
5229 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
5230 base = valueize (TREE_OPERAND (base, 0));
5231 if (!base || TREE_CODE (base) != ADDR_EXPR)
5232 return NULL_TREE;
5233 base = TREE_OPERAND (base, 0);
5234 }
5235
5236 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
5237 DECL_INITIAL. If BASE is a nested reference into another
5238 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
5239 the inner reference. */
5240 switch (TREE_CODE (base))
5241 {
5242 case VAR_DECL:
5243 case CONST_DECL:
5244 {
5245 tree init = ctor_for_folding (base);
5246
5247 /* Our semantics are the exact opposite of ctor_for_folding's:
5248 NULL means unknown, while error_mark_node means 0. */
5249 if (init == error_mark_node)
5250 return NULL_TREE;
5251 if (!init)
5252 return error_mark_node;
5253 return init;
5254 }
5255
5256 case ARRAY_REF:
5257 case COMPONENT_REF:
5258 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
5259 if (max_size == -1 || size != max_size)
5260 return NULL_TREE;
5261 *bit_offset += bit_offset2;
5262 return get_base_constructor (base, bit_offset, valueize);
5263
5264 case STRING_CST:
5265 case CONSTRUCTOR:
5266 return base;
5267
5268 default:
5269 return NULL_TREE;
5270 }
5271 }
5272
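/* For illustration: for the reference MEM_REF (&a, 8) into
   "static const int a[40] = {...};", the CONSTRUCTOR of a is returned
   and 8 * BITS_PER_UNIT is added to *BIT_OFFSET; for
   "static const int z;" there is no explicit initializer, so
   error_mark_node is returned to signal a known zero value.  */
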
5273 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
5274 SIZE to the memory at bit OFFSET. */
5275
5276 static tree
5277 fold_array_ctor_reference (tree type, tree ctor,
5278 unsigned HOST_WIDE_INT offset,
5279 unsigned HOST_WIDE_INT size,
5280 tree from_decl)
5281 {
5282 unsigned HOST_WIDE_INT cnt;
5283 tree cfield, cval;
5284 offset_int low_bound;
5285 offset_int elt_size;
5286 offset_int index, max_index;
5287 offset_int access_index;
5288 tree domain_type = NULL_TREE, index_type = NULL_TREE;
5289 HOST_WIDE_INT inner_offset;
5290
5291 /* Compute low bound and elt size. */
5292 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
5293 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
5294 if (domain_type && TYPE_MIN_VALUE (domain_type))
5295 {
5296 /* Static constructors for variably sized objects make no sense. */
5297 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
5298 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
5299 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
5300 }
5301 else
5302 low_bound = 0;
5303 /* Static constructors for variably sized objects make no sense. */
5304 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
5305 == INTEGER_CST);
5306 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
5307
5308 /* We can handle only constant-sized accesses that are known not to
5309 be larger than the size of the array element. */
5310 if (!TYPE_SIZE_UNIT (type)
5311 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
5312 || wi::lts_p (elt_size, wi::to_offset (TYPE_SIZE_UNIT (type)))
5313 || elt_size == 0)
5314 return NULL_TREE;
5315
5316 /* Compute the array index we look for. */
5317 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
5318 elt_size);
5319 access_index += low_bound;
5320 if (index_type)
5321 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
5322 TYPE_SIGN (index_type));
5323
5324 /* And offset within the access. */
5325 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
5326
5327 /* See if the array element is large enough to span the whole access. We
5328 do not care to fold accesses spanning multiple array indexes. */
5329 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
5330 return NULL_TREE;
5331
5332 index = low_bound - 1;
5333 if (index_type)
5334 index = wi::ext (index, TYPE_PRECISION (index_type),
5335 TYPE_SIGN (index_type));
5336
5337 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
5338 {
5339 /* An array constructor may explicitly set the index, specify a range,
5340 or leave the index NULL, meaning that it is the next index after the
5341 previous one. */
5342 if (cfield)
5343 {
5344 if (TREE_CODE (cfield) == INTEGER_CST)
5345 max_index = index = wi::to_offset (cfield);
5346 else
5347 {
5348 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
5349 index = wi::to_offset (TREE_OPERAND (cfield, 0));
5350 max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
5351 }
5352 }
5353 else
5354 {
5355 index += 1;
5356 if (index_type)
5357 index = wi::ext (index, TYPE_PRECISION (index_type),
5358 TYPE_SIGN (index_type));
5359 max_index = index;
5360 }
5361
5362 /* Do we have a match? */
5363 if (wi::cmpu (access_index, index) >= 0
5364 && wi::cmpu (access_index, max_index) <= 0)
5365 return fold_ctor_reference (type, cval, inner_offset, size,
5366 from_decl);
5367 }
5368 /* When memory is not explicitly mentioned in the constructor,
5369 it is 0 (or out of range). */
5370 return build_zero_cst (type);
5371 }
5372
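/* Worked example: for "static const int a[4] = {10, 20, 30, 40};" on
   a target with BITS_PER_UNIT == 8 and 4-byte int, elt_size is 4, so
   a 32-bit read at bit OFFSET 64 gives
   access_index = (64 / 8) / 4 + low_bound = 2 and
   inner_offset = 64 % 32 = 0; the reference folds to the element 30.  */
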
5373 /* CTOR is CONSTRUCTOR of an aggregate or vector.
5374 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
5375
5376 static tree
5377 fold_nonarray_ctor_reference (tree type, tree ctor,
5378 unsigned HOST_WIDE_INT offset,
5379 unsigned HOST_WIDE_INT size,
5380 tree from_decl)
5381 {
5382 unsigned HOST_WIDE_INT cnt;
5383 tree cfield, cval;
5384
5385 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
5386 cval)
5387 {
5388 tree byte_offset = DECL_FIELD_OFFSET (cfield);
5389 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
5390 tree field_size = DECL_SIZE (cfield);
5391 offset_int bitoffset;
5392 offset_int bitoffset_end, access_end;
5393
5394 /* Variable-sized objects in static constructors make no sense,
5395 but field_size can be NULL for flexible array members. */
5396 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
5397 && TREE_CODE (byte_offset) == INTEGER_CST
5398 && (field_size != NULL_TREE
5399 ? TREE_CODE (field_size) == INTEGER_CST
5400 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
5401
5402 /* Compute bit offset of the field. */
5403 bitoffset = (wi::to_offset (field_offset)
5404 + wi::lshift (wi::to_offset (byte_offset),
5405 LOG2_BITS_PER_UNIT));
5406 /* Compute bit offset where the field ends. */
5407 if (field_size != NULL_TREE)
5408 bitoffset_end = bitoffset + wi::to_offset (field_size);
5409 else
5410 bitoffset_end = 0;
5411
5412 access_end = offset_int (offset) + size;
5413
5414 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
5415 [BITOFFSET, BITOFFSET_END)? */
5416 if (wi::cmps (access_end, bitoffset) > 0
5417 && (field_size == NULL_TREE
5418 || wi::lts_p (offset, bitoffset_end)))
5419 {
5420 offset_int inner_offset = offset_int (offset) - bitoffset;
5421 /* We do have overlap. Now see if the field is large enough to
5422 cover the access. Give up on accesses spanning multiple
5423 fields. */
5424 if (wi::cmps (access_end, bitoffset_end) > 0)
5425 return NULL_TREE;
5426 if (wi::lts_p (offset, bitoffset))
5427 return NULL_TREE;
5428 return fold_ctor_reference (type, cval,
5429 inner_offset.to_uhwi (), size,
5430 from_decl);
5431 }
5432 }
5433 /* When memory is not explicitly mentioned in the constructor, it is 0. */
5434 return build_zero_cst (type);
5435 }
5436
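/* Illustrative example (editorial sketch): assuming int is 32 bits wide and
   4-byte aligned, for

     static const struct { char c; int i; } s = { 'x', 42 };
     ... = s.i;

   the read of s.i arrives at fold_nonarray_ctor_reference above with
   OFFSET == 32 and SIZE == 32; the loop finds the field whose
   [bitoffset, bitoffset_end) range [32, 64) covers the access and recurses
   via fold_ctor_reference, yielding 42.  */
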
5437 /* CTOR is the value initializing the memory. Fold a reference of type TYPE
5438 and size SIZE to the memory at bit OFFSET. */
5439
5440 tree
5441 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
5442 unsigned HOST_WIDE_INT size, tree from_decl)
5443 {
5444 tree ret;
5445
5446 /* We found the field with an exact match. */
5447 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
5448 && !offset)
5449 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
5450
5451 /* We are at the end of the walk; see if we can view-convert the
5452 result. */
5453 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
5454 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
5455 && !compare_tree_int (TYPE_SIZE (type), size)
5456 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
5457 {
5458 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
5459 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
5460 if (ret)
5461 STRIP_USELESS_TYPE_CONVERSION (ret);
5462 return ret;
5463 }
5464 /* For constants and byte-aligned/sized reads try to go through
5465 native_encode/interpret. */
5466 if (CONSTANT_CLASS_P (ctor)
5467 && BITS_PER_UNIT == 8
5468 && offset % BITS_PER_UNIT == 0
5469 && size % BITS_PER_UNIT == 0
5470 && size <= MAX_BITSIZE_MODE_ANY_MODE)
5471 {
5472 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
5473 if (native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
5474 offset / BITS_PER_UNIT) > 0)
5475 return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
5476 }
5477 if (TREE_CODE (ctor) == CONSTRUCTOR)
5478 {
5479
5480 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
5481 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
5482 return fold_array_ctor_reference (type, ctor, offset, size,
5483 from_decl);
5484 else
5485 return fold_nonarray_ctor_reference (type, ctor, offset, size,
5486 from_decl);
5487 }
5488
5489 return NULL_TREE;
5490 }
5491
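/* Illustrative example (editorial sketch): the native_encode/native_interpret
   path in fold_ctor_reference above allows byte-aligned reads to reinterpret
   a constant's bits; e.g. loading the four bytes of

     static const float f = 1.0f;

   as a 32-bit unsigned integer folds to 0x3f800000 on a target using IEEE
   single precision.  */
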
5492 /* Return the tree representing the element referenced by T if T is an
5493 ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
5494 names using VALUEIZE. Return NULL_TREE otherwise. */
5495
5496 tree
5497 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
5498 {
5499 tree ctor, idx, base;
5500 HOST_WIDE_INT offset, size, max_size;
5501 tree tem;
5502
5503 if (TREE_THIS_VOLATILE (t))
5504 return NULL_TREE;
5505
5506 if (DECL_P (t))
5507 return get_symbol_constant_value (t);
5508
5509 tem = fold_read_from_constant_string (t);
5510 if (tem)
5511 return tem;
5512
5513 switch (TREE_CODE (t))
5514 {
5515 case ARRAY_REF:
5516 case ARRAY_RANGE_REF:
5517 /* Constant indexes are handled well by get_base_constructor.
5518 Only special-case variable offsets.
5519 FIXME: This code can't handle nested references with variable indexes
5520 (they will be handled only by iteration of ccp). Perhaps we can bring
5521 get_ref_base_and_extent here and make it use a valueize callback. */
5522 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
5523 && valueize
5524 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
5525 && TREE_CODE (idx) == INTEGER_CST)
5526 {
5527 tree low_bound, unit_size;
5528
5529 /* If the resulting bit-offset is constant, track it. */
5530 if ((low_bound = array_ref_low_bound (t),
5531 TREE_CODE (low_bound) == INTEGER_CST)
5532 && (unit_size = array_ref_element_size (t),
5533 tree_fits_uhwi_p (unit_size)))
5534 {
5535 offset_int woffset
5536 = wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
5537 TYPE_PRECISION (TREE_TYPE (idx)));
5538
5539 if (wi::fits_shwi_p (woffset))
5540 {
5541 offset = woffset.to_shwi ();
5542 /* TODO: This code seems wrong: it should multiply first and
5543 only then check whether the product fits. */
5544 offset *= tree_to_uhwi (unit_size);
5545 offset *= BITS_PER_UNIT;
5546
5547 base = TREE_OPERAND (t, 0);
5548 ctor = get_base_constructor (base, &offset, valueize);
5549 /* Empty constructor. Always fold to 0. */
5550 if (ctor == error_mark_node)
5551 return build_zero_cst (TREE_TYPE (t));
5552 /* Out-of-bounds array access. The value is undefined,
5553 but don't fold. */
5554 if (offset < 0)
5555 return NULL_TREE;
5556 /* We cannot determine the ctor. */
5557 if (!ctor)
5558 return NULL_TREE;
5559 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
5560 tree_to_uhwi (unit_size)
5561 * BITS_PER_UNIT,
5562 base);
5563 }
5564 }
5565 }
5566 /* Fallthru. */
5567
5568 case COMPONENT_REF:
5569 case BIT_FIELD_REF:
5570 case TARGET_MEM_REF:
5571 case MEM_REF:
5572 base = get_ref_base_and_extent (t, &offset, &size, &max_size);
5573 ctor = get_base_constructor (base, &offset, valueize);
5574
5575 /* Empty constructor. Always fold to 0. */
5576 if (ctor == error_mark_node)
5577 return build_zero_cst (TREE_TYPE (t));
5578 /* We do not know the precise address. */
5579 if (max_size == -1 || max_size != size)
5580 return NULL_TREE;
5581 /* We cannot determine the ctor. */
5582 if (!ctor)
5583 return NULL_TREE;
5584
5585 /* Out-of-bounds array access. The value is undefined, but don't fold. */
5586 if (offset < 0)
5587 return NULL_TREE;
5588
5589 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
5590 base);
5591
5592 case REALPART_EXPR:
5593 case IMAGPART_EXPR:
5594 {
5595 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
5596 if (c && TREE_CODE (c) == COMPLEX_CST)
5597 return fold_build1_loc (EXPR_LOCATION (t),
5598 TREE_CODE (t), TREE_TYPE (t), c);
5599 break;
5600 }
5601
5602 default:
5603 break;
5604 }
5605
5606 return NULL_TREE;
5607 }
5608
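/* Like fold_const_aggregate_ref_1, but without SSA name valueization. */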
5609 tree
5610 fold_const_aggregate_ref (tree t)
5611 {
5612 return fold_const_aggregate_ref_1 (t, NULL);
5613 }
5614
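/* Illustrative usage (editorial sketch): a pass that does not track SSA
   values can call the wrapper directly, e.g.

     tree val = fold_const_aggregate_ref (rhs);
     if (val)
       ...  use VAL in place of the load  ...

   whereas CCP reaches fold_const_aggregate_ref_1 with its lattice lookup
   as the VALUEIZE callback so that variable indexes can be resolved.  */
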
5615 /* Look up the virtual method with index TOKEN in the virtual table V
5616 at OFFSET.
5617 Set CAN_REFER, if non-NULL, to false if the method
5618 is not referable or if the virtual table is ill-formed (such as rewritten
5619 by a non-C++-produced symbol); otherwise just return NULL in that case. */
5620
5621 tree
5622 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
5623 tree v,
5624 unsigned HOST_WIDE_INT offset,
5625 bool *can_refer)
5626 {
5627 tree vtable = v, init, fn;
5628 unsigned HOST_WIDE_INT size;
5629 unsigned HOST_WIDE_INT elt_size, access_index;
5630 tree domain_type;
5631
5632 if (can_refer)
5633 *can_refer = true;
5634
5635 /* First of all, double-check that we have a virtual table. */
5636 if (TREE_CODE (v) != VAR_DECL
5637 || !DECL_VIRTUAL_P (v))
5638 {
5639 /* Pass down that we lost track of the target. */
5640 if (can_refer)
5641 *can_refer = false;
5642 return NULL_TREE;
5643 }
5644
5645 init = ctor_for_folding (v);
5646
5647 /* Virtual tables should always be born with constructors,
5648 and we should always assume that they are available for
5649 folding. At the moment we do not stream them in all cases,
5650 but it should never happen that the ctor seems unreachable. */
5651 gcc_assert (init);
5652 if (init == error_mark_node)
5653 {
5654 gcc_assert (in_lto_p);
5655 /* Pass down that we lost track of the target. */
5656 if (can_refer)
5657 *can_refer = false;
5658 return NULL_TREE;
5659 }
5660 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
5661 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
5662 offset *= BITS_PER_UNIT;
5663 offset += token * size;
5664
5665 /* Look up the value in the constructor, which is assumed to be an array.
5666 This is equivalent to
5667 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
5668 offset, size, NULL);
5669 but in constant time. We expect that the frontend produced a simple
5670 array without indexed initializers. */
5671
5672 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
5673 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
5674 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
5675 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
5676
5677 access_index = offset / BITS_PER_UNIT / elt_size;
5678 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
5679
5680 /* This code assumes that there are no indexed fields
5681 produced by the C++ FE, so we can directly index the array. */
5682 if (access_index < CONSTRUCTOR_NELTS (init))
5683 {
5684 fn = CONSTRUCTOR_ELT (init, access_index)->value;
5685 gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
5686 STRIP_NOPS (fn);
5687 }
5688 else
5689 fn = NULL;
5690
5691 /* For a type-inconsistent program we may end up looking up a virtual method
5692 in a virtual table that does not contain TOKEN entries. We may overrun
5693 the virtual table and pick up a constant or an RTTI info pointer.
5694 In any case the call is undefined. */
5695 if (!fn
5696 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
5697 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
5698 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
5699 else
5700 {
5701 fn = TREE_OPERAND (fn, 0);
5702
5703 /* When the cgraph node is missing and the function is not public, we cannot
5704 devirtualize. This can happen in WHOPR when the actual method
5705 ends up in another partition, because we found the devirtualization
5706 possibility too late. */
5707 if (!can_refer_decl_in_current_unit_p (fn, vtable))
5708 {
5709 if (can_refer)
5710 {
5711 *can_refer = false;
5712 return fn;
5713 }
5714 return NULL_TREE;
5715 }
5716 }
5717
5718 /* Make sure we create a cgraph node for functions we'll reference.
5719 They can be non-existent if the reference comes, for example, from
5720 an entry of an external vtable. */
5721 cgraph_node::get_create (fn);
5722
5723 return fn;
5724 }
5725
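/* Illustrative example (editorial sketch): with OFFSET == 0 and
   pointer-sized vtable entries, TOKEN == 2 yields access_index == 2, so
   CONSTRUCTOR_ELT (init, 2)->value is inspected directly; this is the
   constant-time shortcut described in the comments of
   gimple_get_virt_method_for_vtable above.  */
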
5726 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
5727 is the integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
5728 KNOWN_BINFO carries the binfo describing the true type of
5729 OBJ_TYPE_REF_OBJECT(REF).
5730 Set CAN_REFER, if non-NULL, to false if the method
5731 is not referable or if the virtual table is ill-formed (such as rewritten
5732 by a non-C++-produced symbol); otherwise just return NULL in that case. */
5733
5734 tree
5735 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
5736 bool *can_refer)
5737 {
5738 unsigned HOST_WIDE_INT offset;
5739 tree v;
5740
5741 v = BINFO_VTABLE (known_binfo);
5742 /* If there is no virtual method table, leave the OBJ_TYPE_REF alone. */
5743 if (!v)
5744 return NULL_TREE;
5745
5746 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
5747 {
5748 if (can_refer)
5749 *can_refer = false;
5750 return NULL_TREE;
5751 }
5752 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
5753 }
5754
5755 /* Return true iff VAL is a gimple expression that is known to be
5756 non-negative. Restricted to floating-point inputs. */
5757
5758 bool
5759 gimple_val_nonnegative_real_p (tree val)
5760 {
5761 gimple def_stmt;
5762
5763 gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));
5764
5765 /* Use existing logic for non-gimple trees. */
5766 if (tree_expr_nonnegative_p (val))
5767 return true;
5768
5769 if (TREE_CODE (val) != SSA_NAME)
5770 return false;
5771
5772 /* Currently we look only at the immediately defining statement
5773 to make this determination, since recursion on defining
5774 statements of operands can lead to quadratic behavior in the
5775 worst case. This is expected to catch almost all occurrences
5776 in practice. It would be possible to implement limited-depth
5777 recursion if important cases are lost. Alternatively, passes
5778 that need this information (such as the pow/powi lowering code
5779 in the cse_sincos pass) could be revised to provide it through
5780 dataflow propagation. */
5781
5782 def_stmt = SSA_NAME_DEF_STMT (val);
5783
5784 if (is_gimple_assign (def_stmt))
5785 {
5786 tree op0, op1;
5787
5788 /* See fold-const.c:tree_expr_nonnegative_p for additional
5789 cases that could be handled with recursion. */
5790
5791 switch (gimple_assign_rhs_code (def_stmt))
5792 {
5793 case ABS_EXPR:
5794 /* Always true for floating-point operands. */
5795 return true;
5796
5797 case MULT_EXPR:
5798 /* True if the two operands are identical (since we are
5799 restricted to floating-point inputs). */
5800 op0 = gimple_assign_rhs1 (def_stmt);
5801 op1 = gimple_assign_rhs2 (def_stmt);
5802
5803 if (op0 == op1
5804 || operand_equal_p (op0, op1, 0))
5805 return true;
5806
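/* Otherwise fall through: not known to be nonnegative. */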
5807 default:
5808 return false;
5809 }
5810 }
5811 else if (is_gimple_call (def_stmt))
5812 {
5813 tree fndecl = gimple_call_fndecl (def_stmt);
5814 if (fndecl
5815 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
5816 {
5817 tree arg1;
5818
5819 switch (DECL_FUNCTION_CODE (fndecl))
5820 {
5821 CASE_FLT_FN (BUILT_IN_ACOS):
5822 CASE_FLT_FN (BUILT_IN_ACOSH):
5823 CASE_FLT_FN (BUILT_IN_CABS):
5824 CASE_FLT_FN (BUILT_IN_COSH):
5825 CASE_FLT_FN (BUILT_IN_ERFC):
5826 CASE_FLT_FN (BUILT_IN_EXP):
5827 CASE_FLT_FN (BUILT_IN_EXP10):
5828 CASE_FLT_FN (BUILT_IN_EXP2):
5829 CASE_FLT_FN (BUILT_IN_FABS):
5830 CASE_FLT_FN (BUILT_IN_FDIM):
5831 CASE_FLT_FN (BUILT_IN_HYPOT):
5832 CASE_FLT_FN (BUILT_IN_POW10):
5833 return true;
5834
5835 CASE_FLT_FN (BUILT_IN_SQRT):
5836 /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
5837 negative inputs. */
5838 if (!HONOR_SIGNED_ZEROS (val))
5839 return true;
5840
5841 break;
5842
5843 CASE_FLT_FN (BUILT_IN_POWI):
5844 /* True if the second argument is an even integer. */
5845 arg1 = gimple_call_arg (def_stmt, 1);
5846
5847 if (TREE_CODE (arg1) == INTEGER_CST
5848 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
5849 return true;
5850
5851 break;
5852
5853 CASE_FLT_FN (BUILT_IN_POW):
5854 /* True if the second argument is an even integer-valued
5855 real. */
5856 arg1 = gimple_call_arg (def_stmt, 1);
5857
5858 if (TREE_CODE (arg1) == REAL_CST)
5859 {
5860 REAL_VALUE_TYPE c;
5861 HOST_WIDE_INT n;
5862
5863 c = TREE_REAL_CST (arg1);
5864 n = real_to_integer (&c);
5865
5866 if ((n & 1) == 0)
5867 {
5868 REAL_VALUE_TYPE cint;
5869 real_from_integer (&cint, VOIDmode, n, SIGNED);
5870 if (real_identical (&c, &cint))
5871 return true;
5872 }
5873 }
5874
5875 break;
5876
5877 default:
5878 return false;
5879 }
5880 }
5881 }
5882
5883 return false;
5884 }
5885
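/* Illustrative example (editorial sketch): given GIMPLE such as

     x_2 = y_1 * y_1;
     z_3 = __builtin_pow (w_4, 2.0e+0);

   gimple_val_nonnegative_real_p above returns true for x_2 (MULT_EXPR with
   identical operands) and for z_3 (an even integer-valued REAL_CST
   exponent), but false for an SSA name whose defining statement matches
   none of the handled cases.  */
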
5886 /* Given a pointer value T, return a simplified version of an
5887 indirection through T, or NULL_TREE if no simplification is
5888 possible. Note that the resulting type may differ from
5889 the pointed-to type, but it will still be compatible
5890 from the langhooks point of view. */
5891
5892 tree
5893 gimple_fold_indirect_ref (tree t)
5894 {
5895 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
5896 tree sub = t;
5897 tree subtype;
5898
5899 STRIP_NOPS (sub);
5900 subtype = TREE_TYPE (sub);
5901 if (!POINTER_TYPE_P (subtype))
5902 return NULL_TREE;
5903
5904 if (TREE_CODE (sub) == ADDR_EXPR)
5905 {
5906 tree op = TREE_OPERAND (sub, 0);
5907 tree optype = TREE_TYPE (op);
5908 /* *&p => p */
5909 if (useless_type_conversion_p (type, optype))
5910 return op;
5911
5912 /* *(foo *)&fooarray => fooarray[0] */
5913 if (TREE_CODE (optype) == ARRAY_TYPE
5914 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
5915 && useless_type_conversion_p (type, TREE_TYPE (optype)))
5916 {
5917 tree type_domain = TYPE_DOMAIN (optype);
5918 tree min_val = size_zero_node;
5919 if (type_domain && TYPE_MIN_VALUE (type_domain))
5920 min_val = TYPE_MIN_VALUE (type_domain);
5921 if (TREE_CODE (min_val) == INTEGER_CST)
5922 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
5923 }
5924 /* *(foo *)&complexfoo => __real__ complexfoo */
5925 else if (TREE_CODE (optype) == COMPLEX_TYPE
5926 && useless_type_conversion_p (type, TREE_TYPE (optype)))
5927 return fold_build1 (REALPART_EXPR, type, op);
5928 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
5929 else if (TREE_CODE (optype) == VECTOR_TYPE
5930 && useless_type_conversion_p (type, TREE_TYPE (optype)))
5931 {
5932 tree part_width = TYPE_SIZE (type);
5933 tree index = bitsize_int (0);
5934 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
5935 }
5936 }
5937
5938 /* *(p + CST) -> ... */
5939 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
5940 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
5941 {
5942 tree addr = TREE_OPERAND (sub, 0);
5943 tree off = TREE_OPERAND (sub, 1);
5944 tree addrtype;
5945
5946 STRIP_NOPS (addr);
5947 addrtype = TREE_TYPE (addr);
5948
5949 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
5950 if (TREE_CODE (addr) == ADDR_EXPR
5951 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
5952 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
5953 && tree_fits_uhwi_p (off))
5954 {
5955 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
5956 tree part_width = TYPE_SIZE (type);
5957 unsigned HOST_WIDE_INT part_widthi
5958 = tree_to_shwi (part_width) / BITS_PER_UNIT;
5959 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
5960 tree index = bitsize_int (indexi);
5961 if (offset / part_widthi
5962 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
5963 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
5964 part_width, index);
5965 }
5966
5967 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
5968 if (TREE_CODE (addr) == ADDR_EXPR
5969 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
5970 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
5971 {
5972 tree size = TYPE_SIZE_UNIT (type);
5973 if (tree_int_cst_equal (size, off))
5974 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
5975 }
5976
5977 /* *(p + CST) -> MEM_REF <p, CST>. */
5978 if (TREE_CODE (addr) != ADDR_EXPR
5979 || DECL_P (TREE_OPERAND (addr, 0)))
5980 return fold_build2 (MEM_REF, type,
5981 addr,
5982 wide_int_to_tree (ptype, off));
5983 }
5984
5985 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
5986 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
5987 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
5988 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
5989 {
5990 tree type_domain;
5991 tree min_val = size_zero_node;
5992 tree osub = sub;
5993 sub = gimple_fold_indirect_ref (sub);
5994 if (! sub)
5995 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
5996 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
5997 if (type_domain && TYPE_MIN_VALUE (type_domain))
5998 min_val = TYPE_MIN_VALUE (type_domain);
5999 if (TREE_CODE (min_val) == INTEGER_CST)
6000 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
6001 }
6002
6003 return NULL_TREE;
6004 }
6005
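/* Illustrative example (editorial sketch): assuming a dereference reaching
   gimple_fold_indirect_ref above in the form *((double *)&c + 8) with c of
   type _Complex double, the POINTER_PLUS_EXPR path compares the offset 8
   against TYPE_SIZE_UNIT (double) and folds the load to __imag__ c.  */
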
6006 /* Return true if CODE is an operation that, when operating on signed
6007 integer types, involves undefined behavior on overflow and
6008 can be expressed with unsigned arithmetic. */
6009
6010 bool
6011 arith_code_with_undefined_signed_overflow (tree_code code)
6012 {
6013 switch (code)
6014 {
6015 case PLUS_EXPR:
6016 case MINUS_EXPR:
6017 case MULT_EXPR:
6018 case NEGATE_EXPR:
6019 case POINTER_PLUS_EXPR:
6020 return true;
6021 default:
6022 return false;
6023 }
6024 }
6025
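/* Illustrative note (editorial): PLUS_EXPR on a signed int qualifies above
   because INT_MAX + 1 is undefined while unsigned addition wraps; by
   contrast TRUNC_DIV_EXPR is excluded even though INT_MIN / -1 overflows,
   since signed division cannot be carried out in the unsigned type.  */
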
6026 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
6027 operation that can be transformed to unsigned arithmetic by converting
6028 its operands, carrying out the operation in the corresponding unsigned
6029 type and converting the result back to the original type.
6030
6031 Returns a sequence of statements that replace STMT and also contain
6032 a modified form of STMT itself. */
6033
6034 gimple_seq
6035 rewrite_to_defined_overflow (gimple stmt)
6036 {
6037 if (dump_file && (dump_flags & TDF_DETAILS))
6038 {
6039 fprintf (dump_file, "rewriting stmt with undefined signed "
6040 "overflow ");
6041 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
6042 }
6043
6044 tree lhs = gimple_assign_lhs (stmt);
6045 tree type = unsigned_type_for (TREE_TYPE (lhs));
6046 gimple_seq stmts = NULL;
6047 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
6048 {
6049 gimple_seq stmts2 = NULL;
6050 gimple_set_op (stmt, i,
6051 force_gimple_operand (fold_convert (type,
6052 gimple_op (stmt, i)),
6053 &stmts2, true, NULL_TREE));
6054 gimple_seq_add_seq (&stmts, stmts2);
6055 }
6056 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
6057 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
6058 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
6059 gimple_seq_add_stmt (&stmts, stmt);
6060 gimple cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
6061 gimple_seq_add_stmt (&stmts, cvt);
6062
6063 return stmts;
6064 }
6065
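/* Illustrative example (editorial sketch): a statement

     c_3 = a_1 + b_2;   (signed int addition)

   is rewritten by rewrite_to_defined_overflow above into a sequence
   roughly of the form

     _4 = (unsigned int) a_1;
     _5 = (unsigned int) b_2;
     _6 = _4 + _5;
     c_3 = (int) _6;

   so that the addition wraps instead of invoking undefined behavior.  */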
6066
6067 /* The valueization hook we use for the gimple_build API simplification.
6068 This makes us match fold_buildN behavior by only combining with
6069 statements in the sequence(s) we are currently building. */
6070
6071 static tree
6072 gimple_build_valueize (tree op)
6073 {
6074 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
6075 return op;
6076 return NULL_TREE;
6077 }
6078
6079 /* Build the expression CODE OP0 of type TYPE with location LOC,
6080 simplifying it first if possible. Returns the built
6081 expression value and appends statements possibly defining it
6082 to SEQ. */
6083
6084 tree
6085 gimple_build (gimple_seq *seq, location_t loc,
6086 enum tree_code code, tree type, tree op0)
6087 {
6088 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
6089 if (!res)
6090 {
6091 if (gimple_in_ssa_p (cfun))
6092 res = make_ssa_name (type);
6093 else
6094 res = create_tmp_reg (type);
6095 gimple stmt;
6096 if (code == REALPART_EXPR
6097 || code == IMAGPART_EXPR
6098 || code == VIEW_CONVERT_EXPR)
6099 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
6100 else
6101 stmt = gimple_build_assign (res, code, op0);
6102 gimple_set_location (stmt, loc);
6103 gimple_seq_add_stmt_without_update (seq, stmt);
6104 }
6105 return res;
6106 }
6107
6108 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
6109 simplifying it first if possible. Returns the built
6110 expression value and appends statements possibly defining it
6111 to SEQ. */
6112
6113 tree
6114 gimple_build (gimple_seq *seq, location_t loc,
6115 enum tree_code code, tree type, tree op0, tree op1)
6116 {
6117 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
6118 if (!res)
6119 {
6120 if (gimple_in_ssa_p (cfun))
6121 res = make_ssa_name (type);
6122 else
6123 res = create_tmp_reg (type);
6124 gimple stmt = gimple_build_assign (res, code, op0, op1);
6125 gimple_set_location (stmt, loc);
6126 gimple_seq_add_stmt_without_update (seq, stmt);
6127 }
6128 return res;
6129 }
6130
6131 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
6132 simplifying it first if possible. Returns the built
6133 expression value and appends statements possibly defining it
6134 to SEQ. */
6135
6136 tree
6137 gimple_build (gimple_seq *seq, location_t loc,
6138 enum tree_code code, tree type, tree op0, tree op1, tree op2)
6139 {
6140 tree res = gimple_simplify (code, type, op0, op1, op2,
6141 seq, gimple_build_valueize);
6142 if (!res)
6143 {
6144 if (gimple_in_ssa_p (cfun))
6145 res = make_ssa_name (type);
6146 else
6147 res = create_tmp_reg (type);
6148 gimple stmt;
6149 if (code == BIT_FIELD_REF)
6150 stmt = gimple_build_assign (res, code,
6151 build3 (code, type, op0, op1, op2));
6152 else
6153 stmt = gimple_build_assign (res, code, op0, op1, op2);
6154 gimple_set_location (stmt, loc);
6155 gimple_seq_add_stmt_without_update (seq, stmt);
6156 }
6157 return res;
6158 }
6159
6160 /* Build the call FN (ARG0) with a result of type TYPE
6161 (or no result if TYPE is void) with location LOC,
6162 simplifying it first if possible. Returns the built
6163 expression value (or NULL_TREE if TYPE is void) and appends
6164 statements possibly defining it to SEQ. */
6165
6166 tree
6167 gimple_build (gimple_seq *seq, location_t loc,
6168 enum built_in_function fn, tree type, tree arg0)
6169 {
6170 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
6171 if (!res)
6172 {
6173 tree decl = builtin_decl_implicit (fn);
6174 gimple stmt = gimple_build_call (decl, 1, arg0);
6175 if (!VOID_TYPE_P (type))
6176 {
6177 if (gimple_in_ssa_p (cfun))
6178 res = make_ssa_name (type);
6179 else
6180 res = create_tmp_reg (type);
6181 gimple_call_set_lhs (stmt, res);
6182 }
6183 gimple_set_location (stmt, loc);
6184 gimple_seq_add_stmt_without_update (seq, stmt);
6185 }
6186 return res;
6187 }
6188
6189 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
6190 (or no result if TYPE is void) with location LOC,
6191 simplifying it first if possible. Returns the built
6192 expression value (or NULL_TREE if TYPE is void) and appends
6193 statements possibly defining it to SEQ. */
6194
6195 tree
6196 gimple_build (gimple_seq *seq, location_t loc,
6197 enum built_in_function fn, tree type, tree arg0, tree arg1)
6198 {
6199 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
6200 if (!res)
6201 {
6202 tree decl = builtin_decl_implicit (fn);
6203 gimple stmt = gimple_build_call (decl, 2, arg0, arg1);
6204 if (!VOID_TYPE_P (type))
6205 {
6206 if (gimple_in_ssa_p (cfun))
6207 res = make_ssa_name (type);
6208 else
6209 res = create_tmp_reg (type);
6210 gimple_call_set_lhs (stmt, res);
6211 }
6212 gimple_set_location (stmt, loc);
6213 gimple_seq_add_stmt_without_update (seq, stmt);
6214 }
6215 return res;
6216 }
6217
6218 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
6219 (or no result if TYPE is void) with location LOC,
6220 simplifying it first if possible. Returns the built
6221 expression value (or NULL_TREE if TYPE is void) and appends
6222 statements possibly defining it to SEQ. */
6223
6224 tree
6225 gimple_build (gimple_seq *seq, location_t loc,
6226 enum built_in_function fn, tree type,
6227 tree arg0, tree arg1, tree arg2)
6228 {
6229 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
6230 seq, gimple_build_valueize);
6231 if (!res)
6232 {
6233 tree decl = builtin_decl_implicit (fn);
6234 gimple stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
6235 if (!VOID_TYPE_P (type))
6236 {
6237 if (gimple_in_ssa_p (cfun))
6238 res = make_ssa_name (type);
6239 else
6240 res = create_tmp_reg (type);
6241 gimple_call_set_lhs (stmt, res);
6242 }
6243 gimple_set_location (stmt, loc);
6244 gimple_seq_add_stmt_without_update (seq, stmt);
6245 }
6246 return res;
6247 }
6248
6249 /* Build the conversion (TYPE) OP with a result of type TYPE
6250 with location LOC if such a conversion is necessary in GIMPLE,
6251 simplifying it first.
6252 Returns the built expression value and appends
6253 statements possibly defining it to SEQ. */
6254
6255 tree
6256 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
6257 {
6258 if (useless_type_conversion_p (type, TREE_TYPE (op)))
6259 return op;
6260 return gimple_build (seq, loc, NOP_EXPR, type, op);
6261 }
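
/* Illustrative usage (editorial sketch): building (long) (a + b) into a
   sequence, with simplification applied along the way:

     gimple_seq seq = NULL;
     tree sum = gimple_build (&seq, loc, PLUS_EXPR, TREE_TYPE (a), a, b);
     tree res = gimple_convert (&seq, loc, long_integer_type_node, sum);

   SEQ then contains whatever statements are needed to define RES and can
   be inserted with gsi_insert_seq_before or a similar iterator API.  */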