]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/gimple-fold.c
2015-06-17 Andrew MacLeod <amacleod@redhat.com>
[thirdparty/gcc.git] / gcc / gimple-fold.c
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2015 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "alias.h"
26 #include "symtab.h"
27 #include "tree.h"
28 #include "fold-const.h"
29 #include "stringpool.h"
30 #include "hard-reg-set.h"
31 #include "function.h"
32 #include "rtl.h"
33 #include "flags.h"
34 #include "insn-config.h"
35 #include "expmed.h"
36 #include "dojump.h"
37 #include "explow.h"
38 #include "calls.h"
39 #include "emit-rtl.h"
40 #include "varasm.h"
41 #include "stmt.h"
42 #include "expr.h"
43 #include "stor-layout.h"
44 #include "dumpfile.h"
45 #include "bitmap.h"
46 #include "predict.h"
47 #include "dominance.h"
48 #include "basic-block.h"
49 #include "tree-ssa-alias.h"
50 #include "internal-fn.h"
51 #include "gimple-fold.h"
52 #include "gimple-expr.h"
53 #include "gimple.h"
54 #include "gimplify.h"
55 #include "gimple-iterator.h"
56 #include "gimple-ssa.h"
57 #include "tree-ssanames.h"
58 #include "tree-into-ssa.h"
59 #include "tree-dfa.h"
60 #include "tree-ssa.h"
61 #include "tree-ssa-propagate.h"
62 #include "target.h"
63 #include "plugin-api.h"
64 #include "ipa-ref.h"
65 #include "cgraph.h"
66 #include "ipa-utils.h"
67 #include "gimple-pretty-print.h"
68 #include "tree-ssa-address.h"
69 #include "langhooks.h"
70 #include "gimplify-me.h"
71 #include "dbgcnt.h"
72 #include "builtins.h"
73 #include "output.h"
74 #include "tree-eh.h"
75 #include "gimple-match.h"
76 #include "tree-phinodes.h"
77 #include "ssa-iterators.h"
78
/* Return true when DECL can be referenced from the current unit.
   FROM_DECL (if non-null) specifies the constructor of the variable
   DECL was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
	In this case we have static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred by external vtables that
	we devirtualize only during final compilation stage.
	At this time we already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  /* Abstract (debug-only) decls never have a body to reference.  */
  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
    return true;

  /* Static objects can be referred only if they were not optimized out yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      /* A function body inlined into its only caller is gone.  */
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var or var that has been optimized out.  */
  if (!from_decl
      || TREE_CODE (from_decl) != VAR_DECL
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When function is public, we always can introduce new reference.
     Exception are the COMDAT functions where introducing a direct
     reference imply need to include function body in the current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have COMDAT.  We are going to check if we still have definition
     or if the definition is going to be output in other partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when corresponding vtable is output.
     This is however not possible - ABI specify that COMDATs are output in
     units where they are used and when the other unit was compiled with LTO
     it is possible that vtable was kept public while the function itself
     was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->global.inlined_to;
}
178
/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it into
   acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL.
   Returns the canonicalized value, or NULL_TREE when the referenced decl
   cannot be used from the current unit.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  /* Rewrite PTR p+ CST into &MEM[ptr + CST] so the address becomes a
     gimple invariant when PTR already is one.  */
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  /* Replace the compound literal with its underlying decl.  */
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      /* Give up when the referenced decl is not usable from this unit
	 (e.g. a COMDAT already optimized out, see
	 can_refer_decl_in_current_unit_p).  */
      if ((TREE_CODE (base) == VAR_DECL
	   || TREE_CODE (base) == FUNCTION_DECL)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_CODE (base) == VAR_DECL)
	TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  /* Constants with TREE_OVERFLOW are not valid gimple invariants.  */
  if (TREE_OVERFLOW_P (cval))
    return drop_tree_overflow (cval);
  return orig_cval;
}
239
240 /* If SYM is a constant variable with known value, return the value.
241 NULL_TREE is returned otherwise. */
242
243 tree
244 get_symbol_constant_value (tree sym)
245 {
246 tree val = ctor_for_folding (sym);
247 if (val != error_mark_node)
248 {
249 if (val)
250 {
251 val = canonicalize_constructor_val (unshare_expr (val), sym);
252 if (val && is_gimple_min_invariant (val))
253 return val;
254 else
255 return NULL_TREE;
256 }
257 /* Variables declared 'const' without an initializer
258 have zero as the initializer if they may not be
259 overridden at link or run time. */
260 if (!val
261 && is_gimple_reg_type (TREE_TYPE (sym)))
262 return build_zero_cst (TREE_TYPE (sym));
263 }
264
265 return NULL_TREE;
266 }
267
268
269
270 /* Subroutine of fold_stmt. We perform several simplifications of the
271 memory reference tree EXPR and make sure to re-gimplify them properly
272 after propagation of constant addresses. IS_LHS is true if the
273 reference is supposed to be an lvalue. */
274
275 static tree
276 maybe_fold_reference (tree expr, bool is_lhs)
277 {
278 tree result;
279
280 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
281 || TREE_CODE (expr) == REALPART_EXPR
282 || TREE_CODE (expr) == IMAGPART_EXPR)
283 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
284 return fold_unary_loc (EXPR_LOCATION (expr),
285 TREE_CODE (expr),
286 TREE_TYPE (expr),
287 TREE_OPERAND (expr, 0));
288 else if (TREE_CODE (expr) == BIT_FIELD_REF
289 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
290 return fold_ternary_loc (EXPR_LOCATION (expr),
291 TREE_CODE (expr),
292 TREE_TYPE (expr),
293 TREE_OPERAND (expr, 0),
294 TREE_OPERAND (expr, 1),
295 TREE_OPERAND (expr, 2));
296
297 if (!is_lhs
298 && (result = fold_const_aggregate_ref (expr))
299 && is_gimple_min_invariant (result))
300 return result;
301
302 return NULL_TREE;
303 }
304
305
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  /* Dispatch on the shape of the RHS rather than on individual codes.  */
  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	/* Clobbers mark end-of-life of a variable; never fold them.  */
	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		/* Try to resolve the virtual call target statically.  */
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			location_t loc = gimple_location_safe (stmt);
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We can not use __builtin_unreachable here because it
			 can not have address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }

	  }
	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    /* Fold the referenced object and rebuild the address of the
	       result; &MEM[p, 0] simplifies to p itself.  */
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		 && (CONSTRUCTOR_NELTS (rhs)
		     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  All
	       elements must be literal constants for this to be valid.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (TREE_CODE (val) != INTEGER_CST
		  && TREE_CODE (val) != REAL_CST
		  && TREE_CODE (val) != FIXED_CST)
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return get_symbol_constant_value (rhs);

	/* If we couldn't fold the RHS, hand over to the generic
	   fold routines.  */
	if (result == NULL_TREE)
	  result = fold (rhs);

	/* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
	   that may have been added by fold, and "useless" type
	   conversions that might now be apparent due to propagation.  */
	STRIP_USELESS_TYPE_CONVERSION (result);

	if (result != rhs && valid_gimple_rhs_p (result))
	  return result;

	return NULL_TREE;
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to canonicalize for boolean-typed X the comparisons
	 X == 0, X == 1, X != 0, and X != 1.  */
      if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	  || gimple_assign_rhs_code (stmt) == NE_EXPR)
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  tree op1 = gimple_assign_rhs1 (stmt);
	  tree op2 = gimple_assign_rhs2 (stmt);
	  tree type = TREE_TYPE (op1);

	  /* Check whether the comparison operands are of the same boolean
	     type as the result type is.
	     Check that second operand is an integer-constant with value
	     one or zero.  */
	  if (TREE_CODE (op2) == INTEGER_CST
	      && (integer_zerop (op2) || integer_onep (op2))
	      && useless_type_conversion_p (TREE_TYPE (lhs), type))
	    {
	      enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
	      bool is_logical_not = false;

	      /* X == 0 and X != 1 is a logical-not of X
		 X == 1 and X != 0 is X  */
	      if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		  || (cmp_code == NE_EXPR && integer_onep (op2)))
		is_logical_not = true;

	      if (is_logical_not == false)
		result = op1;
	      /* Only for one-bit precision typed X the transformation
		 !X -> ~X is valid.  */
	      else if (TYPE_PRECISION (type) == 1)
		result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
				     type, op1);
	      /* Otherwise we use !X -> X ^ 1.  */
	      else
		result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
				     type, op1, build_int_cst (type, 1));

	    }
	}

      if (!result)
	result = fold_binary_loc (loc, subcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)),
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_TERNARY_RHS:
      /* Try to fold a conditional expression.  */
      if (gimple_assign_rhs_code (stmt) == COND_EXPR)
	{
	  tree op0 = gimple_assign_rhs1 (stmt);
	  tree tem;
	  bool set = false;
	  location_t cond_loc = gimple_location (stmt);

	  if (COMPARISON_CLASS_P (op0))
	    {
	      /* Defer overflow warnings until we know the fold result is
		 actually usable as a gimple rhs.  */
	      fold_defer_overflow_warnings ();
	      tem = fold_binary_loc (cond_loc,
				     TREE_CODE (op0), TREE_TYPE (op0),
				     TREE_OPERAND (op0, 0),
				     TREE_OPERAND (op0, 1));
	      /* This is actually a conditional expression, not a GIMPLE
		 conditional statement, however, the valid_gimple_rhs_p
		 test still applies.  */
	      set = (tem && is_gimple_condexpr (tem)
		     && valid_gimple_rhs_p (tem));
	      fold_undefer_overflow_warnings (set, stmt, 0);
	    }
	  else if (is_gimple_min_invariant (op0))
	    {
	      tem = op0;
	      set = true;
	    }
	  else
	    return NULL_TREE;

	  if (set)
	    result = fold_build3_loc (cond_loc, COND_EXPR,
				      TREE_TYPE (gimple_assign_lhs (stmt)), tem,
				      gimple_assign_rhs2 (stmt),
				      gimple_assign_rhs3 (stmt));
	}

      if (!result)
	result = fold_ternary_loc (loc, subcode,
				   TREE_TYPE (gimple_assign_lhs (stmt)),
				   gimple_assign_rhs1 (stmt),
				   gimple_assign_rhs2 (stmt),
				   gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
544
545 /* Attempt to fold a conditional statement. Return true if any changes were
546 made. We only attempt to fold the condition expression, and do not perform
547 any transformation that would require alteration of the cfg. It is
548 assumed that the operands have been previously folded. */
549
550 static bool
551 fold_gimple_cond (gcond *stmt)
552 {
553 tree result = fold_binary_loc (gimple_location (stmt),
554 gimple_cond_code (stmt),
555 boolean_type_node,
556 gimple_cond_lhs (stmt),
557 gimple_cond_rhs (stmt));
558
559 if (result)
560 {
561 STRIP_USELESS_TYPE_CONVERSION (result);
562 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
563 {
564 gimple_cond_set_condition_from_tree (stmt, result);
565 return true;
566 }
567 }
568
569 return false;
570 }
571
572
/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  The first VUSE and the last VDEF of the whole
   sequence are kept identical to those of the replaced statement so the
   surrounding virtual operand chain stays intact.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple new_stmt = gsi_stmt (i);
      /* A statement needs a VDEF when it stores to memory: either an
	 assignment to a non-register lhs, or a call that can clobber
	 memory (i.e. not novops/pure/const/noreturn).  */
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  /* The last store in the sequence reuses the original VDEF;
	     earlier stores get fresh SSA names.  */
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
644
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  /* Gimplification needs a context; in-SSA form requires SSA names.  */
  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), true);
	  return;
	}
    }
  else
    {
      /* Gimplify EXPR into a temporary and assign the temporary to the
	 original call's lhs as the last statement of the sequence.  */
      tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  gsi_replace_with_seq_vops (si_p, stmts);
}
700
701
702 /* Replace the call at *GSI with the gimple value VAL. */
703
704 static void
705 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
706 {
707 gimple stmt = gsi_stmt (*gsi);
708 tree lhs = gimple_call_lhs (stmt);
709 gimple repl;
710 if (lhs)
711 {
712 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
713 val = fold_convert (TREE_TYPE (lhs), val);
714 repl = gimple_build_assign (lhs, val);
715 }
716 else
717 repl = gimple_build_nop ();
718 tree vdef = gimple_vdef (stmt);
719 if (vdef && TREE_CODE (vdef) == SSA_NAME)
720 {
721 unlink_stmt_vdef (stmt);
722 release_ssa_name (vdef);
723 }
724 gsi_replace (gsi, repl, true);
725 }
726
727 /* Replace the call at *GSI with the new call REPL and fold that
728 again. */
729
730 static void
731 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple repl)
732 {
733 gimple stmt = gsi_stmt (*gsi);
734 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
735 gimple_set_location (repl, gimple_location (stmt));
736 if (gimple_vdef (stmt)
737 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
738 {
739 gimple_set_vdef (repl, gimple_vdef (stmt));
740 gimple_set_vuse (repl, gimple_vuse (stmt));
741 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
742 }
743 gsi_replace (gsi, repl, true);
744 fold_stmt (gsi);
745 }
746
747 /* Return true if VAR is a VAR_DECL or a component thereof. */
748
749 static bool
750 var_decl_component_p (tree var)
751 {
752 tree inner = var;
753 while (handled_component_p (inner))
754 inner = TREE_OPERAND (inner, 0);
755 return SSA_VAR_P (inner);
756 }
757
758 /* Fold function call to builtin mem{{,p}cpy,move}. Return
759 false if no simplification can be made.
760 If ENDP is 0, return DEST (like memcpy).
761 If ENDP is 1, return DEST+LEN (like mempcpy).
762 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
763 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
764 (memmove). */
765
766 static bool
767 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
768 tree dest, tree src, int endp)
769 {
770 gimple stmt = gsi_stmt (*gsi);
771 tree lhs = gimple_call_lhs (stmt);
772 tree len = gimple_call_arg (stmt, 2);
773 tree destvar, srcvar;
774 location_t loc = gimple_location (stmt);
775
776 /* If the LEN parameter is zero, return DEST. */
777 if (integer_zerop (len))
778 {
779 gimple repl;
780 if (gimple_call_lhs (stmt))
781 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
782 else
783 repl = gimple_build_nop ();
784 tree vdef = gimple_vdef (stmt);
785 if (vdef && TREE_CODE (vdef) == SSA_NAME)
786 {
787 unlink_stmt_vdef (stmt);
788 release_ssa_name (vdef);
789 }
790 gsi_replace (gsi, repl, true);
791 return true;
792 }
793
794 /* If SRC and DEST are the same (and not volatile), return
795 DEST{,+LEN,+LEN-1}. */
796 if (operand_equal_p (src, dest, 0))
797 {
798 unlink_stmt_vdef (stmt);
799 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
800 release_ssa_name (gimple_vdef (stmt));
801 if (!lhs)
802 {
803 gsi_replace (gsi, gimple_build_nop (), true);
804 return true;
805 }
806 goto done;
807 }
808 else
809 {
810 tree srctype, desttype;
811 unsigned int src_align, dest_align;
812 tree off0;
813
814 /* Build accesses at offset zero with a ref-all character type. */
815 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
816 ptr_mode, true), 0);
817
818 /* If we can perform the copy efficiently with first doing all loads
819 and then all stores inline it that way. Currently efficiently
820 means that we can load all the memory into a single integer
821 register which is what MOVE_MAX gives us. */
822 src_align = get_pointer_alignment (src);
823 dest_align = get_pointer_alignment (dest);
824 if (tree_fits_uhwi_p (len)
825 && compare_tree_int (len, MOVE_MAX) <= 0
826 /* ??? Don't transform copies from strings with known length this
827 confuses the tree-ssa-strlen.c. This doesn't handle
828 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
829 reason. */
830 && !c_strlen (src, 2))
831 {
832 unsigned ilen = tree_to_uhwi (len);
833 if (exact_log2 (ilen) != -1)
834 {
835 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
836 if (type
837 && TYPE_MODE (type) != BLKmode
838 && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
839 == ilen * 8)
840 /* If the destination pointer is not aligned we must be able
841 to emit an unaligned store. */
842 && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
843 || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type), dest_align)))
844 {
845 tree srctype = type;
846 tree desttype = type;
847 if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
848 srctype = build_aligned_type (type, src_align);
849 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
850 tree tem = fold_const_aggregate_ref (srcmem);
851 if (tem)
852 srcmem = tem;
853 else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
854 && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
855 src_align))
856 srcmem = NULL_TREE;
857 if (srcmem)
858 {
859 gimple new_stmt;
860 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
861 {
862 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
863 if (gimple_in_ssa_p (cfun))
864 srcmem = make_ssa_name (TREE_TYPE (srcmem),
865 new_stmt);
866 else
867 srcmem = create_tmp_reg (TREE_TYPE (srcmem));
868 gimple_assign_set_lhs (new_stmt, srcmem);
869 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
870 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
871 }
872 if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
873 desttype = build_aligned_type (type, dest_align);
874 new_stmt
875 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
876 dest, off0),
877 srcmem);
878 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
879 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
880 if (gimple_vdef (new_stmt)
881 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
882 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
883 if (!lhs)
884 {
885 gsi_replace (gsi, new_stmt, true);
886 return true;
887 }
888 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
889 goto done;
890 }
891 }
892 }
893 }
894
895 if (endp == 3)
896 {
897 /* Both DEST and SRC must be pointer types.
898 ??? This is what old code did. Is the testing for pointer types
899 really mandatory?
900
901 If either SRC is readonly or length is 1, we can use memcpy. */
902 if (!dest_align || !src_align)
903 return false;
904 if (readonly_data_expr (src)
905 || (tree_fits_uhwi_p (len)
906 && (MIN (src_align, dest_align) / BITS_PER_UNIT
907 >= tree_to_uhwi (len))))
908 {
909 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
910 if (!fn)
911 return false;
912 gimple_call_set_fndecl (stmt, fn);
913 gimple_call_set_arg (stmt, 0, dest);
914 gimple_call_set_arg (stmt, 1, src);
915 fold_stmt (gsi);
916 return true;
917 }
918
919 /* If *src and *dest can't overlap, optimize into memcpy as well. */
920 if (TREE_CODE (src) == ADDR_EXPR
921 && TREE_CODE (dest) == ADDR_EXPR)
922 {
923 tree src_base, dest_base, fn;
924 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
925 HOST_WIDE_INT size = -1;
926 HOST_WIDE_INT maxsize = -1;
927
928 srcvar = TREE_OPERAND (src, 0);
929 src_base = get_ref_base_and_extent (srcvar, &src_offset,
930 &size, &maxsize);
931 destvar = TREE_OPERAND (dest, 0);
932 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
933 &size, &maxsize);
934 if (tree_fits_uhwi_p (len))
935 maxsize = tree_to_uhwi (len);
936 else
937 maxsize = -1;
938 src_offset /= BITS_PER_UNIT;
939 dest_offset /= BITS_PER_UNIT;
940 if (SSA_VAR_P (src_base)
941 && SSA_VAR_P (dest_base))
942 {
943 if (operand_equal_p (src_base, dest_base, 0)
944 && ranges_overlap_p (src_offset, maxsize,
945 dest_offset, maxsize))
946 return false;
947 }
948 else if (TREE_CODE (src_base) == MEM_REF
949 && TREE_CODE (dest_base) == MEM_REF)
950 {
951 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
952 TREE_OPERAND (dest_base, 0), 0))
953 return false;
954 offset_int off = mem_ref_offset (src_base) + src_offset;
955 if (!wi::fits_shwi_p (off))
956 return false;
957 src_offset = off.to_shwi ();
958
959 off = mem_ref_offset (dest_base) + dest_offset;
960 if (!wi::fits_shwi_p (off))
961 return false;
962 dest_offset = off.to_shwi ();
963 if (ranges_overlap_p (src_offset, maxsize,
964 dest_offset, maxsize))
965 return false;
966 }
967 else
968 return false;
969
970 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
971 if (!fn)
972 return false;
973 gimple_call_set_fndecl (stmt, fn);
974 gimple_call_set_arg (stmt, 0, dest);
975 gimple_call_set_arg (stmt, 1, src);
976 fold_stmt (gsi);
977 return true;
978 }
979
980 /* If the destination and source do not alias optimize into
981 memcpy as well. */
982 if ((is_gimple_min_invariant (dest)
983 || TREE_CODE (dest) == SSA_NAME)
984 && (is_gimple_min_invariant (src)
985 || TREE_CODE (src) == SSA_NAME))
986 {
987 ao_ref destr, srcr;
988 ao_ref_init_from_ptr_and_size (&destr, dest, len);
989 ao_ref_init_from_ptr_and_size (&srcr, src, len);
990 if (!refs_may_alias_p_1 (&destr, &srcr, false))
991 {
992 tree fn;
993 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
994 if (!fn)
995 return false;
996 gimple_call_set_fndecl (stmt, fn);
997 gimple_call_set_arg (stmt, 0, dest);
998 gimple_call_set_arg (stmt, 1, src);
999 fold_stmt (gsi);
1000 return true;
1001 }
1002 }
1003
1004 return false;
1005 }
1006
1007 if (!tree_fits_shwi_p (len))
1008 return false;
1009 /* FIXME:
1010 This logic lose for arguments like (type *)malloc (sizeof (type)),
1011 since we strip the casts of up to VOID return value from malloc.
1012 Perhaps we ought to inherit type from non-VOID argument here? */
1013 STRIP_NOPS (src);
1014 STRIP_NOPS (dest);
1015 if (!POINTER_TYPE_P (TREE_TYPE (src))
1016 || !POINTER_TYPE_P (TREE_TYPE (dest)))
1017 return false;
1018 /* In the following try to find a type that is most natural to be
1019 used for the memcpy source and destination and that allows
1020 the most optimization when memcpy is turned into a plain assignment
1021 using that type. In theory we could always use a char[len] type
1022 but that only gains us that the destination and source possibly
1023 no longer will have their address taken. */
1024 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
1025 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
1026 {
1027 tree tem = TREE_OPERAND (src, 0);
1028 STRIP_NOPS (tem);
1029 if (tem != TREE_OPERAND (src, 0))
1030 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
1031 }
1032 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
1033 {
1034 tree tem = TREE_OPERAND (dest, 0);
1035 STRIP_NOPS (tem);
1036 if (tem != TREE_OPERAND (dest, 0))
1037 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
1038 }
1039 srctype = TREE_TYPE (TREE_TYPE (src));
1040 if (TREE_CODE (srctype) == ARRAY_TYPE
1041 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1042 {
1043 srctype = TREE_TYPE (srctype);
1044 STRIP_NOPS (src);
1045 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
1046 }
1047 desttype = TREE_TYPE (TREE_TYPE (dest));
1048 if (TREE_CODE (desttype) == ARRAY_TYPE
1049 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
1050 {
1051 desttype = TREE_TYPE (desttype);
1052 STRIP_NOPS (dest);
1053 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
1054 }
1055 if (TREE_ADDRESSABLE (srctype)
1056 || TREE_ADDRESSABLE (desttype))
1057 return false;
1058
1059 /* Make sure we are not copying using a floating-point mode or
1060 a type whose size possibly does not match its precision. */
1061 if (FLOAT_MODE_P (TYPE_MODE (desttype))
1062 || TREE_CODE (desttype) == BOOLEAN_TYPE
1063 || TREE_CODE (desttype) == ENUMERAL_TYPE)
1064 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
1065 if (FLOAT_MODE_P (TYPE_MODE (srctype))
1066 || TREE_CODE (srctype) == BOOLEAN_TYPE
1067 || TREE_CODE (srctype) == ENUMERAL_TYPE)
1068 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
1069 if (!srctype)
1070 srctype = desttype;
1071 if (!desttype)
1072 desttype = srctype;
1073 if (!srctype)
1074 return false;
1075
1076 src_align = get_pointer_alignment (src);
1077 dest_align = get_pointer_alignment (dest);
1078 if (dest_align < TYPE_ALIGN (desttype)
1079 || src_align < TYPE_ALIGN (srctype))
1080 return false;
1081
1082 destvar = dest;
1083 STRIP_NOPS (destvar);
1084 if (TREE_CODE (destvar) == ADDR_EXPR
1085 && var_decl_component_p (TREE_OPERAND (destvar, 0))
1086 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
1087 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
1088 else
1089 destvar = NULL_TREE;
1090
1091 srcvar = src;
1092 STRIP_NOPS (srcvar);
1093 if (TREE_CODE (srcvar) == ADDR_EXPR
1094 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
1095 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1096 {
1097 if (!destvar
1098 || src_align >= TYPE_ALIGN (desttype))
1099 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
1100 srcvar, off0);
1101 else if (!STRICT_ALIGNMENT)
1102 {
1103 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1104 src_align);
1105 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
1106 }
1107 else
1108 srcvar = NULL_TREE;
1109 }
1110 else
1111 srcvar = NULL_TREE;
1112
1113 if (srcvar == NULL_TREE && destvar == NULL_TREE)
1114 return false;
1115
1116 if (srcvar == NULL_TREE)
1117 {
1118 STRIP_NOPS (src);
1119 if (src_align >= TYPE_ALIGN (desttype))
1120 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1121 else
1122 {
1123 if (STRICT_ALIGNMENT)
1124 return false;
1125 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1126 src_align);
1127 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1128 }
1129 }
1130 else if (destvar == NULL_TREE)
1131 {
1132 STRIP_NOPS (dest);
1133 if (dest_align >= TYPE_ALIGN (srctype))
1134 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1135 else
1136 {
1137 if (STRICT_ALIGNMENT)
1138 return false;
1139 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1140 dest_align);
1141 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1142 }
1143 }
1144
1145 gimple new_stmt;
1146 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1147 {
1148 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1149 if (gimple_in_ssa_p (cfun))
1150 srcvar = make_ssa_name (TREE_TYPE (srcvar), new_stmt);
1151 else
1152 srcvar = create_tmp_reg (TREE_TYPE (srcvar));
1153 gimple_assign_set_lhs (new_stmt, srcvar);
1154 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1155 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1156 }
1157 new_stmt = gimple_build_assign (destvar, srcvar);
1158 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1159 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1160 if (gimple_vdef (new_stmt)
1161 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1162 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1163 if (!lhs)
1164 {
1165 gsi_replace (gsi, new_stmt, true);
1166 return true;
1167 }
1168 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1169 }
1170
1171 done:
1172 if (endp == 0 || endp == 3)
1173 len = NULL_TREE;
1174 else if (endp == 2)
1175 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
1176 ssize_int (1));
1177 if (endp == 2 || endp == 1)
1178 dest = fold_build_pointer_plus_loc (loc, dest, len);
1179
1180 dest = force_gimple_operand_gsi (gsi, dest, false, NULL_TREE, true,
1181 GSI_SAME_STMT);
1182 gimple repl = gimple_build_assign (lhs, dest);
1183 gsi_replace (gsi, repl, true);
1184 return true;
1185 }
1186
1187 /* Fold function call to builtin memset or bzero at *GSI setting the
1188 memory of size LEN to VAL. Return whether a simplification was made. */
1189
1190 static bool
1191 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1192 {
1193 gimple stmt = gsi_stmt (*gsi);
1194 tree etype;
1195 unsigned HOST_WIDE_INT length, cval;
1196
1197 /* If the LEN parameter is zero, return DEST. */
1198 if (integer_zerop (len))
1199 {
1200 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1201 return true;
1202 }
1203
1204 if (! tree_fits_uhwi_p (len))
1205 return false;
1206
1207 if (TREE_CODE (c) != INTEGER_CST)
1208 return false;
1209
1210 tree dest = gimple_call_arg (stmt, 0);
1211 tree var = dest;
1212 if (TREE_CODE (var) != ADDR_EXPR)
1213 return false;
1214
1215 var = TREE_OPERAND (var, 0);
1216 if (TREE_THIS_VOLATILE (var))
1217 return false;
1218
1219 etype = TREE_TYPE (var);
1220 if (TREE_CODE (etype) == ARRAY_TYPE)
1221 etype = TREE_TYPE (etype);
1222
1223 if (!INTEGRAL_TYPE_P (etype)
1224 && !POINTER_TYPE_P (etype))
1225 return NULL_TREE;
1226
1227 if (! var_decl_component_p (var))
1228 return NULL_TREE;
1229
1230 length = tree_to_uhwi (len);
1231 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
1232 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1233 return NULL_TREE;
1234
1235 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1236 return NULL_TREE;
1237
1238 if (integer_zerop (c))
1239 cval = 0;
1240 else
1241 {
1242 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1243 return NULL_TREE;
1244
1245 cval = TREE_INT_CST_LOW (c);
1246 cval &= 0xff;
1247 cval |= cval << 8;
1248 cval |= cval << 16;
1249 cval |= (cval << 31) << 1;
1250 }
1251
1252 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1253 gimple store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1254 gimple_set_vuse (store, gimple_vuse (stmt));
1255 tree vdef = gimple_vdef (stmt);
1256 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1257 {
1258 gimple_set_vdef (store, gimple_vdef (stmt));
1259 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1260 }
1261 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1262 if (gimple_call_lhs (stmt))
1263 {
1264 gimple asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1265 gsi_replace (gsi, asgn, true);
1266 }
1267 else
1268 {
1269 gimple_stmt_iterator gsi2 = *gsi;
1270 gsi_prev (gsi);
1271 gsi_remove (&gsi2, true);
1272 }
1273
1274 return true;
1275 }
1276
1277
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */

static bool
get_maxval_strlen (tree arg, tree *length, bitmap *visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	  && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_maxval_strlen (TREE_OPERAND (aop0, 0),
				      length, visited, type);
	}

      if (type == 2)
	{
	  /* Maximum-value query: ARG itself must be a non-negative
	     integer constant.  */
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length)
	{
	  /* A value was already recorded from another path; merge.  */
	  if (type > 0)
	    {
	      /* For maximum queries keep the larger of the two
	         constants.  */
	      if (TREE_CODE (*length) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*length, val))
		*length = val;
	      return true;
	    }
	  /* For the exact string length (TYPE == 0) all paths must
	     agree on the same value.  */
	  else if (simple_cst_equal (val, *length) != 1)
	    return false;
	}

      *length = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
	 constant length or come from another SSA_NAME with a constant
	 length.  */
      if (gimple_assign_single_p (def_stmt)
	  || gimple_assign_unary_nop_p (def_stmt))
	{
	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  return get_maxval_strlen (rhs, length, visited, type);
	}
      else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	{
	  /* Both arms of a COND_EXPR must yield a usable length.  */
	  tree op2 = gimple_assign_rhs2 (def_stmt);
	  tree op3 = gimple_assign_rhs3 (def_stmt);
	  return get_maxval_strlen (op2, length, visited, type)
		 && get_maxval_strlen (op3, length, visited, type);
	}
      return false;

    case GIMPLE_PHI:
      {
	/* All the arguments of the PHI node must have the same constant
	   length.  */
	unsigned i;

	for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	  {
	    tree arg = gimple_phi_arg (def_stmt, i)->def;

	    /* If this PHI has itself as an argument, we cannot
	       determine the string length of this argument.  However,
	       if we can find a constant string length for the other
	       PHI args then we can still be sure that this is a
	       constant string length.  So be optimistic and just
	       continue with the next argument.  */
	    if (arg == gimple_phi_result (def_stmt))
	      continue;

	    if (!get_maxval_strlen (arg, length, visited, type))
	      return false;
	  }
      }
      return true;

    default:
      return false;
    }
}
1403
1404 tree
1405 get_maxval_strlen (tree arg, int type)
1406 {
1407 bitmap visited = NULL;
1408 tree len = NULL_TREE;
1409 if (!get_maxval_strlen (arg, &len, &visited, type))
1410 len = NULL_TREE;
1411 if (visited)
1412 BITMAP_FREE (visited);
1413
1414 return len;
1415 }
1416
1417
1418 /* Fold function call to builtin strcpy with arguments DEST and SRC.
1419 If LEN is not NULL, it represents the length of the string to be
1420 copied. Return NULL_TREE if no simplification can be made. */
1421
1422 static bool
1423 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
1424 tree dest, tree src)
1425 {
1426 location_t loc = gimple_location (gsi_stmt (*gsi));
1427 tree fn;
1428
1429 /* If SRC and DEST are the same (and not volatile), return DEST. */
1430 if (operand_equal_p (src, dest, 0))
1431 {
1432 replace_call_with_value (gsi, dest);
1433 return true;
1434 }
1435
1436 if (optimize_function_for_size_p (cfun))
1437 return false;
1438
1439 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1440 if (!fn)
1441 return false;
1442
1443 tree len = get_maxval_strlen (src, 0);
1444 if (!len)
1445 return false;
1446
1447 len = fold_convert_loc (loc, size_type_node, len);
1448 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1449 len = force_gimple_operand_gsi (gsi, len, true,
1450 NULL_TREE, true, GSI_SAME_STMT);
1451 gimple repl = gimple_build_call (fn, 3, dest, src, len);
1452 replace_call_with_call_and_fold (gsi, repl);
1453 return true;
1454 }
1455
1456 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1457 If SLEN is not NULL, it represents the length of the source string.
1458 Return NULL_TREE if no simplification can be made. */
1459
1460 static bool
1461 gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1462 tree dest, tree src, tree len)
1463 {
1464 location_t loc = gimple_location (gsi_stmt (*gsi));
1465 tree fn;
1466
1467 /* If the LEN parameter is zero, return DEST. */
1468 if (integer_zerop (len))
1469 {
1470 replace_call_with_value (gsi, dest);
1471 return true;
1472 }
1473
1474 /* We can't compare slen with len as constants below if len is not a
1475 constant. */
1476 if (TREE_CODE (len) != INTEGER_CST)
1477 return false;
1478
1479 /* Now, we must be passed a constant src ptr parameter. */
1480 tree slen = get_maxval_strlen (src, 0);
1481 if (!slen || TREE_CODE (slen) != INTEGER_CST)
1482 return false;
1483
1484 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1485
1486 /* We do not support simplification of this case, though we do
1487 support it when expanding trees into RTL. */
1488 /* FIXME: generate a call to __builtin_memset. */
1489 if (tree_int_cst_lt (slen, len))
1490 return false;
1491
1492 /* OK transform into builtin memcpy. */
1493 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1494 if (!fn)
1495 return false;
1496
1497 len = fold_convert_loc (loc, size_type_node, len);
1498 len = force_gimple_operand_gsi (gsi, len, true,
1499 NULL_TREE, true, GSI_SAME_STMT);
1500 gimple repl = gimple_build_call (fn, 3, dest, src, len);
1501 replace_call_with_call_and_fold (gsi, repl);
1502 return true;
1503 }
1504
/* Simplify a call to the strcat builtin at *GSI.  DST and SRC are the
   arguments to the call.

   If SRC is the empty string, the call is replaced by DST.  Otherwise,
   when the length of SRC is a known constant and the containing block
   is optimized for speed, the call is split into
     tmp = strlen (DST);  memcpy (DST + tmp, SRC, len (SRC) + 1);
   Return true if the call was simplified.  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* The strlen + memcpy expansion is larger code; keep plain strcat
     when optimizing for size.  */
  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, 0);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  if (gimple_in_ssa_p (cfun))
    newdst = make_ssa_name (size_type_node);
  else
    newdst = create_tmp_reg (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  /* Copy len + 1 bytes so SRC's terminating NUL is transferred too.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      /* strcat returns its first argument.  */
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
	 stmt iterator to the memcpy call.
	 ??? We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
1599
1600 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1601 are the arguments to the call. */
1602
1603 static bool
1604 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1605 {
1606 gimple stmt = gsi_stmt (*gsi);
1607 tree dest = gimple_call_arg (stmt, 0);
1608 tree src = gimple_call_arg (stmt, 1);
1609 tree size = gimple_call_arg (stmt, 2);
1610 tree fn;
1611 const char *p;
1612
1613
1614 p = c_getstr (src);
1615 /* If the SRC parameter is "", return DEST. */
1616 if (p && *p == '\0')
1617 {
1618 replace_call_with_value (gsi, dest);
1619 return true;
1620 }
1621
1622 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1623 return false;
1624
1625 /* If __builtin_strcat_chk is used, assume strcat is available. */
1626 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1627 if (!fn)
1628 return false;
1629
1630 gimple repl = gimple_build_call (fn, 2, dest, src);
1631 replace_call_with_call_and_fold (gsi, repl);
1632 return true;
1633 }
1634
1635 /* Simplify a call to the strncat builtin. */
1636
1637 static bool
1638 gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
1639 {
1640 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
1641 tree dst = gimple_call_arg (stmt, 0);
1642 tree src = gimple_call_arg (stmt, 1);
1643 tree len = gimple_call_arg (stmt, 2);
1644
1645 const char *p = c_getstr (src);
1646
1647 /* If the requested length is zero, or the src parameter string
1648 length is zero, return the dst parameter. */
1649 if (integer_zerop (len) || (p && *p == '\0'))
1650 {
1651 replace_call_with_value (gsi, dst);
1652 return true;
1653 }
1654
1655 /* If the requested len is greater than or equal to the string
1656 length, call strcat. */
1657 if (TREE_CODE (len) == INTEGER_CST && p
1658 && compare_tree_int (len, strlen (p)) >= 0)
1659 {
1660 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
1661
1662 /* If the replacement _DECL isn't initialized, don't do the
1663 transformation. */
1664 if (!fn)
1665 return false;
1666
1667 gcall *repl = gimple_build_call (fn, 2, dst, src);
1668 replace_call_with_call_and_fold (gsi, repl);
1669 return true;
1670 }
1671
1672 return false;
1673 }
1674
1675 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
1676 LEN, and SIZE. */
1677
1678 static bool
1679 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
1680 {
1681 gimple stmt = gsi_stmt (*gsi);
1682 tree dest = gimple_call_arg (stmt, 0);
1683 tree src = gimple_call_arg (stmt, 1);
1684 tree len = gimple_call_arg (stmt, 2);
1685 tree size = gimple_call_arg (stmt, 3);
1686 tree fn;
1687 const char *p;
1688
1689 p = c_getstr (src);
1690 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
1691 if ((p && *p == '\0')
1692 || integer_zerop (len))
1693 {
1694 replace_call_with_value (gsi, dest);
1695 return true;
1696 }
1697
1698 if (! tree_fits_uhwi_p (size))
1699 return false;
1700
1701 if (! integer_all_onesp (size))
1702 {
1703 tree src_len = c_strlen (src, 1);
1704 if (src_len
1705 && tree_fits_uhwi_p (src_len)
1706 && tree_fits_uhwi_p (len)
1707 && ! tree_int_cst_lt (len, src_len))
1708 {
1709 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
1710 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
1711 if (!fn)
1712 return false;
1713
1714 gimple repl = gimple_build_call (fn, 3, dest, src, size);
1715 replace_call_with_call_and_fold (gsi, repl);
1716 return true;
1717 }
1718 return false;
1719 }
1720
1721 /* If __builtin_strncat_chk is used, assume strncat is available. */
1722 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
1723 if (!fn)
1724 return false;
1725
1726 gimple repl = gimple_build_call (fn, 3, dest, src, len);
1727 replace_call_with_call_and_fold (gsi, repl);
1728 return true;
1729 }
1730
/* Fold a call to the fputs builtin at *GSI.  ARG0 and ARG1 are the
   string and stream arguments to the call.  UNLOCKED is true if this
   is actually a call to fputs_unlocked.  Depending on the constant
   length of ARG0 the call is deleted, turned into fputc, or turned
   into fwrite.  Return true if a simplification was made.  */

static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
			   tree arg0, tree arg1,
			   bool unlocked)
{
  gimple stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
			 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
			 : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
			  ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
			  : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, 0);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely .  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
	const char *p = c_getstr (arg0);
	if (p != NULL)
	  {
	    if (!fn_fputc)
	      return false;

	    gimple repl = gimple_build_call (fn_fputc, 2,
					     build_int_cst
					     (integer_type_node, p[0]), arg1);
	    replace_call_with_call_and_fold (gsi, repl);
	    return true;
	  }
      }
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
	/* If optimizing for size keep fputs.  */
	if (optimize_function_for_size_p (cfun))
	  return false;
	/* New argument list transforming fputs(string, stream) to
	   fwrite(string, 1, len, stream).  */
	if (!fn_fwrite)
	  return false;

	gimple repl = gimple_build_call (fn_fwrite, 4, arg0,
					 size_one_node, len, arg1);
	replace_call_with_call_and_fold (gsi, repl);
	return true;
      }
    default:
      gcc_unreachable ();
    }
  return false;
}
1807
/* Fold a call to a __mem{cpy,pcpy,move,set}_chk builtin at *GSI.
   DEST, SRC, LEN, and SIZE are the arguments to the call and FCODE is
   the BUILT_IN_* code of the builtin.  When the object-size check can
   be proven to pass (SIZE unknown, or SIZE >= LEN / its maximum
   value), the call is replaced by the corresponding unchecked
   builtin.  Return true if the call was simplified.  */

static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
				tree dest, tree src, tree len, tree size,
				enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  /* True when the call's return value is unused.  */
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	{
	  replace_call_with_value (gsi, dest);
	  return true;
	}
      else
	{
	  tree temp = fold_build_pointer_plus_loc (loc, dest, len);
	  temp = force_gimple_operand_gsi (gsi, temp,
					   false, NULL_TREE, true,
					   GSI_SAME_STMT);
	  replace_call_with_value (gsi, temp);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  /* MAXLEN is the maximum value LEN can have, if determinable.  */
  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
		  if (!fn)
		    return false;

		  gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	      return false;
	    }
	}
      else
	maxlen = len;

      /* The check could fail at run time; keep the _chk call.  */
      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1907
/* Fold a call to a __st[rp]cpy_chk builtin at *GSI.
   DEST, SRC, and SIZE are the arguments to the call and FCODE is the
   BUILT_IN_* code of the builtin.  When the object-size check can be
   proven to pass the call is replaced by the unchecked st[rp]cpy (or,
   in some cases, by __memcpy_chk / __strcpy_chk).  Return true if the
   call was simplified.  */

static bool
gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
				tree dest,
				tree src, tree size,
				enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  /* True when the call's return value is unused.  */
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree len, fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  /* MAXLEN is the maximum string length of SRC, if determinable.  */
  tree maxlen = get_maxval_strlen (src, 1);
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return false;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
		  if (!fn)
		    return false;

		  gimple repl = gimple_build_call (fn, 3, dest, src, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return false;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return false;

	      /* Copy len + 1 bytes to include the terminating NUL.  */
	      len = fold_convert_loc (loc, size_type_node, len);
	      len = size_binop_loc (loc, PLUS_EXPR, len,
				    build_int_cst (size_type_node, 1));
	      len = force_gimple_operand_gsi (gsi, len, true, NULL_TREE,
					      true, GSI_SAME_STMT);
	      gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	maxlen = len;

      /* MAXLEN excludes the terminating NUL, so SIZE must be
	 strictly greater for the copy to be known safe.  */
      if (! tree_int_cst_lt (maxlen, size))
	return false;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1998
/* Fold a call to a __st{r,p}ncpy_chk builtin at *GSI.  DEST, SRC, LEN,
   and SIZE are the arguments to the call and FCODE is the BUILT_IN_*
   code of the builtin.  When the object-size check can be proven to
   pass the call is replaced by the unchecked st{r,p}ncpy (or, when the
   result of __stpncpy_chk is unused, by __strncpy_chk).  Return true
   if the call was simplified.  */

static bool
gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
				 tree dest, tree src,
				 tree len, tree size,
				 enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  /* True when the call's return value is unused.  */
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
    {
      /* If return value of __stpncpy_chk is ignored,
	 optimize into __strncpy_chk.  */
      fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
      if (fn)
	{
	  gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  /* MAXLEN is the maximum value LEN can have, if determinable.  */
  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      /* The check could fail at run time; keep the _chk call.  */
      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
			      ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2058
2059 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
2060 Return NULL_TREE if no simplification can be made. */
2061
2062 static bool
2063 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
2064 {
2065 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2066 location_t loc = gimple_location (stmt);
2067 tree dest = gimple_call_arg (stmt, 0);
2068 tree src = gimple_call_arg (stmt, 1);
2069 tree fn, len, lenp1;
2070
2071 /* If the result is unused, replace stpcpy with strcpy. */
2072 if (gimple_call_lhs (stmt) == NULL_TREE)
2073 {
2074 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2075 if (!fn)
2076 return false;
2077 gimple_call_set_fndecl (stmt, fn);
2078 fold_stmt (gsi);
2079 return true;
2080 }
2081
2082 len = c_strlen (src, 1);
2083 if (!len
2084 || TREE_CODE (len) != INTEGER_CST)
2085 return false;
2086
2087 if (optimize_function_for_size_p (cfun)
2088 /* If length is zero it's small enough. */
2089 && !integer_zerop (len))
2090 return false;
2091
2092 /* If the source has a known length replace stpcpy with memcpy. */
2093 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2094 if (!fn)
2095 return false;
2096
2097 gimple_seq stmts = NULL;
2098 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
2099 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
2100 tem, build_int_cst (size_type_node, 1));
2101 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2102 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
2103 gimple_set_vuse (repl, gimple_vuse (stmt));
2104 gimple_set_vdef (repl, gimple_vdef (stmt));
2105 if (gimple_vdef (repl)
2106 && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
2107 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
2108 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
2109 /* Replace the result with dest + len. */
2110 stmts = NULL;
2111 tem = gimple_convert (&stmts, loc, sizetype, len);
2112 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2113 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
2114 POINTER_PLUS_EXPR, dest, tem);
2115 gsi_replace (gsi, ret, true);
2116 /* Finally fold the memcpy call. */
2117 gimple_stmt_iterator gsi2 = *gsi;
2118 gsi_prev (&gsi2);
2119 fold_stmt (&gsi2);
2120 return true;
2121 }
2122
/* Fold a call to __{,v}snprintf_chk at *GSI into plain {,v}snprintf when
   the object-size check can be resolved at compile time.  FCODE is either
   BUILT_IN_SNPRINTF_CHK or BUILT_IN_VSNPRINTF_CHK.  Returns true if the
   call was simplified.  */

static bool
gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
				  enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (gimple_call_num_args (stmt) < 5)
    return false;

  dest = gimple_call_arg (stmt, 0);
  len = gimple_call_arg (stmt, 1);
  flag = gimple_call_arg (stmt, 2);
  size = gimple_call_arg (stmt, 3);
  fmt = gimple_call_arg (stmt, 4);

  if (! tree_fits_uhwi_p (size))
    return false;

  /* All-ones SIZE means the object size is unknown, in which case the
     length comparison below is skipped.  */
  if (! integer_all_onesp (size))
    {
      /* MAXLEN is an upper bound on LEN derived from strlen data.  */
      tree maxlen = get_maxval_strlen (len, 2);
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  if (!init_target_chars ())
    return false;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
			      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 5 argument by 3 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, len);
  gimple_call_set_arg (stmt, 2, fmt);
  for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
2203
/* Fold a call to __{,v}sprintf_chk at *GSI into plain {,v}sprintf when
   the output length can be bounded at compile time.  FCODE is either
   BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.  Returns true if the
   call was simplified.  */

static bool
gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
				 enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  unsigned nargs = gimple_call_num_args (stmt);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return false;
  dest = gimple_call_arg (stmt, 0);
  flag = gimple_call_arg (stmt, 1);
  size = gimple_call_arg (stmt, 2);
  fmt = gimple_call_arg (stmt, 3);

  if (! tree_fits_uhwi_p (size))
    return false;

  /* LEN, if computable below, is the exact length of the output.  */
  len = NULL_TREE;

  if (!init_target_chars ())
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = gimple_call_arg (stmt, 4);
	      if (POINTER_TYPE_P (TREE_TYPE (arg)))
		{
		  len = c_strlen (arg, 1);
		  if (! len || ! tree_fits_uhwi_p (len))
		    len = NULL_TREE;
		}
	    }
	}
    }

  /* All-ones SIZE means the object size is unknown; otherwise the known
     output length must fit below SIZE.  */
  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
	return false;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
			      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 4 argument by 2 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, fmt);
  for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
2299
/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
   ORIG may be null if this is a 2-argument call.  We don't attempt to
   simplify calls with more than 3 arguments.

   Returns true if the call was simplified into a strcpy call (plus an
   assignment of the known output length when the result is used), false
   otherwise.  */

static bool
gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree fmt = gimple_call_arg (stmt, 1);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (gimple_call_num_args (stmt) > 3)
    return false;

  if (gimple_call_num_args (stmt) == 3)
    orig = gimple_call_arg (stmt, 2);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return false;

      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
	return false;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
	 'format' is known to contain no % formats.  */
      gimple_seq stmts = NULL;
      gimple repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  /* sprintf returns the number of characters written: here that
	     is simply the constant length of the format string.  */
	  repl = gimple_build_assign (gimple_call_lhs (stmt),
				      build_int_cst (integer_type_node,
						     strlen (fmt_str)));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy; when the result is used this
     additionally requires a known length of ORIG.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn;
      fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return false;

      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
	return false;

      /* ORIG_LEN is only needed to materialize the return value.  */
      tree orig_len = NULL_TREE;
      if (gimple_call_lhs (stmt))
	{
	  orig_len = get_maxval_strlen (orig, 0);
	  if (!orig_len)
	    return false;
	}

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
      gimple_seq stmts = NULL;
      gimple repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (integer_type_node,
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (integer_type_node, orig_len);
	  repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
2424
/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
   FMT, and ORIG.  ORIG may be null if this is a 3-argument call.  We don't
   attempt to simplify calls with more than 4 arguments.

   Returns true if the call was simplified into a strcpy call (plus an
   assignment of the known output length when the result is used), false
   otherwise.  The transformation is only done when the output is known
   to fit in DESTSIZE, so no truncation behavior is lost.  */

static bool
gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest = gimple_call_arg (stmt, 0);
  tree destsize = gimple_call_arg (stmt, 1);
  tree fmt = gimple_call_arg (stmt, 2);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  if (gimple_call_num_args (stmt) > 4)
    return false;

  if (gimple_call_num_args (stmt) == 4)
    orig = gimple_call_arg (stmt, 3);

  if (!tree_fits_uhwi_p (destsize))
    return false;
  unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't optimize snprintf (buf, 4, "abc", ptr++).  */
      if (orig)
	return false;

      /* We could expand this as
	 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
	 or to
	 memcpy (str, fmt_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      size_t len = strlen (fmt_str);
      if (len >= destlen)
	return false;

      gimple_seq stmts = NULL;
      gimple repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  /* snprintf returns the number of characters that would have
	     been written: here the constant format length.  */
	  repl = gimple_build_assign (gimple_call_lhs (stmt),
				      build_int_cst (integer_type_node, len));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't crash on snprintf (str1, cst, "%s").  */
      if (!orig)
	return false;

      /* The length of ORIG must be a compile-time constant to prove the
	 copy fits and to materialize the return value.  */
      tree orig_len = get_maxval_strlen (orig, 0);
      if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
	return false;

      /* We could expand this as
	 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
	 or to
	 memcpy (str1, str2_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      if (compare_tree_int (orig_len, destlen) >= 0)
	return false;

      /* Convert snprintf (str1, cst, "%s", str2) into
	 strcpy (str1, str2) if strlen (str2) < cst.  */
      gimple_seq stmts = NULL;
      gimple repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (integer_type_node,
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (integer_type_node, orig_len);
	  repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
2563
/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
   FP, FMT, and ARG are the arguments to the call.  We don't fold calls with
   more than 3 arguments, and ARG may be null in the 2-argument case.

   Returns true if the call was simplified into fputs or fputc, false
   otherwise.  FCODE is the BUILT_IN_* code of the function to be
   simplified.  */

static bool
gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
			     tree fp, tree fmt, tree arg,
			     enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_fputc, fn_fputs;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
      fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
    }
  else
    {
      fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
      fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
    }

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use fputs.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
	  && arg)
	return false;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* When "string" doesn't contain %, replace all cases of
	 fprintf (fp, string) with fputs (string, fp).  The fputs
	 builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return false;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg
	  || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
	return false;
      if (fn_fputc)
	{
	  gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
2664
/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
   FMT and ARG are the arguments to the call; we don't fold cases with
   more than 2 arguments, and ARG may be null if this is a 1-argument case.

   Returns true if the call was simplified into puts or putchar (or
   removed entirely for an empty string), false otherwise.  FCODE is the
   BUILT_IN_* code of the function to be simplified.  */

static bool
gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
			    tree arg, enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_putchar, fn_puts, newarg;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
      fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
    }
  else
    {
      fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
      fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
    }

  if (!init_target_chars ())
    return false;

  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      /* STR is the string actually printed: ARG's literal for "%s",
	 otherwise the %-free format string itself.  */
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
	{
	  if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
	    return false;

	  if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	    return false;

	  str = c_getstr (arg);
	  if (str == NULL)
	    return false;
	}
      else
	{
	  /* The format specifier doesn't contain any '%' characters.  */
	  if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
	      && arg)
	    return false;
	  str = fmt_str;
	}

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
	{
	  /* Given printf("c"), (where c is any one character,)
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
	  newarg = build_int_cst (integer_type_node, str[0]);
	  if (fn_putchar)
	    {
	      gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	{
	  /* If the string was "string\n", call puts("string").  */
	  size_t len = strlen (str);
	  if ((unsigned char)str[len - 1] == target_newline
	      && (size_t) (int) len == len
	      && (int) len > 0)
	    {
	      char *newstr;
	      tree offset_node, string_cst;

	      /* Create a NUL-terminated string that's one char shorter
		 than the original, stripping off the trailing '\n'.  */
	      newarg = build_string_literal (len, str);
	      string_cst = string_constant (newarg, &offset_node);
	      gcc_checking_assert (string_cst
				   && (TREE_STRING_LENGTH (string_cst)
				       == (int) len)
				   && integer_zerop (offset_node)
				   && (unsigned char)
				      TREE_STRING_POINTER (string_cst)[len - 1]
				      == target_newline);
	      /* build_string_literal creates a new STRING_CST,
		 modify it in place to avoid double copying.  */
	      newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
	      newstr[len - 1] = '\0';
	      if (fn_puts)
		{
		  gcall *repl = gimple_build_call (fn_puts, 1, newarg);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	    }
	  else
	    /* We'd like to arrange to call fputs(string,stdout) here,
	       but we need stdout and don't have a way to get it yet.  */
	    return false;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return false;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_puts)
	{
	  gcall *repl = gimple_build_call (fn_puts, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || ! useless_type_conversion_p (integer_type_node,
					       TREE_TYPE (arg)))
	return false;
      if (fn_putchar)
	{
	  gcall *repl = gimple_build_call (fn_putchar, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
2826
2827
2828
/* Fold a call to __builtin_strlen at *GSI when the length of its argument
   can be determined at compile time; replaces the call with that constant.
   Returns true if the call was simplified.  */

static bool
gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  /* Ask the strlen machinery for a known length of argument 0.  */
  tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
  if (!len)
    return false;
  /* Make LEN a valid GIMPLE operand before substituting it.  */
  len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
  replace_call_with_value (gsi, len);
  return true;
}
2842
2843
2844 /* Fold the non-target builtin at *GSI and return whether any simplification
2845 was made. */
2846
2847 static bool
2848 gimple_fold_builtin (gimple_stmt_iterator *gsi)
2849 {
2850 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
2851 tree callee = gimple_call_fndecl (stmt);
2852
2853 /* Give up for always_inline inline builtins until they are
2854 inlined. */
2855 if (avoid_folding_inline_builtin (callee))
2856 return false;
2857
2858 unsigned n = gimple_call_num_args (stmt);
2859 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2860 switch (fcode)
2861 {
2862 case BUILT_IN_BZERO:
2863 return gimple_fold_builtin_memset (gsi, integer_zero_node,
2864 gimple_call_arg (stmt, 1));
2865 case BUILT_IN_MEMSET:
2866 return gimple_fold_builtin_memset (gsi,
2867 gimple_call_arg (stmt, 1),
2868 gimple_call_arg (stmt, 2));
2869 case BUILT_IN_BCOPY:
2870 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
2871 gimple_call_arg (stmt, 0), 3);
2872 case BUILT_IN_MEMCPY:
2873 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2874 gimple_call_arg (stmt, 1), 0);
2875 case BUILT_IN_MEMPCPY:
2876 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2877 gimple_call_arg (stmt, 1), 1);
2878 case BUILT_IN_MEMMOVE:
2879 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2880 gimple_call_arg (stmt, 1), 3);
2881 case BUILT_IN_SPRINTF_CHK:
2882 case BUILT_IN_VSPRINTF_CHK:
2883 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
2884 case BUILT_IN_STRCAT_CHK:
2885 return gimple_fold_builtin_strcat_chk (gsi);
2886 case BUILT_IN_STRNCAT_CHK:
2887 return gimple_fold_builtin_strncat_chk (gsi);
2888 case BUILT_IN_STRLEN:
2889 return gimple_fold_builtin_strlen (gsi);
2890 case BUILT_IN_STRCPY:
2891 return gimple_fold_builtin_strcpy (gsi,
2892 gimple_call_arg (stmt, 0),
2893 gimple_call_arg (stmt, 1));
2894 case BUILT_IN_STRNCPY:
2895 return gimple_fold_builtin_strncpy (gsi,
2896 gimple_call_arg (stmt, 0),
2897 gimple_call_arg (stmt, 1),
2898 gimple_call_arg (stmt, 2));
2899 case BUILT_IN_STRCAT:
2900 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
2901 gimple_call_arg (stmt, 1));
2902 case BUILT_IN_STRNCAT:
2903 return gimple_fold_builtin_strncat (gsi);
2904 case BUILT_IN_FPUTS:
2905 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2906 gimple_call_arg (stmt, 1), false);
2907 case BUILT_IN_FPUTS_UNLOCKED:
2908 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2909 gimple_call_arg (stmt, 1), true);
2910 case BUILT_IN_MEMCPY_CHK:
2911 case BUILT_IN_MEMPCPY_CHK:
2912 case BUILT_IN_MEMMOVE_CHK:
2913 case BUILT_IN_MEMSET_CHK:
2914 return gimple_fold_builtin_memory_chk (gsi,
2915 gimple_call_arg (stmt, 0),
2916 gimple_call_arg (stmt, 1),
2917 gimple_call_arg (stmt, 2),
2918 gimple_call_arg (stmt, 3),
2919 fcode);
2920 case BUILT_IN_STPCPY:
2921 return gimple_fold_builtin_stpcpy (gsi);
2922 case BUILT_IN_STRCPY_CHK:
2923 case BUILT_IN_STPCPY_CHK:
2924 return gimple_fold_builtin_stxcpy_chk (gsi,
2925 gimple_call_arg (stmt, 0),
2926 gimple_call_arg (stmt, 1),
2927 gimple_call_arg (stmt, 2),
2928 fcode);
2929 case BUILT_IN_STRNCPY_CHK:
2930 case BUILT_IN_STPNCPY_CHK:
2931 return gimple_fold_builtin_stxncpy_chk (gsi,
2932 gimple_call_arg (stmt, 0),
2933 gimple_call_arg (stmt, 1),
2934 gimple_call_arg (stmt, 2),
2935 gimple_call_arg (stmt, 3),
2936 fcode);
2937 case BUILT_IN_SNPRINTF_CHK:
2938 case BUILT_IN_VSNPRINTF_CHK:
2939 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
2940 case BUILT_IN_SNPRINTF:
2941 return gimple_fold_builtin_snprintf (gsi);
2942 case BUILT_IN_SPRINTF:
2943 return gimple_fold_builtin_sprintf (gsi);
2944 case BUILT_IN_FPRINTF:
2945 case BUILT_IN_FPRINTF_UNLOCKED:
2946 case BUILT_IN_VFPRINTF:
2947 if (n == 2 || n == 3)
2948 return gimple_fold_builtin_fprintf (gsi,
2949 gimple_call_arg (stmt, 0),
2950 gimple_call_arg (stmt, 1),
2951 n == 3
2952 ? gimple_call_arg (stmt, 2)
2953 : NULL_TREE,
2954 fcode);
2955 break;
2956 case BUILT_IN_FPRINTF_CHK:
2957 case BUILT_IN_VFPRINTF_CHK:
2958 if (n == 3 || n == 4)
2959 return gimple_fold_builtin_fprintf (gsi,
2960 gimple_call_arg (stmt, 0),
2961 gimple_call_arg (stmt, 2),
2962 n == 4
2963 ? gimple_call_arg (stmt, 3)
2964 : NULL_TREE,
2965 fcode);
2966 break;
2967 case BUILT_IN_PRINTF:
2968 case BUILT_IN_PRINTF_UNLOCKED:
2969 case BUILT_IN_VPRINTF:
2970 if (n == 1 || n == 2)
2971 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
2972 n == 2
2973 ? gimple_call_arg (stmt, 1)
2974 : NULL_TREE, fcode);
2975 break;
2976 case BUILT_IN_PRINTF_CHK:
2977 case BUILT_IN_VPRINTF_CHK:
2978 if (n == 2 || n == 3)
2979 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
2980 n == 3
2981 ? gimple_call_arg (stmt, 2)
2982 : NULL_TREE, fcode);
2983 default:;
2984 }
2985
2986 /* Try the generic builtin folder. */
2987 bool ignore = (gimple_call_lhs (stmt) == NULL);
2988 tree result = fold_call_stmt (stmt, ignore);
2989 if (result)
2990 {
2991 if (ignore)
2992 STRIP_NOPS (result);
2993 else
2994 result = fold_convert (gimple_call_return_type (stmt), result);
2995 if (!update_call_from_tree (gsi, result))
2996 gimplify_and_update_call_from_tree (gsi, result);
2997 return true;
2998 }
2999
3000 return false;
3001 }
3002
/* Return true if ARG0 CODE ARG1 in infinite signed precision operation
   doesn't fit into TYPE.  The test for overflow should be regardless of
   -fwrapv, and even for unsigned types.  */

bool
arith_overflowed_p (enum tree_code code, const_tree type,
		    const_tree arg0, const_tree arg1)
{
  /* Perform the operation at twice the maximal wide-int precision so the
     computation itself can never wrap.  */
  typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
  typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
    widest2_int_cst;
  widest2_int warg0 = widest2_int_cst (arg0);
  widest2_int warg1 = widest2_int_cst (arg1);
  widest2_int wres;
  switch (code)
    {
    case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
    case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
    case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
    default: gcc_unreachable ();
    }
  signop sign = TYPE_SIGN (type);
  /* A negative result can never be represented in an unsigned TYPE.  */
  if (sign == UNSIGNED && wi::neg_p (wres))
    return true;
  /* Otherwise it overflows iff it needs more bits than TYPE provides.  */
  return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
}
3029
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.
   If INPLACE is true, no new statements may be created and the existing
   statement may not be replaced, only simplified in place.  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
	tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	if (tmp)
	  {
	    gimple_call_set_arg (stmt, i, tmp);
	    changed = true;
	  }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  /* The OBJ_TYPE_REF already wraps a known function address;
	     strip the wrapper.  Warn in the dump file if the target is
	     not among the possible polymorphic call targets, which
	     indicates an inconsistency in the type inheritance graph.  */
	  if (dump_file && virtual_method_call_p (callee)
	      && !possible_polymorphic_call_target_p
		    (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
						     (OBJ_TYPE_REF_EXPR (callee)))))
	    {
	      fprintf (dump_file,
		       "Type inheritance inconsistent devirtualization of ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, callee, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }

	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  changed = true;
	}
      else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
	{
	  /* Try speculative devirtualization: if type analysis proves
	     the set of possible targets is final and has at most one
	     member, the call can be folded to a direct call (or to
	     __builtin_unreachable when no target exists).  */
	  bool final;
	  vec <cgraph_node *>targets
	    = possible_polymorphic_call_targets (callee, stmt, &final);
	  if (final && targets.length () <= 1 && dbg_cnt (devirt))
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (dump_enabled_p ())
		{
		  location_t loc = gimple_location_safe (stmt);
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
				   "folding virtual function call to %s\n",
				   targets.length () == 1
				   ? targets[0]->name ()
				   : "__builtin_unreachable");
		}
	      if (targets.length () == 1)
		{
		  gimple_call_set_fndecl (stmt, targets[0]->decl);
		  changed = true;
		  /* If the call becomes noreturn, remove the lhs.  */
		  if (lhs && (gimple_call_flags (stmt) & ECF_NORETURN))
		    {
		      /* An SSA lhs must still get a definition; assign it
			 a default-definition value before dropping it.  */
		      if (TREE_CODE (lhs) == SSA_NAME)
			{
			  tree var = create_tmp_var (TREE_TYPE (lhs));
			  tree def = get_or_create_ssa_default_def (cfun, var);
			  gimple new_stmt = gimple_build_assign (lhs, def);
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      gimple_call_set_lhs (stmt, NULL_TREE);
		    }
		  maybe_remove_unused_call_args (cfun, stmt);
		}
	      else
		{
		  /* No possible target: the path is dead, replace the
		     call with __builtin_unreachable.  */
		  tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
		  gimple new_stmt = gimple_build_call (fndecl, 0);
		  gimple_set_location (new_stmt, gimple_location (stmt));
		  if (lhs && TREE_CODE (lhs) == SSA_NAME)
		    {
		      tree var = create_tmp_var (TREE_TYPE (lhs));
		      tree def = get_or_create_ssa_default_def (cfun, var);

		      /* To satisfy condition for
			 cgraph_update_edges_for_call_stmt_node,
			 we need to preserve GIMPLE_CALL statement
			 at position of GSI iterator.  */
		      update_call_from_tree (gsi, def);
		      gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
		    }
		  else
		    {
		      /* Carry over the virtual operands to the
			 replacement statement.  */
		      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
		      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
		      gsi_replace (gsi, new_stmt, false);
		    }
		  return true;
		}
	    }
	}
    }

  /* Check for indirect calls that became direct calls, and then
     no longer require a static chain.  */
  if (gimple_call_chain (stmt))
    {
      tree fn = gimple_call_fndecl (stmt);
      if (fn && !DECL_STATIC_CHAIN (fn))
	{
	  gimple_call_set_chain (stmt, NULL);
	  changed = true;
	}
      else
	{
	  /* Otherwise at least fold *& in the static chain operand.  */
	  tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
	  if (tmp)
	    {
	      gimple_call_set_chain (stmt, tmp);
	      changed = true;
	    }
	}
    }

  /* Everything below may replace or create statements.  */
  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      if (gimple_fold_builtin (gsi))
	changed = true;
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
    {
      changed |= targetm.gimple_fold_builtin (gsi);
    }
  else if (gimple_call_internal_p (stmt))
    {
      /* Fold internal functions: __builtin_expect, the UBSAN checks and
	 the arithmetic-with-overflow functions.  SUBCODE is the tree
	 arithmetic code the internal function performs; CPLX_RESULT is
	 set for the *_OVERFLOW variants whose result is a complex pair
	 (value, overflow flag).  */
      enum tree_code subcode = ERROR_MARK;
      tree result = NULL_TREE;
      bool cplx_result = false;
      tree overflow = NULL_TREE;
      switch (gimple_call_internal_fn (stmt))
	{
	case IFN_BUILTIN_EXPECT:
	  result = fold_builtin_expect (gimple_location (stmt),
					gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1),
					gimple_call_arg (stmt, 2));
	  break;
	case IFN_UBSAN_OBJECT_SIZE:
	  /* Remove the check when it can never fail: unknown object
	     size (all-ones) or a provably in-bounds constant access.  */
	  if (integer_all_onesp (gimple_call_arg (stmt, 2))
	      || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
		  && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
		  && tree_int_cst_le (gimple_call_arg (stmt, 1),
				      gimple_call_arg (stmt, 2))))
	    {
	      gsi_replace (gsi, gimple_build_nop (), true);
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	      return true;
	    }
	  break;
	case IFN_UBSAN_CHECK_ADD:
	  subcode = PLUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_SUB:
	  subcode = MINUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_MUL:
	  subcode = MULT_EXPR;
	  break;
	case IFN_ADD_OVERFLOW:
	  subcode = PLUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_SUB_OVERFLOW:
	  subcode = MINUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_MUL_OVERFLOW:
	  subcode = MULT_EXPR;
	  cplx_result = true;
	  break;
	default:
	  break;
	}
      if (subcode != ERROR_MARK)
	{
	  tree arg0 = gimple_call_arg (stmt, 0);
	  tree arg1 = gimple_call_arg (stmt, 1);
	  tree type = TREE_TYPE (arg0);
	  if (cplx_result)
	    {
	      /* For the *_OVERFLOW functions the relevant type is the
		 element type of the complex lhs, if there is one.  */
	      tree lhs = gimple_call_lhs (stmt);
	      if (lhs == NULL_TREE)
		type = NULL_TREE;
	      else
		type = TREE_TYPE (TREE_TYPE (lhs));
	    }
	  if (type == NULL_TREE)
	    ;
	  /* x = y + 0; x = y - 0; x = y * 0; */
	  else if (integer_zerop (arg1))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg0;
	  /* x = 0 + y; x = 0 * y; */
	  else if (subcode != MINUS_EXPR && integer_zerop (arg0))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg1;
	  /* x = y - y; */
	  else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
	    result = integer_zero_node;
	  /* x = y * 1; x = 1 * y; */
	  else if (subcode == MULT_EXPR && integer_onep (arg1))
	    result = arg0;
	  else if (subcode == MULT_EXPR && integer_onep (arg0))
	    result = arg1;
	  else if (TREE_CODE (arg0) == INTEGER_CST
		   && TREE_CODE (arg1) == INTEGER_CST)
	    {
	      /* Constant-fold, then check whether the infinite-precision
		 result overflows TYPE.  For UBSAN checks an overflow
		 blocks the fold; for *_OVERFLOW it sets the flag.  */
	      if (cplx_result)
		result = int_const_binop (subcode, fold_convert (type, arg0),
					  fold_convert (type, arg1));
	      else
		result = int_const_binop (subcode, arg0, arg1);
	      if (result && arith_overflowed_p (subcode, type, arg0, arg1))
		{
		  if (cplx_result)
		    overflow = build_one_cst (type);
		  else
		    result = NULL_TREE;
		}
	    }
	  if (result)
	    {
	      if (result == integer_zero_node)
		result = build_zero_cst (type);
	      else if (cplx_result && TREE_TYPE (result) != type)
		{
		  /* The result has a different type than the lhs element
		     type; converting it may itself change the overflow
		     status, or be unsafe altogether.  */
		  if (TREE_CODE (result) == INTEGER_CST)
		    {
		      if (arith_overflowed_p (PLUS_EXPR, type, result,
					      integer_zero_node))
			overflow = build_one_cst (type);
		    }
		  else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
			    && TYPE_UNSIGNED (type))
			   || (TYPE_PRECISION (type)
			       < (TYPE_PRECISION (TREE_TYPE (result))
				  + (TYPE_UNSIGNED (TREE_TYPE (result))
				     && !TYPE_UNSIGNED (type)))))
		    result = NULL_TREE;
		  if (result)
		    result = fold_convert (type, result);
		}
	    }
	}

      if (result)
	{
	  if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
	    result = drop_tree_overflow (result);
	  if (cplx_result)
	    {
	      /* Package (value, overflow flag) into the complex result
		 expected from the *_OVERFLOW internal functions.  */
	      if (overflow == NULL_TREE)
		overflow = build_zero_cst (TREE_TYPE (result));
	      tree ctype = build_complex_type (TREE_TYPE (result));
	      if (TREE_CODE (result) == INTEGER_CST
		  && TREE_CODE (overflow) == INTEGER_CST)
		result = build_complex (ctype, result, overflow);
	      else
		result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
				     ctype, result, overflow);
	    }
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
    }

  return changed;
}
3321
3322
/* Worker for fold_stmt_1 dispatch to pattern based folding with
   gimple_simplify.

   Replaces *GSI with the simplification result in RCODE and OPS
   and the associated statements in *SEQ.  Does the replacement
   according to INPLACE and returns true if the operation succeeded.  */

static bool
replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
				  code_helper rcode, tree *ops,
				  gimple_seq *seq, bool inplace)
{
  gimple stmt = gsi_stmt (*gsi);

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  See also maybe_push_res_to_seq.  */
  if ((TREE_CODE (ops[0]) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0]))
      || (ops[1]
	  && TREE_CODE (ops[1]) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1]))
      || (ops[2]
	  && TREE_CODE (ops[2]) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])))
    return false;

  if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      gcc_assert (rcode.is_tree_code ());
      /* A comparison result can be put directly into the GIMPLE_COND,
	 provided it cannot throw.  */
      if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
	  /* GIMPLE_CONDs condition may not throw.  */
	  && (!flag_exceptions
	      || !cfun->can_throw_non_call_exceptions
	      || !operation_could_trap_p (rcode,
					  FLOAT_TYPE_P (TREE_TYPE (ops[0])),
					  false, NULL_TREE)))
	gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
      else if (rcode == SSA_NAME)
	/* Simplified to a bare SSA name: test it against zero.  */
	gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
				   build_zero_cst (TREE_TYPE (ops[0])));
      else if (rcode == INTEGER_CST)
	{
	  /* Simplified to a constant: the condition is statically
	     known, make it unconditionally false or true.  */
	  if (integer_zerop (ops[0]))
	    gimple_cond_make_false (cond_stmt);
	  else
	    gimple_cond_make_true (cond_stmt);
	}
      else if (!inplace)
	{
	  /* Otherwise materialize the simplified expression into SEQ
	     and compare its result against zero.  */
	  tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
					    ops, seq);
	  if (!res)
	    return false;
	  gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
				     build_zero_cst (TREE_TYPE (res)));
	}
      else
	return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi),
			     0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (is_gimple_assign (stmt)
	   && rcode.is_tree_code ())
    {
      /* For assignments only replace the rhs when either we may create
	 new statements or the replacement does not grow the number of
	 rhs operands (required for in-place folding).  */
      if (!inplace
	  || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
	{
	  maybe_build_generic_op (rcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)),
				  &ops[0], ops[1], ops[2]);
	  gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      if (!gimple_seq_empty_p (*seq))
		print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	      print_gimple_stmt (dump_file, gsi_stmt (*gsi),
				 0, TDF_SLIM);
	    }
	  gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
	  return true;
	}
    }
  else if (!inplace)
    {
      /* Other statement kinds (e.g. calls) with an lhs: build the
	 simplification into SEQ, defining the existing lhs, and replace
	 the statement with that sequence.  */
      if (gimple_has_lhs (stmt))
	{
	  tree lhs = gimple_get_lhs (stmt);
	  if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
				      ops, seq, lhs))
	    return false;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	    }
	  gsi_replace_with_seq_vops (gsi, *seq);
	  return true;
	}
      else
	gcc_unreachable ();
    }

  return false;
}
3436
/* Canonicalize MEM_REFs invariant address operand after propagation.
   T points at the reference tree to canonicalize; it is rewritten in
   place.  Return true if *T was changed.  */

static bool
maybe_canonicalize_mem_ref_addr (tree *t)
{
  bool res = false;

  /* Strip an outer ADDR_EXPR and any handled components so *T points
     at the base memory reference.  */
  if (TREE_CODE (*t) == ADDR_EXPR)
    t = &TREE_OPERAND (*t, 0);

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
     of invariant addresses into a SSA name MEM_REF address.  */
  if (TREE_CODE (*t) == MEM_REF
      || TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (*t, 0);
      if (TREE_CODE (addr) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
	      || handled_component_p (TREE_OPERAND (addr, 0))))
	{
	  /* Fold the component reference in the address into the
	     MEM_REF's constant offset operand.  */
	  tree base;
	  HOST_WIDE_INT coffset;
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						&coffset);
	  if (!base)
	    gcc_unreachable ();

	  TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
	  TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
						  TREE_OPERAND (*t, 1),
						  size_int (coffset));
	  res = true;
	}
      gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
			   || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
    }

  /* Canonicalize back MEM_REFs to plain reference trees if the object
     accessed is a decl that has the same access semantics as the MEM_REF.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && MR_DEPENDENCE_CLIQUE (*t) == 0)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
      if (/* Same volatile qualification.  */
	  TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
	  /* Same TBAA behavior with -fstrict-aliasing.  */
	  && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
	  && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
	      == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
	  /* Same alignment.  */
	  && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
	  /* We have to look out here to not drop a required conversion
	     from the rhs to the lhs if *t appears on the lhs or vice-versa
	     if it appears on the rhs.  Thus require strict type
	     compatibility.  */
	  && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
	{
	  *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
	  res = true;
	}
    }

  /* Canonicalize TARGET_MEM_REF in particular with respect to
     the indexes becoming constant.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  res = true;
	}
    }

  return res;
}
3519
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  VALUEIZE is the callback used to look
   through SSA edges when simplifying.  Return true if the statement
   pointed to by GSI was changed.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  unsigned i;

  /* First do required canonicalization of [TARGET_]MEM_REF addresses
     after propagation.
     ??? This shouldn't be done in generic folding but in the
     propagation helpers which also know whether an address was
     propagated.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
	{
	  tree *rhs = gimple_assign_rhs1_ptr (stmt);
	  if ((REFERENCE_CLASS_P (*rhs)
	       || TREE_CODE (*rhs) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (rhs))
	    changed = true;
	  tree *lhs = gimple_assign_lhs_ptr (stmt);
	  if (REFERENCE_CLASS_P (*lhs)
	      && maybe_canonicalize_mem_ref_addr (lhs))
	    changed = true;
	}
      break;
    case GIMPLE_CALL:
      {
	/* Canonicalize all call arguments and the call lhs.  */
	for (i = 0; i < gimple_call_num_args (stmt); ++i)
	  {
	    tree *arg = gimple_call_arg_ptr (stmt, i);
	    if (REFERENCE_CLASS_P (*arg)
		&& maybe_canonicalize_mem_ref_addr (arg))
	      changed = true;
	  }
	tree *lhs = gimple_call_lhs_ptr (stmt);
	if (*lhs
	    && REFERENCE_CLASS_P (*lhs)
	    && maybe_canonicalize_mem_ref_addr (lhs))
	  changed = true;
	break;
      }
    case GIMPLE_ASM:
      {
	/* Canonicalize asm output and input operands.  */
	gasm *asm_stmt = as_a <gasm *> (stmt);
	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if (REFERENCE_CLASS_P (op)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if ((REFERENCE_CLASS_P (op)
		 || TREE_CODE (op) == ADDR_EXPR)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
      }
      break;
    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree *val = gimple_debug_bind_get_value_ptr (stmt);
	  if (*val
	      && (REFERENCE_CLASS_P (*val)
		  || TREE_CODE (*val) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (val))
	    changed = true;
	}
      break;
    default:;
    }

  /* Dispatch to pattern-based folding.  */
  if (!inplace
      || is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_COND)
    {
      gimple_seq seq = NULL;
      code_helper rcode;
      tree ops[3] = {};
      /* In-place folding may not create new statements, so pass a NULL
	 sequence to restrict gimple_simplify accordingly.  */
      if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq,
			   valueize, valueize))
	{
	  if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
	    changed = true;
	  else
	    gimple_seq_discard (seq);
	}
    }

  /* Re-read the statement; the previous step may have replaced it.  */
  stmt = gsi_stmt (*gsi);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	unsigned old_num_ops = gimple_num_ops (stmt);
	enum tree_code subcode = gimple_assign_rhs_code (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs;
	/* First canonicalize operand order.  This avoids building new
	   trees if this is the only thing fold would later do.  */
	if ((commutative_tree_code (subcode)
	     || commutative_ternary_tree_code (subcode))
	    && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
				     gimple_assign_rhs2 (stmt), false))
	  {
	    tree tem = gimple_assign_rhs1 (stmt);
	    gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
	    gimple_assign_set_rhs2 (stmt, tem);
	    changed = true;
	  }
	new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	/* In-place folding may only shrink the number of rhs operands.  */
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (as_a <gcond *> (stmt));
      break;

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	size_t noutputs;
	const char **oconstraints;
	const char *constraint;
	bool allows_mem, allows_reg;

	noutputs = gimple_asm_noutputs (asm_stmt);
	oconstraints = XALLOCAVEC (const char *, noutputs);

	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    oconstraints[i]
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, true)) != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    constraint
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    /* The constraint decides whether the input may be folded to
	       a memory reference (is_lhs argument below).  */
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, !allows_reg && allows_mem))
		   != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      /* Fold the reference below the ADDR_EXPR and rebuild the
		 address with the original type.  */
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref, false);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  return changed;
}
3762
/* Valueization callback that ends up not following SSA edges.  */

tree
no_follow_ssa_edges (tree)
{
  return NULL_TREE;
}
3770
3771 /* Valueization callback that ends up following single-use SSA edges only. */
3772
3773 tree
3774 follow_single_use_edges (tree val)
3775 {
3776 if (TREE_CODE (val) == SSA_NAME
3777 && !has_single_use (val))
3778 return NULL_TREE;
3779 return val;
3780 }
3781
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  /* Do not follow SSA use-def edges during simplification.  */
  return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
}
3794
/* Likewise, but look through SSA use-def edges with the VALUEIZE
   callback when simplifying.  */

bool
fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
{
  return fold_stmt_1 (gsi, false, valueize);
}
3800
/* Perform the minimal folding on statement *GSI.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement *GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
  /* In-place folding must not have replaced the statement.  */
  gcc_assert (gsi_stmt (*gsi) == stmt);
  return changed;
}
3817
3818 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
3819 if EXPR is null or we don't know how.
3820 If non-null, the result always has boolean type. */
3821
3822 static tree
3823 canonicalize_bool (tree expr, bool invert)
3824 {
3825 if (!expr)
3826 return NULL_TREE;
3827 else if (invert)
3828 {
3829 if (integer_nonzerop (expr))
3830 return boolean_false_node;
3831 else if (integer_zerop (expr))
3832 return boolean_true_node;
3833 else if (TREE_CODE (expr) == SSA_NAME)
3834 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
3835 build_int_cst (TREE_TYPE (expr), 0));
3836 else if (COMPARISON_CLASS_P (expr))
3837 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
3838 boolean_type_node,
3839 TREE_OPERAND (expr, 0),
3840 TREE_OPERAND (expr, 1));
3841 else
3842 return NULL_TREE;
3843 }
3844 else
3845 {
3846 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
3847 return expr;
3848 if (integer_nonzerop (expr))
3849 return boolean_true_node;
3850 else if (integer_zerop (expr))
3851 return boolean_false_node;
3852 else if (TREE_CODE (expr) == SSA_NAME)
3853 return fold_build2 (NE_EXPR, boolean_type_node, expr,
3854 build_int_cst (TREE_TYPE (expr), 0));
3855 else if (COMPARISON_CLASS_P (expr))
3856 return fold_build2 (TREE_CODE (expr),
3857 boolean_type_node,
3858 TREE_OPERAND (expr, 0),
3859 TREE_OPERAND (expr, 1));
3860 else
3861 return NULL_TREE;
3862 }
3863 }
3864
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      /* A boolean name equals (name != 0) and (name == nonzero).  */
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      /* Otherwise look at EXPR's defining statement.  */
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  /* (name != 0) or (name == nonzero) preserves the comparison;
	     recurse with the comparison that defines name.  */
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  /* (name == 0) or (name != nonzero) inverts it.  */
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}
3922
3923 /* Check to see if two boolean expressions OP1 and OP2 are logically
3924 equivalent. */
3925
3926 static bool
3927 same_bool_result_p (const_tree op1, const_tree op2)
3928 {
3929 /* Simple cases first. */
3930 if (operand_equal_p (op1, op2, 0))
3931 return true;
3932
3933 /* Check the cases where at least one of the operands is a comparison.
3934 These are a bit smarter than operand_equal_p in that they apply some
3935 identifies on SSA_NAMEs. */
3936 if (COMPARISON_CLASS_P (op2)
3937 && same_bool_comparison_p (op1, TREE_CODE (op2),
3938 TREE_OPERAND (op2, 0),
3939 TREE_OPERAND (op2, 1)))
3940 return true;
3941 if (COMPARISON_CLASS_P (op1)
3942 && same_bool_comparison_p (op2, TREE_CODE (op1),
3943 TREE_OPERAND (op1, 0),
3944 TREE_OPERAND (op1, 1)))
3945 return true;
3946
3947 /* Default case. */
3948 return false;
3949 }
3950
3951 /* Forward declarations for some mutually recursive functions. */
3952
3953 static tree
3954 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
3955 enum tree_code code2, tree op2a, tree op2b);
3956 static tree
3957 and_var_with_comparison (tree var, bool invert,
3958 enum tree_code code2, tree op2a, tree op2b);
3959 static tree
3960 and_var_with_comparison_1 (gimple stmt,
3961 enum tree_code code2, tree op2a, tree op2b);
3962 static tree
3963 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
3964 enum tree_code code2, tree op2a, tree op2b);
3965 static tree
3966 or_var_with_comparison (tree var, bool invert,
3967 enum tree_code code2, tree op2a, tree op2b);
3968 static tree
3969 or_var_with_comparison_1 (gimple stmt,
3970 enum tree_code code2, tree op2a, tree op2b);
3971
3972 /* Helper function for and_comparisons_1: try to simplify the AND of the
3973 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
3974 If INVERT is true, invert the value of the VAR before doing the AND.
3975 Return NULL_EXPR if we can't simplify this to a single expression. */
3976
3977 static tree
3978 and_var_with_comparison (tree var, bool invert,
3979 enum tree_code code2, tree op2a, tree op2b)
3980 {
3981 tree t;
3982 gimple stmt = SSA_NAME_DEF_STMT (var);
3983
3984 /* We can only deal with variables whose definitions are assignments. */
3985 if (!is_gimple_assign (stmt))
3986 return NULL_TREE;
3987
3988 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
3989 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
3990 Then we only have to consider the simpler non-inverted cases. */
3991 if (invert)
3992 t = or_var_with_comparison_1 (stmt,
3993 invert_tree_comparison (code2, false),
3994 op2a, op2b);
3995 else
3996 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
3997 return canonicalize_bool (t, invert);
3998 }
3999
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  /* If the second comparison is just a truth test of the boolean SSA name
     OP2A, one of these records which sense of the test it is.  */
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  /* (op2a != 0) or (op2a == 1): the comparison tests OP2A for
	     truth, so (var AND (var != 0)) => var.  */
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  /* (op2a == 0) or (op2a != 1): the comparison tests OP2A for
	     falsehood, so (var AND (var == 0)) => false.  */
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2,
				  op2a,
				  op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      /* Simplification of (inner1 AND (op2a code2 op2b)), if one was
	 found, saved while we try the second inner operand.  */
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
4162
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      /* CMP orders the two constants: <0, 0, >0 for op1b <, ==, > op2b.  */
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
	 return that or FALSE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	      else
		return boolean_false_node;
	    }
	}
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	      else
		return boolean_false_node;
	    }
	}

      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp != 0); break;
	    case NE_EXPR: val = (cmp == 0); break;
	    case LT_EXPR: val = (cmp >= 0); break;
	    case GT_EXPR: val = (cmp <= 0); break;
	    case LE_EXPR: val = (cmp > 0); break;
	    case GE_EXPR: val = (cmp < 0); break;
	    default:
	      val = false;
	    }
	  /* If the second comparison implies (op1a != op1b), the AND
	     reduces to just the second comparison.  */
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == NE_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp <= 0); break;
	    case GT_EXPR: val = (cmp >= 0); break;
	    case LE_EXPR: val = (cmp < 0); break;
	    case GE_EXPR: val = (cmp > 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Likewise choose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Check for singleton ranges: (x <= c AND x >= c) => x == c.  */
      else if (cmp == 0
	       && ((code1 == LE_EXPR && code2 == GE_EXPR)
		   || (code1 == GE_EXPR && code2 == LE_EXPR)))
	return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges.  */
      else if (cmp <= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_false_node;
      else if (cmp >= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* INVERT is true when the first comparison tests NAME for
	 falsehood ((NAME == 0) or (NAME != 1)) rather than truth.  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  /* This arm makes the first operand false, so the
			     AND for this arm is false.  */
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  Only look through the argument's
			 definition when it strictly dominates the PHI,
			 i.e. it cannot be part of a cycle through it.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (arg, invert, code2,
						      op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
4416
4417 /* Try to simplify the AND of two comparisons, specified by
4418 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4419 If this can be simplified to a single expression (without requiring
4420 introducing more SSA variables to hold intermediate values),
4421 return the resulting tree. Otherwise return NULL_TREE.
4422 If the result expression is non-null, it has boolean type. */
4423
4424 tree
4425 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
4426 enum tree_code code2, tree op2a, tree op2b)
4427 {
4428 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4429 if (t)
4430 return t;
4431 else
4432 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4433 }
4434
4435 /* Helper function for or_comparisons_1: try to simplify the OR of the
4436 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4437 If INVERT is true, invert the value of VAR before doing the OR.
4438 Return NULL_EXPR if we can't simplify this to a single expression. */
4439
4440 static tree
4441 or_var_with_comparison (tree var, bool invert,
4442 enum tree_code code2, tree op2a, tree op2b)
4443 {
4444 tree t;
4445 gimple stmt = SSA_NAME_DEF_STMT (var);
4446
4447 /* We can only deal with variables whose definitions are assignments. */
4448 if (!is_gimple_assign (stmt))
4449 return NULL_TREE;
4450
4451 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4452 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
4453 Then we only have to consider the simpler non-inverted cases. */
4454 if (invert)
4455 t = and_var_with_comparison_1 (stmt,
4456 invert_tree_comparison (code2, false),
4457 op2a, op2b);
4458 else
4459 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
4460 return canonicalize_bool (t, invert);
4461 }
4462
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (gimple stmt,
			  enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  /* If the second comparison is just a truth test of the boolean SSA name
     OP2A, one of these records which sense of the test it is.  */
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true .  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  /* (op2a != 0) or (op2a == 1): the comparison tests OP2A for
	     truth, so (var OR (var != 0)) => var.  */
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  /* (op2a == 0) or (op2a != 1): the comparison tests OP2A for
	     falsehood, so (var OR (var == 0)) => true.  */
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_true_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (innercode,
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 code2,
				 op2a,
				 op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      /* Simplification of (inner1 OR (op2a code2 op2b)), if one was
	 found, saved while we try the second inner operand.  */
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
	 inner1 OR (inner1 AND inner2) => inner1
	 !inner1 OR (inner1 OR inner2) => true
	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
      */
      if (inner1 == true_test_var)
	return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the OR across the inner tests.
	 Compute the first partial result, (inner1 OR (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (t OR inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_or)
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (integer_zerop (t))
		return inner2;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner2 OR (op2a code op2b)))  */
	  else if (integer_zerop (t))
	    return boolean_false_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (inner1 OR t)  */
	  if (is_or)
	    {
	      if (integer_zerop (t))
		return inner1;
	      else if (integer_onep (t))
		return boolean_true_node;
	      /* If both are the same, we can apply the identity
		 (x OR x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner1 OR (op2a code2 op2b)))
	     => (t AND partial)  */
	  else
	    {
	      if (integer_zerop (t))
		return boolean_false_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed AND expression.  The
		     interesting case is when at least one is true.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_onep (partial))
		    return t;
		  else if (integer_onep (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
4626
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      /* CMP orders the two constants: <0, 0, >0 for op1b <, ==, > op2b.  */
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a != op1b), we should either be able to
	 return that or TRUE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	    }
	}
      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	    }
	}

      /* See if an equality test is redundant with the other comparison.  */
      else if (code1 == EQ_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default:
	      val = false;
	    }
	  /* If the second comparison already covers (op1a == op1b),
	     the OR reduces to just the second comparison.  */
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == EQ_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Likewise choose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Check for singleton ranges: (x < c OR x > c) => x != c.  */
      else if (cmp == 0
	       && ((code1 == LT_EXPR && code2 == GT_EXPR)
		   || (code1 == GT_EXPR && code2 == LT_EXPR)))
	return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all.  */
      else if (cmp >= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_true_node;
      else if (cmp <= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_true_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* INVERT is true when the first comparison tests NAME for
	 falsehood ((NAME == 0) or (NAME != 1)) rather than truth.  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return or_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ORed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
			{
			  /* This arm makes the first operand true, so the
			     OR for this arm is true.  */
			  if (!result)
			    result = boolean_true_node;
			  else if (!integer_onep (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  Only look through the argument's
			 definition when it strictly dominates the PHI,
			 i.e. it cannot be part of a cycle through it.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = or_var_with_comparison (arg, invert, code2,
						     op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
4880
4881 /* Try to simplify the OR of two comparisons, specified by
4882 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4883 If this can be simplified to a single expression (without requiring
4884 introducing more SSA variables to hold intermediate values),
4885 return the resulting tree. Otherwise return NULL_TREE.
4886 If the result expression is non-null, it has boolean type. */
4887
4888 tree
4889 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
4890 enum tree_code code2, tree op2a, tree op2b)
4891 {
4892 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4893 if (t)
4894 return t;
4895 else
4896 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4897 }
4898
4899
4900 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
4901
4902 Either NULL_TREE, a simplified but non-constant or a constant
4903 is returned.
4904
4905 ??? This should go into a gimple-fold-inline.h file to be eventually
4906 privatized with the single valueize function used in the various TUs
4907 to avoid the indirect function call overhead. */
4908
4909 tree
4910 gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree),
4911 tree (*gvalueize) (tree))
4912 {
4913 code_helper rcode;
4914 tree ops[3] = {};
4915 /* ??? The SSA propagators do not correctly deal with following SSA use-def
4916 edges if there are intermediate VARYING defs. For this reason
4917 do not follow SSA edges here even though SCCVN can technically
4918 just deal fine with that. */
4919 if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize)
4920 && rcode.is_tree_code ()
4921 && (TREE_CODE_LENGTH ((tree_code) rcode) == 0
4922 || ((tree_code) rcode) == ADDR_EXPR)
4923 && is_gimple_val (ops[0]))
4924 {
4925 tree res = ops[0];
4926 if (dump_file && dump_flags & TDF_DETAILS)
4927 {
4928 fprintf (dump_file, "Match-and-simplified ");
4929 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
4930 fprintf (dump_file, " to ");
4931 print_generic_expr (dump_file, res, 0);
4932 fprintf (dump_file, "\n");
4933 }
4934 return res;
4935 }
4936
4937 location_t loc = gimple_location (stmt);
4938 switch (gimple_code (stmt))
4939 {
4940 case GIMPLE_ASSIGN:
4941 {
4942 enum tree_code subcode = gimple_assign_rhs_code (stmt);
4943
4944 switch (get_gimple_rhs_class (subcode))
4945 {
4946 case GIMPLE_SINGLE_RHS:
4947 {
4948 tree rhs = gimple_assign_rhs1 (stmt);
4949 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
4950
4951 if (TREE_CODE (rhs) == SSA_NAME)
4952 {
4953 /* If the RHS is an SSA_NAME, return its known constant value,
4954 if any. */
4955 return (*valueize) (rhs);
4956 }
4957 /* Handle propagating invariant addresses into address
4958 operations. */
4959 else if (TREE_CODE (rhs) == ADDR_EXPR
4960 && !is_gimple_min_invariant (rhs))
4961 {
4962 HOST_WIDE_INT offset = 0;
4963 tree base;
4964 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
4965 &offset,
4966 valueize);
4967 if (base
4968 && (CONSTANT_CLASS_P (base)
4969 || decl_address_invariant_p (base)))
4970 return build_invariant_address (TREE_TYPE (rhs),
4971 base, offset);
4972 }
4973 else if (TREE_CODE (rhs) == CONSTRUCTOR
4974 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
4975 && (CONSTRUCTOR_NELTS (rhs)
4976 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
4977 {
4978 unsigned i;
4979 tree val, *vec;
4980
4981 vec = XALLOCAVEC (tree,
4982 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
4983 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
4984 {
4985 val = (*valueize) (val);
4986 if (TREE_CODE (val) == INTEGER_CST
4987 || TREE_CODE (val) == REAL_CST
4988 || TREE_CODE (val) == FIXED_CST)
4989 vec[i] = val;
4990 else
4991 return NULL_TREE;
4992 }
4993
4994 return build_vector (TREE_TYPE (rhs), vec);
4995 }
4996 if (subcode == OBJ_TYPE_REF)
4997 {
4998 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
4999 /* If callee is constant, we can fold away the wrapper. */
5000 if (is_gimple_min_invariant (val))
5001 return val;
5002 }
5003
5004 if (kind == tcc_reference)
5005 {
5006 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
5007 || TREE_CODE (rhs) == REALPART_EXPR
5008 || TREE_CODE (rhs) == IMAGPART_EXPR)
5009 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5010 {
5011 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5012 return fold_unary_loc (EXPR_LOCATION (rhs),
5013 TREE_CODE (rhs),
5014 TREE_TYPE (rhs), val);
5015 }
5016 else if (TREE_CODE (rhs) == BIT_FIELD_REF
5017 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5018 {
5019 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5020 return fold_ternary_loc (EXPR_LOCATION (rhs),
5021 TREE_CODE (rhs),
5022 TREE_TYPE (rhs), val,
5023 TREE_OPERAND (rhs, 1),
5024 TREE_OPERAND (rhs, 2));
5025 }
5026 else if (TREE_CODE (rhs) == MEM_REF
5027 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
5028 {
5029 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
5030 if (TREE_CODE (val) == ADDR_EXPR
5031 && is_gimple_min_invariant (val))
5032 {
5033 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
5034 unshare_expr (val),
5035 TREE_OPERAND (rhs, 1));
5036 if (tem)
5037 rhs = tem;
5038 }
5039 }
5040 return fold_const_aggregate_ref_1 (rhs, valueize);
5041 }
5042 else if (kind == tcc_declaration)
5043 return get_symbol_constant_value (rhs);
5044 return rhs;
5045 }
5046
5047 case GIMPLE_UNARY_RHS:
5048 return NULL_TREE;
5049
5050 case GIMPLE_BINARY_RHS:
5051 {
5052 /* Handle binary operators that can appear in GIMPLE form. */
5053 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
5054 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5055
5056 /* Translate &x + CST into an invariant form suitable for
5057 further propagation. */
5058 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
5059 && TREE_CODE (op0) == ADDR_EXPR
5060 && TREE_CODE (op1) == INTEGER_CST)
5061 {
5062 tree off = fold_convert (ptr_type_node, op1);
5063 return build_fold_addr_expr_loc
5064 (loc,
5065 fold_build2 (MEM_REF,
5066 TREE_TYPE (TREE_TYPE (op0)),
5067 unshare_expr (op0), off));
5068 }
5069
5070 return fold_binary_loc (loc, subcode,
5071 gimple_expr_type (stmt), op0, op1);
5072 }
5073
5074 case GIMPLE_TERNARY_RHS:
5075 {
5076 /* Handle ternary operators that can appear in GIMPLE form. */
5077 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
5078 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
5079 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
5080
5081 /* Fold embedded expressions in ternary codes. */
5082 if ((subcode == COND_EXPR
5083 || subcode == VEC_COND_EXPR)
5084 && COMPARISON_CLASS_P (op0))
5085 {
5086 tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
5087 tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
5088 tree tem = fold_binary_loc (loc, TREE_CODE (op0),
5089 TREE_TYPE (op0), op00, op01);
5090 if (tem)
5091 op0 = tem;
5092 }
5093
5094 return fold_ternary_loc (loc, subcode,
5095 gimple_expr_type (stmt), op0, op1, op2);
5096 }
5097
5098 default:
5099 gcc_unreachable ();
5100 }
5101 }
5102
5103 case GIMPLE_CALL:
5104 {
5105 tree fn;
5106 gcall *call_stmt = as_a <gcall *> (stmt);
5107
5108 if (gimple_call_internal_p (stmt))
5109 {
5110 enum tree_code subcode = ERROR_MARK;
5111 switch (gimple_call_internal_fn (stmt))
5112 {
5113 case IFN_UBSAN_CHECK_ADD:
5114 subcode = PLUS_EXPR;
5115 break;
5116 case IFN_UBSAN_CHECK_SUB:
5117 subcode = MINUS_EXPR;
5118 break;
5119 case IFN_UBSAN_CHECK_MUL:
5120 subcode = MULT_EXPR;
5121 break;
5122 default:
5123 return NULL_TREE;
5124 }
5125 tree arg0 = gimple_call_arg (stmt, 0);
5126 tree arg1 = gimple_call_arg (stmt, 1);
5127 tree op0 = (*valueize) (arg0);
5128 tree op1 = (*valueize) (arg1);
5129
5130 if (TREE_CODE (op0) != INTEGER_CST
5131 || TREE_CODE (op1) != INTEGER_CST)
5132 {
5133 switch (subcode)
5134 {
5135 case MULT_EXPR:
5136 /* x * 0 = 0 * x = 0 without overflow. */
5137 if (integer_zerop (op0) || integer_zerop (op1))
5138 return build_zero_cst (TREE_TYPE (arg0));
5139 break;
5140 case MINUS_EXPR:
5141 /* y - y = 0 without overflow. */
5142 if (operand_equal_p (op0, op1, 0))
5143 return build_zero_cst (TREE_TYPE (arg0));
5144 break;
5145 default:
5146 break;
5147 }
5148 }
5149 tree res
5150 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
5151 if (res
5152 && TREE_CODE (res) == INTEGER_CST
5153 && !TREE_OVERFLOW (res))
5154 return res;
5155 return NULL_TREE;
5156 }
5157
5158 fn = (*valueize) (gimple_call_fn (stmt));
5159 if (TREE_CODE (fn) == ADDR_EXPR
5160 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
5161 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
5162 && gimple_builtin_call_types_compatible_p (stmt,
5163 TREE_OPERAND (fn, 0)))
5164 {
5165 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
5166 tree retval;
5167 unsigned i;
5168 for (i = 0; i < gimple_call_num_args (stmt); ++i)
5169 args[i] = (*valueize) (gimple_call_arg (stmt, i));
5170 retval = fold_builtin_call_array (loc,
5171 gimple_call_return_type (call_stmt),
5172 fn, gimple_call_num_args (stmt), args);
5173 if (retval)
5174 {
5175 /* fold_call_expr wraps the result inside a NOP_EXPR. */
5176 STRIP_NOPS (retval);
5177 retval = fold_convert (gimple_call_return_type (call_stmt),
5178 retval);
5179 }
5180 return retval;
5181 }
5182 return NULL_TREE;
5183 }
5184
5185 default:
5186 return NULL_TREE;
5187 }
5188 }
5189
5190 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5191 Returns NULL_TREE if folding to a constant is not possible, otherwise
5192 returns a constant according to is_gimple_min_invariant. */
5193
5194 tree
5195 gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
5196 {
5197 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
5198 if (res && is_gimple_min_invariant (res))
5199 return res;
5200 return NULL_TREE;
5201 }
5202
5203
5204 /* The following set of functions are supposed to fold references using
5205 their constant initializers. */
5206
/* See if we can find the constructor defining the value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when the constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */
static tree
get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
		      tree (*valueize)(tree))
{
  HOST_WIDE_INT bit_offset2, size, max_size;
  if (TREE_CODE (base) == MEM_REF)
    {
      /* Fold the MEM_REF's constant byte offset into *BIT_OFFSET and
	 strip the indirection, continuing with the pointed-to decl.  */
      if (!integer_zerop (TREE_OPERAND (base, 1)))
	{
	  if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
	    return NULL_TREE;
	  *bit_offset += (mem_ref_offset (base).to_short_addr ()
			  * BITS_PER_UNIT);
	}

      if (valueize
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	base = valueize (TREE_OPERAND (base, 0));
      /* Only a constant address lets us find a decl to look into.  */
      if (!base || TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
	tree init = ctor_for_folding (base);

	/* Our semantic is exact opposite of ctor_for_folding;
	   NULL means unknown, while error_mark_node is 0.  */
	if (init == error_mark_node)
	  return NULL_TREE;
	if (!init)
	  return error_mark_node;
	return init;
      }

    case ARRAY_REF:
    case COMPONENT_REF:
      /* Only constant-position, constant-size inner references can be
	 folded into *BIT_OFFSET; recurse on the stripped base.  */
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
      if (max_size == -1 || size != max_size)
	return NULL_TREE;
      *bit_offset +=  bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case STRING_CST:
    case CONSTRUCTOR:
      return base;

    default:
      return NULL_TREE;
    }
}
5274
/* CTOR is CONSTRUCTOR of an array type.  Fold a reference of type TYPE
   and size SIZE (both in bits) to the memory at bit OFFSET within CTOR.
   FROM_DECL is the declaration the constructor originates from and is
   passed through for diagnostics/canonicalization in the recursion.
   Returns the folded value, or NULL_TREE when the access cannot be
   resolved to a single constructor element.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size,
			   tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;
  offset_int low_bound;
  offset_int elt_size;
  offset_int index, max_index;
  offset_int access_index;
  tree domain_type = NULL_TREE, index_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects make no sense.  */
      gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
      index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = 0;
  /* Static constructors for variably sized objects make no sense.  */
  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
	      == INTEGER_CST);
  elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* We can handle only constantly sized accesses that are known to not
     be larger than size of array element.  */
  if (!TYPE_SIZE_UNIT (type)
      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
      || wi::lts_p (elt_size, wi::to_offset (TYPE_SIZE_UNIT (type)))
      || elt_size == 0)
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
				 elt_size);
  access_index += low_bound;
  if (index_type)
    access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
			    TYPE_SIGN (index_type));

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  /* See if the array field is large enough to span whole access.  We do not
     care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
    return NULL_TREE;

  /* Start one below the low bound so that the "next index" rule in the
     loop below yields LOW_BOUND for the first unindexed element.  */
  index = low_bound - 1;
  if (index_type)
    index = wi::ext (index, TYPE_PRECISION (index_type),
		     TYPE_SIGN (index_type));

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
    {
      /* Array constructor might explicitly set index, or specify range
	 or leave index NULL meaning that it is next index after previous
	 one.  */
      if (cfield)
	{
	  if (TREE_CODE (cfield) == INTEGER_CST)
	    max_index = index = wi::to_offset (cfield);
	  else
	    {
	      gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
	      index = wi::to_offset (TREE_OPERAND (cfield, 0));
	      max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
	    }
	}
      else
	{
	  index += 1;
	  if (index_type)
	    index = wi::ext (index, TYPE_PRECISION (index_type),
			     TYPE_SIGN (index_type));
	  max_index = index;
	}

      /* Do we have match?  */
      if (wi::cmpu (access_index, index) >= 0
	  && wi::cmpu (access_index, max_index) <= 0)
	return fold_ctor_reference (type, cval, inner_offset, size,
				    from_decl);
    }
  /* When memory is not explicitly mentioned in constructor,
     it is 0 (or out of range).  */
  return build_zero_cst (type);
}
5374
/* CTOR is CONSTRUCTOR of an aggregate or vector.  Fold a reference of
   type TYPE and size SIZE (both in bits) to the memory at bit OFFSET.
   FROM_DECL is the originating declaration, threaded through for the
   recursive call.  Returns the folded value, NULL_TREE when the access
   spans multiple fields or only partially overlaps one.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);
      offset_int bitoffset;
      offset_int bitoffset_end, access_end;

      /* Variable sized objects in static constructors makes no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      bitoffset = (wi::to_offset (field_offset)
		   + wi::lshift (wi::to_offset (byte_offset),
				 LOG2_BITS_PER_UNIT));
      /* Compute bit offset where the field ends.  */
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + wi::to_offset (field_size);
      else
	bitoffset_end = 0;

      access_end = offset_int (offset) + size;

      /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
	 [BITOFFSET, BITOFFSET_END)?  */
      if (wi::cmps (access_end, bitoffset) > 0
	  && (field_size == NULL_TREE
	      || wi::lts_p (offset, bitoffset_end)))
	{
	  offset_int inner_offset = offset_int (offset) - bitoffset;
	  /* We do have overlap.  Now see if field is large enough to
	     cover the access.  Give up for accesses spanning multiple
	     fields.  */
	  if (wi::cmps (access_end, bitoffset_end) > 0)
	    return NULL_TREE;
	  if (wi::lts_p (offset, bitoffset))
	    return NULL_TREE;
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl);
	}
    }
  /* When memory is not explicitly mentioned in constructor, it is 0.  */
  return build_zero_cst (type);
}
5438
/* CTOR is value initializing memory; fold a reference of type TYPE and
   size SIZE (bits) to the memory at bit OFFSET.  FROM_DECL is the decl
   the constructor comes from, used when canonicalizing the result.
   Tries, in order: an exact type/offset match, a same-size view
   conversion, a native encode/interpret round trip, and finally a
   recursive walk of a CONSTRUCTOR node.  Returns NULL_TREE when none
   of these applies.  */

tree
fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
		     unsigned HOST_WIDE_INT size, tree from_decl)
{
  tree ret;

  /* We found the field with exact match.  */
  if (useless_type_conversion_p (type, TREE_TYPE (ctor))
      && !offset)
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && !compare_tree_int (TYPE_SIZE (type), size)
      && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
      if (ret)
	STRIP_USELESS_TYPE_CONVERSION (ret);
      return ret;
    }
  /* For constants and byte-aligned/sized reads try to go through
     native_encode/interpret.  */
  if (CONSTANT_CLASS_P (ctor)
      && BITS_PER_UNIT == 8
      && offset % BITS_PER_UNIT == 0
      && size % BITS_PER_UNIT == 0
      && size <= MAX_BITSIZE_MODE_ANY_MODE)
    {
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      if (native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
			      offset / BITS_PER_UNIT) > 0)
	return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
    }
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {
      /* Arrays and vectors use index-based lookup; records/unions walk
	 fields by bit offset.  */
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
	  || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
	return fold_array_ctor_reference (type, ctor, offset, size,
					  from_decl);
      else
	return fold_nonarray_ctor_reference (type, ctor, offset, size,
					     from_decl);
    }

  return NULL_TREE;
}
5493
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  HOST_WIDE_INT offset, size, max_size;
  tree tem;

  /* Never fold away a volatile access.  */
  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (DECL_P (t))
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && TREE_CODE (idx) == INTEGER_CST)
	{
	  tree low_bound, unit_size;

	  /* If the resulting bit-offset is constant, track it.  */
	  if ((low_bound = array_ref_low_bound (t),
	       TREE_CODE (low_bound) == INTEGER_CST)
	      && (unit_size = array_ref_element_size (t),
		  tree_fits_uhwi_p (unit_size)))
	    {
	      offset_int woffset
		= wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
			    TYPE_PRECISION (TREE_TYPE (idx)));

	      if (wi::fits_shwi_p (woffset))
		{
		  offset = woffset.to_shwi ();
		  /* TODO: This code seems wrong, multiply then check
		     to see if it fits.  */
		  offset *= tree_to_uhwi (unit_size);
		  offset *= BITS_PER_UNIT;

		  base = TREE_OPERAND (t, 0);
		  ctor = get_base_constructor (base, &offset, valueize);
		  /* Empty constructor.  Always fold to 0.  */
		  if (ctor == error_mark_node)
		    return build_zero_cst (TREE_TYPE (t));
		  /* Out of bound array access.  Value is undefined,
		     but don't fold.  */
		  if (offset < 0)
		    return NULL_TREE;
		  /* We can not determine ctor.  */
		  if (!ctor)
		    return NULL_TREE;
		  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					      tree_to_uhwi (unit_size)
					      * BITS_PER_UNIT,
					      base);
		}
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (max_size == -1 || max_size != size)
	return NULL_TREE;
      /* We can not determine ctor.  */
      if (!ctor)
	return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (offset < 0)
	return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
				  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	/* Fold the complex operand first; only a COMPLEX_CST can yield
	   a constant real or imaginary part.  */
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
5610
5611 tree
5612 fold_const_aggregate_ref (tree t)
5613 {
5614 return fold_const_aggregate_ref_1 (t, NULL);
5615 }
5616
/* Lookup virtual method with index TOKEN in a virtual table V
   at OFFSET.
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by non-C++ produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
				   tree v,
				   unsigned HOST_WIDE_INT offset,
				   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  if (can_refer)
    *can_refer = true;

  /* First of all double check we have virtual table.  */
  if (TREE_CODE (v) != VAR_DECL
      || !DECL_VIRTUAL_P (v))
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that ctor seem unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      gcc_assert (in_lto_p);
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
			       offset, size, NULL);
     but in a constant time.  We expect that frontend produced a simple
     array without indexed initializers.  */

  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));

  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* This code makes an assumption that there are no
     indexed fields produced by C++ FE, so we can directly index the array.  */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
      STRIP_NOPS (fn);
    }
  else
    fn = NULL;

  /* For type inconsistent program we may end up looking up virtual method
     in virtual table that does not contain TOKEN entries.  We may overrun
     the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When cgraph node is missing and function is not public, we cannot
	 devirtualize.  This can happen in WHOPR when the actual method
	 ends up in other partition, because we found devirtualization
	 possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
	{
	  if (can_refer)
	    {
	      *can_refer = false;
	      return fn;
	    }
	  return NULL_TREE;
	}
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_node::get_create (fn);

  return fn;
}
5727
5728 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
5729 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
5730 KNOWN_BINFO carries the binfo describing the true type of
5731 OBJ_TYPE_REF_OBJECT(REF).
5732 Set CAN_REFER if non-NULL to false if method
5733 is not referable or if the virtual table is ill-formed (such as rewriten
5734 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
5735
5736 tree
5737 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
5738 bool *can_refer)
5739 {
5740 unsigned HOST_WIDE_INT offset;
5741 tree v;
5742
5743 v = BINFO_VTABLE (known_binfo);
5744 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
5745 if (!v)
5746 return NULL_TREE;
5747
5748 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
5749 {
5750 if (can_refer)
5751 *can_refer = false;
5752 return NULL_TREE;
5753 }
5754 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
5755 }
5756
5757 /* Return true iff VAL is a gimple expression that is known to be
5758 non-negative. Restricted to floating-point inputs. */
5759
5760 bool
5761 gimple_val_nonnegative_real_p (tree val)
5762 {
5763 gimple def_stmt;
5764
5765 gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));
5766
5767 /* Use existing logic for non-gimple trees. */
5768 if (tree_expr_nonnegative_p (val))
5769 return true;
5770
5771 if (TREE_CODE (val) != SSA_NAME)
5772 return false;
5773
5774 /* Currently we look only at the immediately defining statement
5775 to make this determination, since recursion on defining
5776 statements of operands can lead to quadratic behavior in the
5777 worst case. This is expected to catch almost all occurrences
5778 in practice. It would be possible to implement limited-depth
5779 recursion if important cases are lost. Alternatively, passes
5780 that need this information (such as the pow/powi lowering code
5781 in the cse_sincos pass) could be revised to provide it through
5782 dataflow propagation. */
5783
5784 def_stmt = SSA_NAME_DEF_STMT (val);
5785
5786 if (is_gimple_assign (def_stmt))
5787 {
5788 tree op0, op1;
5789
5790 /* See fold-const.c:tree_expr_nonnegative_p for additional
5791 cases that could be handled with recursion. */
5792
5793 switch (gimple_assign_rhs_code (def_stmt))
5794 {
5795 case ABS_EXPR:
5796 /* Always true for floating-point operands. */
5797 return true;
5798
5799 case MULT_EXPR:
5800 /* True if the two operands are identical (since we are
5801 restricted to floating-point inputs). */
5802 op0 = gimple_assign_rhs1 (def_stmt);
5803 op1 = gimple_assign_rhs2 (def_stmt);
5804
5805 if (op0 == op1
5806 || operand_equal_p (op0, op1, 0))
5807 return true;
5808
5809 default:
5810 return false;
5811 }
5812 }
5813 else if (is_gimple_call (def_stmt))
5814 {
5815 tree fndecl = gimple_call_fndecl (def_stmt);
5816 if (fndecl
5817 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
5818 {
5819 tree arg1;
5820
5821 switch (DECL_FUNCTION_CODE (fndecl))
5822 {
5823 CASE_FLT_FN (BUILT_IN_ACOS):
5824 CASE_FLT_FN (BUILT_IN_ACOSH):
5825 CASE_FLT_FN (BUILT_IN_CABS):
5826 CASE_FLT_FN (BUILT_IN_COSH):
5827 CASE_FLT_FN (BUILT_IN_ERFC):
5828 CASE_FLT_FN (BUILT_IN_EXP):
5829 CASE_FLT_FN (BUILT_IN_EXP10):
5830 CASE_FLT_FN (BUILT_IN_EXP2):
5831 CASE_FLT_FN (BUILT_IN_FABS):
5832 CASE_FLT_FN (BUILT_IN_FDIM):
5833 CASE_FLT_FN (BUILT_IN_HYPOT):
5834 CASE_FLT_FN (BUILT_IN_POW10):
5835 return true;
5836
5837 CASE_FLT_FN (BUILT_IN_SQRT):
5838 /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
5839 nonnegative inputs. */
5840 if (!HONOR_SIGNED_ZEROS (val))
5841 return true;
5842
5843 break;
5844
5845 CASE_FLT_FN (BUILT_IN_POWI):
5846 /* True if the second argument is an even integer. */
5847 arg1 = gimple_call_arg (def_stmt, 1);
5848
5849 if (TREE_CODE (arg1) == INTEGER_CST
5850 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
5851 return true;
5852
5853 break;
5854
5855 CASE_FLT_FN (BUILT_IN_POW):
5856 /* True if the second argument is an even integer-valued
5857 real. */
5858 arg1 = gimple_call_arg (def_stmt, 1);
5859
5860 if (TREE_CODE (arg1) == REAL_CST)
5861 {
5862 REAL_VALUE_TYPE c;
5863 HOST_WIDE_INT n;
5864
5865 c = TREE_REAL_CST (arg1);
5866 n = real_to_integer (&c);
5867
5868 if ((n & 1) == 0)
5869 {
5870 REAL_VALUE_TYPE cint;
5871 real_from_integer (&cint, VOIDmode, n, SIGNED);
5872 if (real_identical (&c, &cint))
5873 return true;
5874 }
5875 }
5876
5877 break;
5878
5879 default:
5880 return false;
5881 }
5882 }
5883 }
5884
5885 return false;
5886 }
5887
/* Given a pointer value OP0, return a simplified version of an
   indirection through OP0, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view.  */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
	return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (TREE_CODE (min_val) == INTEGER_CST)
	    return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
	  unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
	  tree part_width = TYPE_SIZE (type);
	  unsigned HOST_WIDE_INT part_widthi
	    = tree_to_shwi (part_width) / BITS_PER_UNIT;
	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	  tree index = bitsize_int (indexi);
	  /* Only fold accesses that stay within the vector.  */
	  if (offset / part_widthi
	      < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
	    return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
				part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
	{
	  tree size = TYPE_SIZE_UNIT (type);
	  if (tree_int_cst_equal (size, off))
	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
	}

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    wide_int_to_tree (ptype, off));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      /* Try to simplify the inner dereference first; fall back to an
	 explicit INDIRECT_REF if it does not fold.  */
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
6007
6008 /* Return true if CODE is an operation that when operating on signed
6009 integer types involves undefined behavior on overflow and the
6010 operation can be expressed with unsigned arithmetic. */
6011
6012 bool
6013 arith_code_with_undefined_signed_overflow (tree_code code)
6014 {
6015 switch (code)
6016 {
6017 case PLUS_EXPR:
6018 case MINUS_EXPR:
6019 case MULT_EXPR:
6020 case NEGATE_EXPR:
6021 case POINTER_PLUS_EXPR:
6022 return true;
6023 default:
6024 return false;
6025 }
6026 }
6027
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  /* Convert each operand of STMT to the unsigned type, accumulating any
     conversion statements in front of the rewritten STMT.  */
  for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
    {
      gimple_seq stmts2 = NULL;
      gimple_set_op (stmt, i,
		     force_gimple_operand (fold_convert (type,
							 gimple_op (stmt, i)),
					   &stmts2, true, NULL_TREE));
      gimple_seq_add_seq (&stmts, stmts2);
    }
  /* Give STMT a fresh unsigned LHS; the original LHS is re-established
     by the final conversion below.  */
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  /* POINTER_PLUS_EXPR does not exist for unsigned integers; the
     equivalent unsigned operation is a plain addition.  */
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  /* Convert the unsigned result back to the original type of LHS.  */
  gimple cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
6067
6068
6069 /* The valueization hook we use for the gimple_build API simplification.
6070 This makes us match fold_buildN behavior by only combining with
6071 statements in the sequence(s) we are currently building. */
6072
6073 static tree
6074 gimple_build_valueize (tree op)
6075 {
6076 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
6077 return op;
6078 return NULL_TREE;
6079 }
6080
6081 /* Build the expression CODE OP0 of type TYPE with location LOC,
6082 simplifying it first if possible. Returns the built
6083 expression value and appends statements possibly defining it
6084 to SEQ. */
6085
6086 tree
6087 gimple_build (gimple_seq *seq, location_t loc,
6088 enum tree_code code, tree type, tree op0)
6089 {
6090 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
6091 if (!res)
6092 {
6093 if (gimple_in_ssa_p (cfun))
6094 res = make_ssa_name (type);
6095 else
6096 res = create_tmp_reg (type);
6097 gimple stmt;
6098 if (code == REALPART_EXPR
6099 || code == IMAGPART_EXPR
6100 || code == VIEW_CONVERT_EXPR)
6101 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
6102 else
6103 stmt = gimple_build_assign (res, code, op0);
6104 gimple_set_location (stmt, loc);
6105 gimple_seq_add_stmt_without_update (seq, stmt);
6106 }
6107 return res;
6108 }
6109
6110 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
6111 simplifying it first if possible. Returns the built
6112 expression value and appends statements possibly defining it
6113 to SEQ. */
6114
6115 tree
6116 gimple_build (gimple_seq *seq, location_t loc,
6117 enum tree_code code, tree type, tree op0, tree op1)
6118 {
6119 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
6120 if (!res)
6121 {
6122 if (gimple_in_ssa_p (cfun))
6123 res = make_ssa_name (type);
6124 else
6125 res = create_tmp_reg (type);
6126 gimple stmt = gimple_build_assign (res, code, op0, op1);
6127 gimple_set_location (stmt, loc);
6128 gimple_seq_add_stmt_without_update (seq, stmt);
6129 }
6130 return res;
6131 }
6132
6133 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
6134 simplifying it first if possible. Returns the built
6135 expression value and appends statements possibly defining it
6136 to SEQ. */
6137
6138 tree
6139 gimple_build (gimple_seq *seq, location_t loc,
6140 enum tree_code code, tree type, tree op0, tree op1, tree op2)
6141 {
6142 tree res = gimple_simplify (code, type, op0, op1, op2,
6143 seq, gimple_build_valueize);
6144 if (!res)
6145 {
6146 if (gimple_in_ssa_p (cfun))
6147 res = make_ssa_name (type);
6148 else
6149 res = create_tmp_reg (type);
6150 gimple stmt;
6151 if (code == BIT_FIELD_REF)
6152 stmt = gimple_build_assign (res, code,
6153 build3 (code, type, op0, op1, op2));
6154 else
6155 stmt = gimple_build_assign (res, code, op0, op1, op2);
6156 gimple_set_location (stmt, loc);
6157 gimple_seq_add_stmt_without_update (seq, stmt);
6158 }
6159 return res;
6160 }
6161
6162 /* Build the call FN (ARG0) with a result of type TYPE
6163 (or no result if TYPE is void) with location LOC,
6164 simplifying it first if possible. Returns the built
6165 expression value (or NULL_TREE if TYPE is void) and appends
6166 statements possibly defining it to SEQ. */
6167
6168 tree
6169 gimple_build (gimple_seq *seq, location_t loc,
6170 enum built_in_function fn, tree type, tree arg0)
6171 {
6172 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
6173 if (!res)
6174 {
6175 tree decl = builtin_decl_implicit (fn);
6176 gimple stmt = gimple_build_call (decl, 1, arg0);
6177 if (!VOID_TYPE_P (type))
6178 {
6179 if (gimple_in_ssa_p (cfun))
6180 res = make_ssa_name (type);
6181 else
6182 res = create_tmp_reg (type);
6183 gimple_call_set_lhs (stmt, res);
6184 }
6185 gimple_set_location (stmt, loc);
6186 gimple_seq_add_stmt_without_update (seq, stmt);
6187 }
6188 return res;
6189 }
6190
6191 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
6192 (or no result if TYPE is void) with location LOC,
6193 simplifying it first if possible. Returns the built
6194 expression value (or NULL_TREE if TYPE is void) and appends
6195 statements possibly defining it to SEQ. */
6196
6197 tree
6198 gimple_build (gimple_seq *seq, location_t loc,
6199 enum built_in_function fn, tree type, tree arg0, tree arg1)
6200 {
6201 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
6202 if (!res)
6203 {
6204 tree decl = builtin_decl_implicit (fn);
6205 gimple stmt = gimple_build_call (decl, 2, arg0, arg1);
6206 if (!VOID_TYPE_P (type))
6207 {
6208 if (gimple_in_ssa_p (cfun))
6209 res = make_ssa_name (type);
6210 else
6211 res = create_tmp_reg (type);
6212 gimple_call_set_lhs (stmt, res);
6213 }
6214 gimple_set_location (stmt, loc);
6215 gimple_seq_add_stmt_without_update (seq, stmt);
6216 }
6217 return res;
6218 }
6219
6220 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
6221 (or no result if TYPE is void) with location LOC,
6222 simplifying it first if possible. Returns the built
6223 expression value (or NULL_TREE if TYPE is void) and appends
6224 statements possibly defining it to SEQ. */
6225
6226 tree
6227 gimple_build (gimple_seq *seq, location_t loc,
6228 enum built_in_function fn, tree type,
6229 tree arg0, tree arg1, tree arg2)
6230 {
6231 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
6232 seq, gimple_build_valueize);
6233 if (!res)
6234 {
6235 tree decl = builtin_decl_implicit (fn);
6236 gimple stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
6237 if (!VOID_TYPE_P (type))
6238 {
6239 if (gimple_in_ssa_p (cfun))
6240 res = make_ssa_name (type);
6241 else
6242 res = create_tmp_reg (type);
6243 gimple_call_set_lhs (stmt, res);
6244 }
6245 gimple_set_location (stmt, loc);
6246 gimple_seq_add_stmt_without_update (seq, stmt);
6247 }
6248 return res;
6249 }
6250
6251 /* Build the conversion (TYPE) OP with a result of type TYPE
6252 with location LOC if such conversion is neccesary in GIMPLE,
6253 simplifying it first.
6254 Returns the built expression value and appends
6255 statements possibly defining it to SEQ. */
6256
6257 tree
6258 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
6259 {
6260 if (useless_type_conversion_p (type, TREE_TYPE (op)))
6261 return op;
6262 return gimple_build (seq, loc, NOP_EXPR, type, op);
6263 }