]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/gimple-fold.c
decl.c (Has_Thiscall_Convention): New macro.
[thirdparty/gcc.git] / gcc / gimple-fold.c
CommitLineData
cbdd87d4 1/* Statement simplification on GIMPLE.
949e47e5 2 Copyright (C) 2010, 2011, 2012 Free Software Foundation, Inc.
cbdd87d4
RG
3 Split out from tree-ssa-ccp.c.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it
8under the terms of the GNU General Public License as published by the
9Free Software Foundation; either version 3, or (at your option) any
10later version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT
13ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "tm.h"
25#include "tree.h"
26#include "flags.h"
cbdd87d4 27#include "function.h"
cbdd87d4
RG
28#include "tree-dump.h"
29#include "tree-flow.h"
30#include "tree-pass.h"
31#include "tree-ssa-propagate.h"
cbdd87d4 32#include "target.h"
cfef45c8 33#include "gimple-fold.h"
cbdd87d4 34
b3b9f3d0
JH
/* Return true when DECL can be referenced from current unit.
   We can get declarations that are not possible to reference for
   various reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
	In this case we have static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred by external vtables that
	we devirtualize only during final compilation stage.
	At this time we already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl)
{
  struct varpool_node *vnode;
  struct cgraph_node *node;

  /* Declarations that are neither static nor external live in the
     current unit by construction.  */
  if (!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
    return true;
  /* External flag is set, so we deal with C++ reference
     to static object from other file.
     We also may see weakref that is always safe.  */
  if (DECL_EXTERNAL (decl) && TREE_STATIC (decl)
      && TREE_CODE (decl) == VAR_DECL)
    return lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)) != NULL;
  /* When function is public, we always can introduce new reference.
     Exception are the COMDAT functions where introducing a direct
     reference imply need to include function body in the current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We are not at ltrans stage; so don't worry about WHOPR.
     Also when still gimplifying all referred comdat functions will be
     produced.
     ??? as observed in PR20991 for already optimized out comdat virtual
     functions we may not necessarily give up because the copy will be
     output elsewhere when the corresponding vtable is output.  */
  if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
    return true;
  /* If we already output the function body, we are safe.  */
  if (TREE_ASM_WRITTEN (decl))
    return true;
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      node = cgraph_get_node (decl);
      /* Check that we still have function body and that we didn't take
	 the decision to eliminate offline copy of the function yet.
	 The second is important when devirtualization happens during final
	 compilation stage when making a new reference no longer makes callee
	 to be compiled.  */
      if (!node || !node->analyzed || node->global.inlined_to)
	return false;
    }
  else if (TREE_CODE (decl) == VAR_DECL)
    {
      /* For variables the body analogue is a finalized varpool node.  */
      vnode = varpool_get_node (decl);
      if (!vnode || !vnode->finalized)
	return false;
    }
  return true;
}
105
0038d4e0 106/* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
17f39a39
JH
107 acceptable form for is_gimple_min_invariant. */
108
109tree
110canonicalize_constructor_val (tree cval)
111{
112 STRIP_NOPS (cval);
315f5f1b
RG
113 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
114 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
17f39a39 115 {
315f5f1b
RG
116 tree ptr = TREE_OPERAND (cval, 0);
117 if (is_gimple_min_invariant (ptr))
118 cval = build1_loc (EXPR_LOCATION (cval),
119 ADDR_EXPR, TREE_TYPE (ptr),
120 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
121 ptr,
122 fold_convert (ptr_type_node,
123 TREE_OPERAND (cval, 1))));
17f39a39
JH
124 }
125 if (TREE_CODE (cval) == ADDR_EXPR)
126 {
127 tree base = get_base_address (TREE_OPERAND (cval, 0));
7501ca28
RG
128 if (!base)
129 return NULL_TREE;
b3b9f3d0 130
7501ca28
RG
131 if ((TREE_CODE (base) == VAR_DECL
132 || TREE_CODE (base) == FUNCTION_DECL)
b3b9f3d0 133 && !can_refer_decl_in_current_unit_p (base))
1389294c 134 return NULL_TREE;
7501ca28 135 if (TREE_CODE (base) == VAR_DECL)
c5bdb340
RG
136 {
137 TREE_ADDRESSABLE (base) = 1;
138 if (cfun && gimple_referenced_vars (cfun))
139 add_referenced_var (base);
140 }
7501ca28
RG
141 else if (TREE_CODE (base) == FUNCTION_DECL)
142 {
143 /* Make sure we create a cgraph node for functions we'll reference.
144 They can be non-existent if the reference comes from an entry
145 of an external vtable for example. */
146 cgraph_get_create_node (base);
147 }
0038d4e0 148 /* Fixup types in global initializers. */
73aef89e
RG
149 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
150 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
17f39a39
JH
151 }
152 return cval;
153}
cbdd87d4
RG
154
155/* If SYM is a constant variable with known value, return the value.
156 NULL_TREE is returned otherwise. */
157
158tree
159get_symbol_constant_value (tree sym)
160{
64e0f5ff 161 if (const_value_known_p (sym))
cbdd87d4
RG
162 {
163 tree val = DECL_INITIAL (sym);
164 if (val)
165 {
17f39a39 166 val = canonicalize_constructor_val (val);
1389294c 167 if (val && is_gimple_min_invariant (val))
17f39a39 168 return val;
1389294c
JH
169 else
170 return NULL_TREE;
cbdd87d4
RG
171 }
172 /* Variables declared 'const' without an initializer
173 have zero as the initializer if they may not be
174 overridden at link or run time. */
175 if (!val
cbdd87d4
RG
176 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
177 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
e8160c9a 178 return build_zero_cst (TREE_TYPE (sym));
cbdd87d4
RG
179 }
180
181 return NULL_TREE;
182}
183
184
cbdd87d4
RG
185
/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.
   Returns the folded reference or NULL_TREE if nothing changed.  */

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  /* T walks down to the base of the handled-component chain so the
     MEM_REF transforms below can rewrite the base in place.  */
  tree *t = &expr;
  tree result;

  /* Fold accesses of constant operands directly.  */
  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
			   TREE_CODE (expr),
			   TREE_TYPE (expr),
			   TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
			     TREE_CODE (expr),
			     TREE_TYPE (expr),
			     TREE_OPERAND (expr, 0),
			     TREE_OPERAND (expr, 1),
			     TREE_OPERAND (expr, 2));

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM_REFs invariant address operand.  Do this first
     to avoid feeding non-canonical MEM_REFs elsewhere.  */
  if (TREE_CODE (*t) == MEM_REF
      && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
    {
      /* fold_binary may drop the volatile flag; preserve it.  */
      bool volatile_p = TREE_THIS_VOLATILE (*t);
      tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
			      TREE_OPERAND (*t, 0),
			      TREE_OPERAND (*t, 1));
      if (tem)
	{
	  TREE_THIS_VOLATILE (tem) = volatile_p;
	  *t = tem;
	  /* Retry folding the whole expression with the new base.  */
	  tem = maybe_fold_reference (expr, is_lhs);
	  if (tem)
	    return tem;
	  return expr;
	}
    }

  /* An rvalue reference of a constant aggregate may fold to the
     constant element directly.  */
  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  /* Fold back MEM_REFs to reference trees.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && (TREE_THIS_VOLATILE (*t)
	  == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
      && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
      && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
	  == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
      /* We have to look out here to not drop a required conversion
	 from the rhs to the lhs if is_lhs, but we don't have the
	 rhs here to verify that.  Thus require strict type
	 compatibility.  */
      && types_compatible_p (TREE_TYPE (*t),
			     TREE_TYPE (TREE_OPERAND
					  (TREE_OPERAND (*t, 0), 0))))
    {
      tree tem;
      *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tem = maybe_fold_reference (expr, is_lhs);
      if (tem)
	return tem;
      return expr;
    }
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  tem = maybe_fold_reference (expr, is_lhs);
	  if (tem)
	    return tem;
	  return expr;
	}
    }

  return NULL_TREE;
}
281
282
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  /* Dispatch on the shape of the RHS (single operand, unary, binary,
     ternary) — each class has its own folders.  */
  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    /* Fold the referenced object as an lvalue and rebuild the
	       address expression from the folded result.  */
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		 && (CONSTRUCTOR_NELTS (rhs)
		     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (TREE_CODE (val) != INTEGER_CST
		  && TREE_CODE (val) != REAL_CST
		  && TREE_CODE (val) != FIXED_CST)
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return unshare_expr (get_symbol_constant_value (rhs));

	/* If we couldn't fold the RHS, hand over to the generic
	   fold routines.  */
	if (result == NULL_TREE)
	  result = fold (rhs);

	/* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
	   that may have been added by fold, and "useless" type
	   conversions that might now be apparent due to propagation.  */
	STRIP_USELESS_TYPE_CONVERSION (result);

	if (result != rhs && valid_gimple_rhs_p (result))
	  return result;

	return NULL_TREE;
      }
      break;

    case GIMPLE_UNARY_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
	if (result)
	  {
	    /* If the operation was a conversion do _not_ mark a
	       resulting constant with TREE_OVERFLOW if the original
	       constant was not.  These conversions have implementation
	       defined behavior and retaining the TREE_OVERFLOW flag
	       here would confuse later passes such as VRP.  */
	    if (CONVERT_EXPR_CODE_P (subcode)
		&& TREE_CODE (result) == INTEGER_CST
		&& TREE_CODE (rhs) == INTEGER_CST)
	      TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);

	    STRIP_USELESS_TYPE_CONVERSION (result);
	    if (valid_gimple_rhs_p (result))
	      return result;
	  }
      }
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to canonicalize for boolean-typed X the comparisons
	 X == 0, X == 1, X != 0, and X != 1.  */
      if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	  || gimple_assign_rhs_code (stmt) == NE_EXPR)
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  tree op1 = gimple_assign_rhs1 (stmt);
	  tree op2 = gimple_assign_rhs2 (stmt);
	  tree type = TREE_TYPE (op1);

	  /* Check whether the comparison operands are of the same boolean
	     type as the result type is.
	     Check that second operand is an integer-constant with value
	     one or zero.  */
	  if (TREE_CODE (op2) == INTEGER_CST
	      && (integer_zerop (op2) || integer_onep (op2))
	      && useless_type_conversion_p (TREE_TYPE (lhs), type))
	    {
	      enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
	      bool is_logical_not = false;

	      /* X == 0 and X != 1 is a logical-not of X
		 X == 1 and X != 0 is X  */
	      if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		  || (cmp_code == NE_EXPR && integer_onep (op2)))
		is_logical_not = true;

	      if (is_logical_not == false)
		result = op1;
	      /* Only for one-bit precision typed X the transformation
		 !X -> ~X is valid.  */
	      else if (TYPE_PRECISION (type) == 1)
		result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
				     type, op1);
	      /* Otherwise we use !X -> X ^ 1.  */
	      else
		result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
				     type, op1, build_int_cst (type, 1));

	    }
	}

      if (!result)
	result = fold_binary_loc (loc, subcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)),
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_TERNARY_RHS:
      /* Try to fold a conditional expression.  */
      if (gimple_assign_rhs_code (stmt) == COND_EXPR)
	{
	  tree op0 = gimple_assign_rhs1 (stmt);
	  tree tem;
	  bool set = false;
	  location_t cond_loc = gimple_location (stmt);

	  if (COMPARISON_CLASS_P (op0))
	    {
	      /* Defer overflow warnings; only emit them if the folded
		 condition is actually usable.  */
	      fold_defer_overflow_warnings ();
	      tem = fold_binary_loc (cond_loc,
				     TREE_CODE (op0), TREE_TYPE (op0),
				     TREE_OPERAND (op0, 0),
				     TREE_OPERAND (op0, 1));
	      /* This is actually a conditional expression, not a GIMPLE
		 conditional statement, however, the valid_gimple_rhs_p
		 test still applies.  */
	      set = (tem && is_gimple_condexpr (tem)
		     && valid_gimple_rhs_p (tem));
	      fold_undefer_overflow_warnings (set, stmt, 0);
	    }
	  else if (is_gimple_min_invariant (op0))
	    {
	      tem = op0;
	      set = true;
	    }
	  else
	    return NULL_TREE;

	  if (set)
	    result = fold_build3_loc (cond_loc, COND_EXPR,
				      TREE_TYPE (gimple_assign_lhs (stmt)), tem,
				      gimple_assign_rhs2 (stmt),
				      gimple_assign_rhs3 (stmt));
	}

      if (!result)
	result = fold_ternary_loc (loc, subcode,
				   TREE_TYPE (gimple_assign_lhs (stmt)),
				   gimple_assign_rhs1 (stmt),
				   gimple_assign_rhs2 (stmt),
				   gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
501
502/* Attempt to fold a conditional statement. Return true if any changes were
503 made. We only attempt to fold the condition expression, and do not perform
504 any transformation that would require alteration of the cfg. It is
505 assumed that the operands have been previously folded. */
506
507static bool
508fold_gimple_cond (gimple stmt)
509{
510 tree result = fold_binary_loc (gimple_location (stmt),
511 gimple_cond_code (stmt),
512 boolean_type_node,
513 gimple_cond_lhs (stmt),
514 gimple_cond_rhs (stmt));
515
516 if (result)
517 {
518 STRIP_USELESS_TYPE_CONVERSION (result);
519 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
520 {
521 gimple_cond_set_condition_from_tree (stmt, result);
522 return true;
523 }
524 }
525
526 return false;
527}
528
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;
  struct gimplify_ctx gctx;
  gimple laststore;
  tree reaching_vuse;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (&gctx);
  gctx.into_ssa = gimple_in_ssa_p (cfun);

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_remove (si_p, true);
	  return;
	}
    }
  else
    {
      /* Gimplify EXPR into a temporary and assign it to the call's LHS.  */
      tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  laststore = NULL;
  for (i = gsi_last (stmts); !gsi_end_p (i); gsi_prev (&i))
    {
      new_stmt = gsi_stmt (i);
      /* A statement needs a VDEF if it is a store or a call with
	 memory side effects.  */
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  /* The last store (first seen walking backward) reuses the
	     replaced statement's VDEF so downstream uses stay valid.  */
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  reaching_vuse = gimple_vuse (stmt);
  for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
    {
      new_stmt = gsi_stmt (i);
      /* The replacement can expose previously unreferenced variables.  */
      if (gimple_in_ssa_p (cfun))
	find_referenced_vars_in (new_stmt);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
648
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */

static bool
get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	  && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_maxval_strlen (TREE_OPERAND (aop0, 0),
				      length, visited, type);
	}

      if (type == 2)
	{
	  /* Maximum value mode: ARG itself must be a non-negative
	     integer constant.  */
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length)
	{
	  if (type > 0)
	    {
	      /* In maximum mode keep the larger of the two lengths.  */
	      if (TREE_CODE (*length) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*length, val))
		*length = val;
	      return true;
	    }
	  /* In exact mode any disagreement means failure.  */
	  else if (simple_cst_equal (val, *length) != 1)
	    return false;
	}

      *length = val;
      return true;
    }

  /* If we were already here, break the infinite cycle.  */
  if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
	 constant length or come from another SSA_NAME with a constant
	 length.  */
      if (gimple_assign_single_p (def_stmt)
	  || gimple_assign_unary_nop_p (def_stmt))
	{
	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  return get_maxval_strlen (rhs, length, visited, type);
	}
      else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	{
	  /* Both arms of a COND_EXPR must yield an acceptable length.  */
	  tree op2 = gimple_assign_rhs2 (def_stmt);
	  tree op3 = gimple_assign_rhs3 (def_stmt);
	  return get_maxval_strlen (op2, length, visited, type)
		 && get_maxval_strlen (op3, length, visited, type);
	}
      return false;

    case GIMPLE_PHI:
      {
	/* All the arguments of the PHI node must have the same constant
	   length.  */
	unsigned i;

	for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	  {
	    tree arg = gimple_phi_arg (def_stmt, i)->def;

	    /* If this PHI has itself as an argument, we cannot
	       determine the string length of this argument.  However,
	       if we can find a constant string length for the other
	       PHI args then we can still be sure that this is a
	       constant string length.  So be optimistic and just
	       continue with the next argument.  */
	    if (arg == gimple_phi_result (def_stmt))
	      continue;

	    if (!get_maxval_strlen (arg, length, visited, type))
	      return false;
	  }
      }
      return true;

    default:
      return false;
    }
}
767
768
/* Fold builtin call in statement STMT.  Returns a simplified tree.
   We may return a non-constant expression, including another call
   to a different function and with different arguments, e.g.,
   substituting memcpy for strcpy when the string length is known.
   Note that some builtins expand into inline code that may not
   be valid in GIMPLE.  Callers must take care.  */

tree
gimple_fold_builtin (gimple stmt)
{
  tree result, val[3];
  tree callee, a;
  /* ARG_IDX selects which call argument carries the string/length of
     interest; TYPE is the get_maxval_strlen mode for it (0 = exact
     length, 1 = max string length, 2 = max value).  */
  int arg_idx, type;
  bitmap visited;
  bool ignore;
  int nargs;
  location_t loc = gimple_location (stmt);

  gcc_assert (is_gimple_call (stmt));

  /* IGNORE is true when the call's result is unused.  */
  ignore = (gimple_call_lhs (stmt) == NULL);

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = gimple_call_fndecl (stmt);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  nargs = gimple_call_num_args (stmt);
  if (nargs == 0)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      arg_idx = 0;
      type = 0;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
      arg_idx = 1;
      type = 0;
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      arg_idx = 2;
      type = 2;
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      arg_idx = 1;
      type = 1;
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      arg_idx = 1;
      type = 2;
      break;
    default:
      return NULL_TREE;
    }

  if (arg_idx >= nargs)
    return NULL_TREE;

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);
  bitmap_clear (visited);

  memset (val, 0, sizeof (val));
  a = gimple_call_arg (stmt, arg_idx);
  if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
    val[arg_idx] = NULL_TREE;

  BITMAP_FREE (visited);

  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (val[0] && nargs == 1)
	{
	  tree new_val =
	      fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);

	  /* If the result is not a valid gimple value, or not a cast
	     of a valid gimple value, then we cannot use the result.  */
	  if (is_gimple_val (new_val)
	      || (CONVERT_EXPR_P (new_val)
		  && is_gimple_val (TREE_OPERAND (new_val, 0))))
	    return new_val;
	}
      break;

    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
	result = fold_builtin_strcpy (loc, callee,
				      gimple_call_arg (stmt, 0),
				      gimple_call_arg (stmt, 1),
				      val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_strncpy (loc, callee,
				       gimple_call_arg (stmt, 0),
				       gimple_call_arg (stmt, 1),
				       gimple_call_arg (stmt, 2),
				       val[1]);
      break;

    case BUILT_IN_FPUTS:
      if (nargs == 2)
	result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, false, val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      if (nargs == 2)
	result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, true, val[0]);
      break;

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_memory_chk (loc, callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  gimple_call_arg (stmt, 3),
					  val[2], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_stxcpy_chk (loc, callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  val[1], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_stxncpy_chk (loc, gimple_call_arg (stmt, 0),
					   gimple_call_arg (stmt, 1),
					   gimple_call_arg (stmt, 2),
					   gimple_call_arg (stmt, 3),
					   val[2], ignore,
					   DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
	result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
						   DECL_FUNCTION_CODE (callee));
      break;

    default:
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);
  return result;
}
968
1ae6fe9b 969
49c471e3
MJ
/* Return a binfo to be used for devirtualization of calls based on an object
   represented by a declaration (i.e. a global or automatically allocated one)
   or NULL if it cannot be found or is not safe.  CST is expected to be an
   ADDR_EXPR of such object or the function will return NULL.  Currently it is
   safe to use such binfo only if it has no base binfo (i.e. no ancestors).  */

tree
gimple_extract_devirt_binfo_from_cst (tree cst)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base, type, expected_type, binfo;
  bool last_artificial = false;

  if (!flag_devirtualize
      || TREE_CODE (cst) != ADDR_EXPR
      || TREE_CODE (TREE_TYPE (TREE_TYPE (cst))) != RECORD_TYPE)
    return NULL_TREE;

  cst = TREE_OPERAND (cst, 0);
  expected_type = TREE_TYPE (cst);
  base = get_ref_base_and_extent (cst, &offset, &size, &max_size);
  type = TREE_TYPE (base);
  /* Require a DECL base of exactly known size that is a RECORD_TYPE.  */
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || TREE_CODE (type) != RECORD_TYPE)
    return NULL_TREE;

  /* Find the sub-object the constant actually refers to and mark whether it is
     an artificial one (as opposed to a user-defined one).  */
  while (true)
    {
      HOST_WIDE_INT pos, size;
      tree fld;

      if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (expected_type))
	break;
      if (offset < 0)
	return NULL_TREE;

      /* Locate the field covering OFFSET at this nesting level.  */
      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	{
	  if (TREE_CODE (fld) != FIELD_DECL)
	    continue;

	  pos = int_bit_position (fld);
	  size = tree_low_cst (DECL_SIZE (fld), 1);
	  if (pos <= offset && (pos + size) > offset)
	    break;
	}
      if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
	return NULL_TREE;

      last_artificial = DECL_ARTIFICIAL (fld);
      type = TREE_TYPE (fld);
      offset -= pos;
    }
  /* Artificial sub-objects are ancestors, we do not want to use them for
     devirtualization, at least not here.  */
  if (last_artificial)
    return NULL_TREE;
  binfo = TYPE_BINFO (type);
  /* Only a binfo with no base binfos (no ancestors) is safe to use.  */
  if (!binfo || BINFO_N_BASE_BINFOS (binfo) > 0)
    return NULL_TREE;
  else
    return binfo;
}
1037
cbdd87d4
RG
1038/* Attempt to fold a call statement referenced by the statement iterator GSI.
1039 The statement may be replaced by another statement, e.g., if the call
1040 simplifies to a constant value. Return true if any changes were made.
1041 It is assumed that the operands have been previously folded. */
1042
e021c122 1043static bool
ceeffab0 1044gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
cbdd87d4
RG
1045{
1046 gimple stmt = gsi_stmt (*gsi);
3b45a007 1047 tree callee;
e021c122
RG
1048 bool changed = false;
1049 unsigned i;
cbdd87d4 1050
e021c122
RG
1051 /* Fold *& in call arguments. */
1052 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1053 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
1054 {
1055 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
1056 if (tmp)
1057 {
1058 gimple_call_set_arg (stmt, i, tmp);
1059 changed = true;
1060 }
1061 }
3b45a007
RG
1062
1063 /* Check for virtual calls that became direct calls. */
1064 callee = gimple_call_fn (stmt);
25583c4f 1065 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
3b45a007 1066 {
49c471e3
MJ
1067 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
1068 {
1069 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
e021c122
RG
1070 changed = true;
1071 }
1072 else
1073 {
1074 tree obj = OBJ_TYPE_REF_OBJECT (callee);
1075 tree binfo = gimple_extract_devirt_binfo_from_cst (obj);
1076 if (binfo)
1077 {
1078 HOST_WIDE_INT token
1079 = TREE_INT_CST_LOW (OBJ_TYPE_REF_TOKEN (callee));
1080 tree fndecl = gimple_get_virt_method_for_binfo (token, binfo);
1081 if (fndecl)
1082 {
1083 gimple_call_set_fndecl (stmt, fndecl);
1084 changed = true;
1085 }
1086 }
49c471e3 1087 }
e021c122 1088 }
49c471e3 1089
e021c122
RG
1090 if (inplace)
1091 return changed;
1092
1093 /* Check for builtins that CCP can handle using information not
1094 available in the generic fold routines. */
89faf322 1095 callee = gimple_call_fndecl (stmt);
e021c122
RG
1096 if (callee && DECL_BUILT_IN (callee))
1097 {
1098 tree result = gimple_fold_builtin (stmt);
f6dbed32 1099 if (result)
e021c122
RG
1100 {
1101 if (!update_call_from_tree (gsi, result))
1102 gimplify_and_update_call_from_tree (gsi, result);
1103 changed = true;
1104 }
3b45a007
RG
1105 }
1106
e021c122 1107 return changed;
cbdd87d4
RG
1108}
1109
1110/* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
1111 distinguishes both cases. */
1112
1113static bool
1114fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
1115{
1116 bool changed = false;
1117 gimple stmt = gsi_stmt (*gsi);
1118 unsigned i;
a9d24544
JJ
1119 gimple_stmt_iterator gsinext = *gsi;
1120 gimple next_stmt;
1121
1122 gsi_next (&gsinext);
1123 next_stmt = gsi_end_p (gsinext) ? NULL : gsi_stmt (gsinext);
cbdd87d4
RG
1124
1125 /* Fold the main computation performed by the statement. */
1126 switch (gimple_code (stmt))
1127 {
1128 case GIMPLE_ASSIGN:
1129 {
1130 unsigned old_num_ops = gimple_num_ops (stmt);
5fbcc0ed 1131 enum tree_code subcode = gimple_assign_rhs_code (stmt);
cbdd87d4 1132 tree lhs = gimple_assign_lhs (stmt);
5fbcc0ed
RG
1133 tree new_rhs;
1134 /* First canonicalize operand order. This avoids building new
1135 trees if this is the only thing fold would later do. */
1136 if ((commutative_tree_code (subcode)
1137 || commutative_ternary_tree_code (subcode))
1138 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
1139 gimple_assign_rhs2 (stmt), false))
1140 {
1141 tree tem = gimple_assign_rhs1 (stmt);
1142 gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
1143 gimple_assign_set_rhs2 (stmt, tem);
1144 changed = true;
1145 }
1146 new_rhs = fold_gimple_assign (gsi);
cbdd87d4
RG
1147 if (new_rhs
1148 && !useless_type_conversion_p (TREE_TYPE (lhs),
1149 TREE_TYPE (new_rhs)))
1150 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
1151 if (new_rhs
1152 && (!inplace
1153 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
1154 {
1155 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
1156 changed = true;
1157 }
1158 break;
1159 }
1160
1161 case GIMPLE_COND:
1162 changed |= fold_gimple_cond (stmt);
1163 break;
1164
1165 case GIMPLE_CALL:
ceeffab0 1166 changed |= gimple_fold_call (gsi, inplace);
cbdd87d4
RG
1167 break;
1168
1169 case GIMPLE_ASM:
1170 /* Fold *& in asm operands. */
38384150
JJ
1171 {
1172 size_t noutputs;
1173 const char **oconstraints;
1174 const char *constraint;
1175 bool allows_mem, allows_reg;
1176
1177 noutputs = gimple_asm_noutputs (stmt);
1178 oconstraints = XALLOCAVEC (const char *, noutputs);
1179
1180 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
1181 {
1182 tree link = gimple_asm_output_op (stmt, i);
1183 tree op = TREE_VALUE (link);
1184 oconstraints[i]
1185 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1186 if (REFERENCE_CLASS_P (op)
1187 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
1188 {
1189 TREE_VALUE (link) = op;
1190 changed = true;
1191 }
1192 }
1193 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
1194 {
1195 tree link = gimple_asm_input_op (stmt, i);
1196 tree op = TREE_VALUE (link);
1197 constraint
1198 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1199 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
1200 oconstraints, &allows_mem, &allows_reg);
1201 if (REFERENCE_CLASS_P (op)
1202 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
1203 != NULL_TREE)
1204 {
1205 TREE_VALUE (link) = op;
1206 changed = true;
1207 }
1208 }
1209 }
cbdd87d4
RG
1210 break;
1211
bd422c4a
RG
1212 case GIMPLE_DEBUG:
1213 if (gimple_debug_bind_p (stmt))
1214 {
1215 tree val = gimple_debug_bind_get_value (stmt);
1216 if (val
1217 && REFERENCE_CLASS_P (val))
1218 {
1219 tree tem = maybe_fold_reference (val, false);
1220 if (tem)
1221 {
1222 gimple_debug_bind_set_value (stmt, tem);
1223 changed = true;
1224 }
1225 }
3e888a5e
RG
1226 else if (val
1227 && TREE_CODE (val) == ADDR_EXPR)
1228 {
1229 tree ref = TREE_OPERAND (val, 0);
1230 tree tem = maybe_fold_reference (ref, false);
1231 if (tem)
1232 {
1233 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
1234 gimple_debug_bind_set_value (stmt, tem);
1235 changed = true;
1236 }
1237 }
bd422c4a
RG
1238 }
1239 break;
1240
cbdd87d4
RG
1241 default:;
1242 }
1243
a9d24544
JJ
1244 /* If stmt folds into nothing and it was the last stmt in a bb,
1245 don't call gsi_stmt. */
1246 if (gsi_end_p (*gsi))
1247 {
1248 gcc_assert (next_stmt == NULL);
1249 return changed;
1250 }
1251
cbdd87d4
RG
1252 stmt = gsi_stmt (*gsi);
1253
a9d24544
JJ
1254 /* Fold *& on the lhs. Don't do this if stmt folded into nothing,
1255 as we'd changing the next stmt. */
1256 if (gimple_has_lhs (stmt) && stmt != next_stmt)
cbdd87d4
RG
1257 {
1258 tree lhs = gimple_get_lhs (stmt);
1259 if (lhs && REFERENCE_CLASS_P (lhs))
1260 {
1261 tree new_lhs = maybe_fold_reference (lhs, true);
1262 if (new_lhs)
1263 {
1264 gimple_set_lhs (stmt, new_lhs);
1265 changed = true;
1266 }
1267 }
1268 }
1269
1270 return changed;
1271}
1272
1273/* Fold the statement pointed to by GSI. In some cases, this function may
1274 replace the whole statement with a new one. Returns true iff folding
1275 makes any changes.
1276 The statement pointed to by GSI should be in valid gimple form but may
1277 be in unfolded state as resulting from for example constant propagation
1278 which can produce *&x = 0. */
1279
1280bool
1281fold_stmt (gimple_stmt_iterator *gsi)
1282{
1283 return fold_stmt_1 (gsi, false);
1284}
1285
59401b92 1286/* Perform the minimal folding on statement *GSI. Only operations like
cbdd87d4
RG
1287 *&x created by constant propagation are handled. The statement cannot
1288 be replaced with a new one. Return true if the statement was
1289 changed, false otherwise.
59401b92 1290 The statement *GSI should be in valid gimple form but may
cbdd87d4
RG
1291 be in unfolded state as resulting from for example constant propagation
1292 which can produce *&x = 0. */
1293
1294bool
59401b92 1295fold_stmt_inplace (gimple_stmt_iterator *gsi)
cbdd87d4 1296{
59401b92
RG
1297 gimple stmt = gsi_stmt (*gsi);
1298 bool changed = fold_stmt_1 (gsi, true);
1299 gcc_assert (gsi_stmt (*gsi) == stmt);
cbdd87d4
RG
1300 return changed;
1301}
1302
e89065a1
SL
1303/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
1304 if EXPR is null or we don't know how.
1305 If non-null, the result always has boolean type. */
1306
1307static tree
1308canonicalize_bool (tree expr, bool invert)
1309{
1310 if (!expr)
1311 return NULL_TREE;
1312 else if (invert)
1313 {
1314 if (integer_nonzerop (expr))
1315 return boolean_false_node;
1316 else if (integer_zerop (expr))
1317 return boolean_true_node;
1318 else if (TREE_CODE (expr) == SSA_NAME)
1319 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
1320 build_int_cst (TREE_TYPE (expr), 0));
1321 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1322 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
1323 boolean_type_node,
1324 TREE_OPERAND (expr, 0),
1325 TREE_OPERAND (expr, 1));
1326 else
1327 return NULL_TREE;
1328 }
1329 else
1330 {
1331 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1332 return expr;
1333 if (integer_nonzerop (expr))
1334 return boolean_true_node;
1335 else if (integer_zerop (expr))
1336 return boolean_false_node;
1337 else if (TREE_CODE (expr) == SSA_NAME)
1338 return fold_build2 (NE_EXPR, boolean_type_node, expr,
1339 build_int_cst (TREE_TYPE (expr), 0));
1340 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1341 return fold_build2 (TREE_CODE (expr),
1342 boolean_type_node,
1343 TREE_OPERAND (expr, 0),
1344 TREE_OPERAND (expr, 1));
1345 else
1346 return NULL_TREE;
1347 }
1348}
1349
1350/* Check to see if a boolean expression EXPR is logically equivalent to the
1351 comparison (OP1 CODE OP2). Check for various identities involving
1352 SSA_NAMEs. */
1353
1354static bool
1355same_bool_comparison_p (const_tree expr, enum tree_code code,
1356 const_tree op1, const_tree op2)
1357{
1358 gimple s;
1359
1360 /* The obvious case. */
1361 if (TREE_CODE (expr) == code
1362 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
1363 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
1364 return true;
1365
1366 /* Check for comparing (name, name != 0) and the case where expr
1367 is an SSA_NAME with a definition matching the comparison. */
1368 if (TREE_CODE (expr) == SSA_NAME
1369 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1370 {
1371 if (operand_equal_p (expr, op1, 0))
1372 return ((code == NE_EXPR && integer_zerop (op2))
1373 || (code == EQ_EXPR && integer_nonzerop (op2)));
1374 s = SSA_NAME_DEF_STMT (expr);
1375 if (is_gimple_assign (s)
1376 && gimple_assign_rhs_code (s) == code
1377 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
1378 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
1379 return true;
1380 }
1381
1382 /* If op1 is of the form (name != 0) or (name == 0), and the definition
1383 of name is a comparison, recurse. */
1384 if (TREE_CODE (op1) == SSA_NAME
1385 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
1386 {
1387 s = SSA_NAME_DEF_STMT (op1);
1388 if (is_gimple_assign (s)
1389 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
1390 {
1391 enum tree_code c = gimple_assign_rhs_code (s);
1392 if ((c == NE_EXPR && integer_zerop (op2))
1393 || (c == EQ_EXPR && integer_nonzerop (op2)))
1394 return same_bool_comparison_p (expr, c,
1395 gimple_assign_rhs1 (s),
1396 gimple_assign_rhs2 (s));
1397 if ((c == EQ_EXPR && integer_zerop (op2))
1398 || (c == NE_EXPR && integer_nonzerop (op2)))
1399 return same_bool_comparison_p (expr,
1400 invert_tree_comparison (c, false),
1401 gimple_assign_rhs1 (s),
1402 gimple_assign_rhs2 (s));
1403 }
1404 }
1405 return false;
1406}
1407
1408/* Check to see if two boolean expressions OP1 and OP2 are logically
1409 equivalent. */
1410
1411static bool
1412same_bool_result_p (const_tree op1, const_tree op2)
1413{
1414 /* Simple cases first. */
1415 if (operand_equal_p (op1, op2, 0))
1416 return true;
1417
1418 /* Check the cases where at least one of the operands is a comparison.
1419 These are a bit smarter than operand_equal_p in that they apply some
1420 identifies on SSA_NAMEs. */
1421 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
1422 && same_bool_comparison_p (op1, TREE_CODE (op2),
1423 TREE_OPERAND (op2, 0),
1424 TREE_OPERAND (op2, 1)))
1425 return true;
1426 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
1427 && same_bool_comparison_p (op2, TREE_CODE (op1),
1428 TREE_OPERAND (op1, 0),
1429 TREE_OPERAND (op1, 1)))
1430 return true;
1431
1432 /* Default case. */
1433 return false;
1434}
1435
1436/* Forward declarations for some mutually recursive functions. */
1437
1438static tree
1439and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1440 enum tree_code code2, tree op2a, tree op2b);
1441static tree
1442and_var_with_comparison (tree var, bool invert,
1443 enum tree_code code2, tree op2a, tree op2b);
1444static tree
1445and_var_with_comparison_1 (gimple stmt,
1446 enum tree_code code2, tree op2a, tree op2b);
1447static tree
1448or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1449 enum tree_code code2, tree op2a, tree op2b);
1450static tree
1451or_var_with_comparison (tree var, bool invert,
1452 enum tree_code code2, tree op2a, tree op2b);
1453static tree
1454or_var_with_comparison_1 (gimple stmt,
1455 enum tree_code code2, tree op2a, tree op2b);
1456
1457/* Helper function for and_comparisons_1: try to simplify the AND of the
1458 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1459 If INVERT is true, invert the value of the VAR before doing the AND.
1460 Return NULL_EXPR if we can't simplify this to a single expression. */
1461
1462static tree
1463and_var_with_comparison (tree var, bool invert,
1464 enum tree_code code2, tree op2a, tree op2b)
1465{
1466 tree t;
1467 gimple stmt = SSA_NAME_DEF_STMT (var);
1468
1469 /* We can only deal with variables whose definitions are assignments. */
1470 if (!is_gimple_assign (stmt))
1471 return NULL_TREE;
1472
1473 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1474 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1475 Then we only have to consider the simpler non-inverted cases. */
1476 if (invert)
1477 t = or_var_with_comparison_1 (stmt,
1478 invert_tree_comparison (code2, false),
1479 op2a, op2b);
1480 else
1481 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
1482 return canonicalize_bool (t, invert);
1483}
1484
1485/* Try to simplify the AND of the ssa variable defined by the assignment
1486 STMT with the comparison specified by (OP2A CODE2 OP2B).
1487 Return NULL_EXPR if we can't simplify this to a single expression. */
1488
1489static tree
1490and_var_with_comparison_1 (gimple stmt,
1491 enum tree_code code2, tree op2a, tree op2b)
1492{
1493 tree var = gimple_assign_lhs (stmt);
1494 tree true_test_var = NULL_TREE;
1495 tree false_test_var = NULL_TREE;
1496 enum tree_code innercode = gimple_assign_rhs_code (stmt);
1497
1498 /* Check for identities like (var AND (var == 0)) => false. */
1499 if (TREE_CODE (op2a) == SSA_NAME
1500 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
1501 {
1502 if ((code2 == NE_EXPR && integer_zerop (op2b))
1503 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
1504 {
1505 true_test_var = op2a;
1506 if (var == true_test_var)
1507 return var;
1508 }
1509 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
1510 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
1511 {
1512 false_test_var = op2a;
1513 if (var == false_test_var)
1514 return boolean_false_node;
1515 }
1516 }
1517
1518 /* If the definition is a comparison, recurse on it. */
1519 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
1520 {
1521 tree t = and_comparisons_1 (innercode,
1522 gimple_assign_rhs1 (stmt),
1523 gimple_assign_rhs2 (stmt),
1524 code2,
1525 op2a,
1526 op2b);
1527 if (t)
1528 return t;
1529 }
1530
1531 /* If the definition is an AND or OR expression, we may be able to
1532 simplify by reassociating. */
eb9820c0
KT
1533 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
1534 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
e89065a1
SL
1535 {
1536 tree inner1 = gimple_assign_rhs1 (stmt);
1537 tree inner2 = gimple_assign_rhs2 (stmt);
1538 gimple s;
1539 tree t;
1540 tree partial = NULL_TREE;
eb9820c0 1541 bool is_and = (innercode == BIT_AND_EXPR);
e89065a1
SL
1542
1543 /* Check for boolean identities that don't require recursive examination
1544 of inner1/inner2:
1545 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
1546 inner1 AND (inner1 OR inner2) => inner1
1547 !inner1 AND (inner1 AND inner2) => false
1548 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
1549 Likewise for similar cases involving inner2. */
1550 if (inner1 == true_test_var)
1551 return (is_and ? var : inner1);
1552 else if (inner2 == true_test_var)
1553 return (is_and ? var : inner2);
1554 else if (inner1 == false_test_var)
1555 return (is_and
1556 ? boolean_false_node
1557 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
1558 else if (inner2 == false_test_var)
1559 return (is_and
1560 ? boolean_false_node
1561 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
1562
1563 /* Next, redistribute/reassociate the AND across the inner tests.
1564 Compute the first partial result, (inner1 AND (op2a code op2b)) */
1565 if (TREE_CODE (inner1) == SSA_NAME
1566 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
1567 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1568 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1569 gimple_assign_rhs1 (s),
1570 gimple_assign_rhs2 (s),
1571 code2, op2a, op2b)))
1572 {
1573 /* Handle the AND case, where we are reassociating:
1574 (inner1 AND inner2) AND (op2a code2 op2b)
1575 => (t AND inner2)
1576 If the partial result t is a constant, we win. Otherwise
1577 continue on to try reassociating with the other inner test. */
1578 if (is_and)
1579 {
1580 if (integer_onep (t))
1581 return inner2;
1582 else if (integer_zerop (t))
1583 return boolean_false_node;
1584 }
1585
1586 /* Handle the OR case, where we are redistributing:
1587 (inner1 OR inner2) AND (op2a code2 op2b)
1588 => (t OR (inner2 AND (op2a code2 op2b))) */
8236c8eb
JJ
1589 else if (integer_onep (t))
1590 return boolean_true_node;
1591
1592 /* Save partial result for later. */
1593 partial = t;
e89065a1
SL
1594 }
1595
1596 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
1597 if (TREE_CODE (inner2) == SSA_NAME
1598 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
1599 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1600 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1601 gimple_assign_rhs1 (s),
1602 gimple_assign_rhs2 (s),
1603 code2, op2a, op2b)))
1604 {
1605 /* Handle the AND case, where we are reassociating:
1606 (inner1 AND inner2) AND (op2a code2 op2b)
1607 => (inner1 AND t) */
1608 if (is_and)
1609 {
1610 if (integer_onep (t))
1611 return inner1;
1612 else if (integer_zerop (t))
1613 return boolean_false_node;
8236c8eb
JJ
1614 /* If both are the same, we can apply the identity
1615 (x AND x) == x. */
1616 else if (partial && same_bool_result_p (t, partial))
1617 return t;
e89065a1
SL
1618 }
1619
1620 /* Handle the OR case. where we are redistributing:
1621 (inner1 OR inner2) AND (op2a code2 op2b)
1622 => (t OR (inner1 AND (op2a code2 op2b)))
1623 => (t OR partial) */
1624 else
1625 {
1626 if (integer_onep (t))
1627 return boolean_true_node;
1628 else if (partial)
1629 {
1630 /* We already got a simplification for the other
1631 operand to the redistributed OR expression. The
1632 interesting case is when at least one is false.
1633 Or, if both are the same, we can apply the identity
1634 (x OR x) == x. */
1635 if (integer_zerop (partial))
1636 return t;
1637 else if (integer_zerop (t))
1638 return partial;
1639 else if (same_bool_result_p (t, partial))
1640 return t;
1641 }
1642 }
1643 }
1644 }
1645 return NULL_TREE;
1646}
1647
1648/* Try to simplify the AND of two comparisons defined by
1649 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1650 If this can be done without constructing an intermediate value,
1651 return the resulting tree; otherwise NULL_TREE is returned.
1652 This function is deliberately asymmetric as it recurses on SSA_DEFs
1653 in the first comparison but not the second. */
1654
1655static tree
1656and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1657 enum tree_code code2, tree op2a, tree op2b)
1658{
1659 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
1660 if (operand_equal_p (op1a, op2a, 0)
1661 && operand_equal_p (op1b, op2b, 0))
1662 {
eb9820c0 1663 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
1664 tree t = combine_comparisons (UNKNOWN_LOCATION,
1665 TRUTH_ANDIF_EXPR, code1, code2,
1666 boolean_type_node, op1a, op1b);
1667 if (t)
1668 return t;
1669 }
1670
1671 /* Likewise the swapped case of the above. */
1672 if (operand_equal_p (op1a, op2b, 0)
1673 && operand_equal_p (op1b, op2a, 0))
1674 {
eb9820c0 1675 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
1676 tree t = combine_comparisons (UNKNOWN_LOCATION,
1677 TRUTH_ANDIF_EXPR, code1,
1678 swap_tree_comparison (code2),
1679 boolean_type_node, op1a, op1b);
1680 if (t)
1681 return t;
1682 }
1683
1684 /* If both comparisons are of the same value against constants, we might
1685 be able to merge them. */
1686 if (operand_equal_p (op1a, op2a, 0)
1687 && TREE_CODE (op1b) == INTEGER_CST
1688 && TREE_CODE (op2b) == INTEGER_CST)
1689 {
1690 int cmp = tree_int_cst_compare (op1b, op2b);
1691
1692 /* If we have (op1a == op1b), we should either be able to
1693 return that or FALSE, depending on whether the constant op1b
1694 also satisfies the other comparison against op2b. */
1695 if (code1 == EQ_EXPR)
1696 {
1697 bool done = true;
1698 bool val;
1699 switch (code2)
1700 {
1701 case EQ_EXPR: val = (cmp == 0); break;
1702 case NE_EXPR: val = (cmp != 0); break;
1703 case LT_EXPR: val = (cmp < 0); break;
1704 case GT_EXPR: val = (cmp > 0); break;
1705 case LE_EXPR: val = (cmp <= 0); break;
1706 case GE_EXPR: val = (cmp >= 0); break;
1707 default: done = false;
1708 }
1709 if (done)
1710 {
1711 if (val)
1712 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1713 else
1714 return boolean_false_node;
1715 }
1716 }
1717 /* Likewise if the second comparison is an == comparison. */
1718 else if (code2 == EQ_EXPR)
1719 {
1720 bool done = true;
1721 bool val;
1722 switch (code1)
1723 {
1724 case EQ_EXPR: val = (cmp == 0); break;
1725 case NE_EXPR: val = (cmp != 0); break;
1726 case LT_EXPR: val = (cmp > 0); break;
1727 case GT_EXPR: val = (cmp < 0); break;
1728 case LE_EXPR: val = (cmp >= 0); break;
1729 case GE_EXPR: val = (cmp <= 0); break;
1730 default: done = false;
1731 }
1732 if (done)
1733 {
1734 if (val)
1735 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1736 else
1737 return boolean_false_node;
1738 }
1739 }
1740
1741 /* Same business with inequality tests. */
1742 else if (code1 == NE_EXPR)
1743 {
1744 bool val;
1745 switch (code2)
1746 {
1747 case EQ_EXPR: val = (cmp != 0); break;
1748 case NE_EXPR: val = (cmp == 0); break;
1749 case LT_EXPR: val = (cmp >= 0); break;
1750 case GT_EXPR: val = (cmp <= 0); break;
1751 case LE_EXPR: val = (cmp > 0); break;
1752 case GE_EXPR: val = (cmp < 0); break;
1753 default:
1754 val = false;
1755 }
1756 if (val)
1757 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1758 }
1759 else if (code2 == NE_EXPR)
1760 {
1761 bool val;
1762 switch (code1)
1763 {
1764 case EQ_EXPR: val = (cmp == 0); break;
1765 case NE_EXPR: val = (cmp != 0); break;
1766 case LT_EXPR: val = (cmp <= 0); break;
1767 case GT_EXPR: val = (cmp >= 0); break;
1768 case LE_EXPR: val = (cmp < 0); break;
1769 case GE_EXPR: val = (cmp > 0); break;
1770 default:
1771 val = false;
1772 }
1773 if (val)
1774 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1775 }
1776
1777 /* Chose the more restrictive of two < or <= comparisons. */
1778 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
1779 && (code2 == LT_EXPR || code2 == LE_EXPR))
1780 {
1781 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
1782 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1783 else
1784 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1785 }
1786
1787 /* Likewise chose the more restrictive of two > or >= comparisons. */
1788 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
1789 && (code2 == GT_EXPR || code2 == GE_EXPR))
1790 {
1791 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
1792 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1793 else
1794 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1795 }
1796
1797 /* Check for singleton ranges. */
1798 else if (cmp == 0
1799 && ((code1 == LE_EXPR && code2 == GE_EXPR)
1800 || (code1 == GE_EXPR && code2 == LE_EXPR)))
1801 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
1802
1803 /* Check for disjoint ranges. */
1804 else if (cmp <= 0
1805 && (code1 == LT_EXPR || code1 == LE_EXPR)
1806 && (code2 == GT_EXPR || code2 == GE_EXPR))
1807 return boolean_false_node;
1808 else if (cmp >= 0
1809 && (code1 == GT_EXPR || code1 == GE_EXPR)
1810 && (code2 == LT_EXPR || code2 == LE_EXPR))
1811 return boolean_false_node;
1812 }
1813
1814 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
1815 NAME's definition is a truth value. See if there are any simplifications
1816 that can be done against the NAME's definition. */
1817 if (TREE_CODE (op1a) == SSA_NAME
1818 && (code1 == NE_EXPR || code1 == EQ_EXPR)
1819 && (integer_zerop (op1b) || integer_onep (op1b)))
1820 {
1821 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
1822 || (code1 == NE_EXPR && integer_onep (op1b)));
1823 gimple stmt = SSA_NAME_DEF_STMT (op1a);
1824 switch (gimple_code (stmt))
1825 {
1826 case GIMPLE_ASSIGN:
1827 /* Try to simplify by copy-propagating the definition. */
1828 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
1829
1830 case GIMPLE_PHI:
1831 /* If every argument to the PHI produces the same result when
1832 ANDed with the second comparison, we win.
1833 Do not do this unless the type is bool since we need a bool
1834 result here anyway. */
1835 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
1836 {
1837 tree result = NULL_TREE;
1838 unsigned i;
1839 for (i = 0; i < gimple_phi_num_args (stmt); i++)
1840 {
1841 tree arg = gimple_phi_arg_def (stmt, i);
1842
1843 /* If this PHI has itself as an argument, ignore it.
1844 If all the other args produce the same result,
1845 we're still OK. */
1846 if (arg == gimple_phi_result (stmt))
1847 continue;
1848 else if (TREE_CODE (arg) == INTEGER_CST)
1849 {
1850 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
1851 {
1852 if (!result)
1853 result = boolean_false_node;
1854 else if (!integer_zerop (result))
1855 return NULL_TREE;
1856 }
1857 else if (!result)
1858 result = fold_build2 (code2, boolean_type_node,
1859 op2a, op2b);
1860 else if (!same_bool_comparison_p (result,
1861 code2, op2a, op2b))
1862 return NULL_TREE;
1863 }
0e8b84ec
JJ
1864 else if (TREE_CODE (arg) == SSA_NAME
1865 && !SSA_NAME_IS_DEFAULT_DEF (arg))
e89065a1 1866 {
6c66f733
JJ
1867 tree temp;
1868 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
1869 /* In simple cases we can look through PHI nodes,
1870 but we have to be careful with loops.
1871 See PR49073. */
1872 if (! dom_info_available_p (CDI_DOMINATORS)
1873 || gimple_bb (def_stmt) == gimple_bb (stmt)
1874 || dominated_by_p (CDI_DOMINATORS,
1875 gimple_bb (def_stmt),
1876 gimple_bb (stmt)))
1877 return NULL_TREE;
1878 temp = and_var_with_comparison (arg, invert, code2,
1879 op2a, op2b);
e89065a1
SL
1880 if (!temp)
1881 return NULL_TREE;
1882 else if (!result)
1883 result = temp;
1884 else if (!same_bool_result_p (result, temp))
1885 return NULL_TREE;
1886 }
1887 else
1888 return NULL_TREE;
1889 }
1890 return result;
1891 }
1892
1893 default:
1894 break;
1895 }
1896 }
1897 return NULL_TREE;
1898}
1899
1900/* Try to simplify the AND of two comparisons, specified by
1901 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
1902 If this can be simplified to a single expression (without requiring
1903 introducing more SSA variables to hold intermediate values),
1904 return the resulting tree. Otherwise return NULL_TREE.
1905 If the result expression is non-null, it has boolean type. */
1906
1907tree
1908maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
1909 enum tree_code code2, tree op2a, tree op2b)
1910{
1911 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
1912 if (t)
1913 return t;
1914 else
1915 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
1916}
1917
1918/* Helper function for or_comparisons_1: try to simplify the OR of the
1919 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1920 If INVERT is true, invert the value of VAR before doing the OR.
1921 Return NULL_EXPR if we can't simplify this to a single expression. */
1922
1923static tree
1924or_var_with_comparison (tree var, bool invert,
1925 enum tree_code code2, tree op2a, tree op2b)
1926{
1927 tree t;
1928 gimple stmt = SSA_NAME_DEF_STMT (var);
1929
1930 /* We can only deal with variables whose definitions are assignments. */
1931 if (!is_gimple_assign (stmt))
1932 return NULL_TREE;
1933
1934 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1935 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
1936 Then we only have to consider the simpler non-inverted cases. */
1937 if (invert)
1938 t = and_var_with_comparison_1 (stmt,
1939 invert_tree_comparison (code2, false),
1940 op2a, op2b);
1941 else
1942 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
1943 return canonicalize_bool (t, invert);
1944}
1945
1946/* Try to simplify the OR of the ssa variable defined by the assignment
1947   STMT with the comparison specified by (OP2A CODE2 OP2B).
1948   Return NULL_EXPR if we can't simplify this to a single expression.  */
1949
1950static tree
1951or_var_with_comparison_1 (gimple stmt,
1952			  enum tree_code code2, tree op2a, tree op2b)
1953{
1954  tree var = gimple_assign_lhs (stmt);
1955  tree true_test_var = NULL_TREE;
1956  tree false_test_var = NULL_TREE;
1957  enum tree_code innercode = gimple_assign_rhs_code (stmt);
1958
  /* If OP2A is itself a test of VAR, record it: TRUE_TEST_VAR is set
     when (op2a code2 op2b) is equivalent to op2a, FALSE_TEST_VAR when
     it is equivalent to !op2a.  These feed the identities below.  */
1959  /* Check for identities like (var OR (var != 0)) => true .  */
1960  if (TREE_CODE (op2a) == SSA_NAME
1961      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
1962    {
1963      if ((code2 == NE_EXPR && integer_zerop (op2b))
1964	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
1965	{
1966	  true_test_var = op2a;
1967	  if (var == true_test_var)
1968	    return var;
1969	}
1970      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
1971	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
1972	{
1973	  false_test_var = op2a;
1974	  if (var == false_test_var)
1975	    return boolean_true_node;
1976	}
1977    }
1978
1979  /* If the definition is a comparison, recurse on it.  */
1980  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
1981    {
1982      tree t = or_comparisons_1 (innercode,
1983				 gimple_assign_rhs1 (stmt),
1984				 gimple_assign_rhs2 (stmt),
1985				 code2,
1986				 op2a,
1987				 op2b);
1988      if (t)
1989	return t;
1990    }
1991
1992  /* If the definition is an AND or OR expression, we may be able to
1993     simplify by reassociating.  */
eb9820c0
KT
1994  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
1995      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
e89065a1
SL
1996    {
1997      tree inner1 = gimple_assign_rhs1 (stmt);
1998      tree inner2 = gimple_assign_rhs2 (stmt);
1999      gimple s;
2000      tree t;
2001      tree partial = NULL_TREE;
eb9820c0 2002      bool is_or = (innercode == BIT_IOR_EXPR);
e89065a1
SL
2003
2004      /* Check for boolean identities that don't require recursive examination
2005	 of inner1/inner2:
2006	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2007	 inner1 OR (inner1 AND inner2) => inner1
2008	 !inner1 OR (inner1 OR inner2) => true
2009	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2010      */
2011      if (inner1 == true_test_var)
2012	return (is_or ? var : inner1);
2013      else if (inner2 == true_test_var)
2014	return (is_or ? var : inner2);
2015      else if (inner1 == false_test_var)
2016	return (is_or
2017		? boolean_true_node
2018		: or_var_with_comparison (inner2, false, code2, op2a, op2b));
2019      else if (inner2 == false_test_var)
2020	return (is_or
2021		? boolean_true_node
2022		: or_var_with_comparison (inner1, false, code2, op2a, op2b));
2023
2024      /* Next, redistribute/reassociate the OR across the inner tests.
2025	 Compute the first partial result, (inner1 OR (op2a code op2b)) */
2026      if (TREE_CODE (inner1) == SSA_NAME
2027	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
2028	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2029	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2030					     gimple_assign_rhs1 (s),
2031					     gimple_assign_rhs2 (s),
2032					     code2, op2a, op2b)))
2033	{
2034	  /* Handle the OR case, where we are reassociating:
2035	     (inner1 OR inner2) OR (op2a code2 op2b)
2036	     => (t OR inner2)
2037	     If the partial result t is a constant, we win.  Otherwise
2038	     continue on to try reassociating with the other inner test.  */
8236c8eb 2039	  if (is_or)
e89065a1
SL
2040	    {
2041	      if (integer_onep (t))
2042		return boolean_true_node;
2043	      else if (integer_zerop (t))
2044		return inner2;
2045	    }
2046
2047	  /* Handle the AND case, where we are redistributing:
2048	     (inner1 AND inner2) OR (op2a code2 op2b)
2049	     => (t AND (inner2 OR (op2a code op2b)))  */
8236c8eb
JJ
2050	  else if (integer_zerop (t))
2051	    return boolean_false_node;
2052
2053	  /* Save partial result for later.  */
2054	  partial = t;
e89065a1
SL
2055	}
2056
2057      /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
2058      if (TREE_CODE (inner2) == SSA_NAME
2059	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2060	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2061	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2062					     gimple_assign_rhs1 (s),
2063					     gimple_assign_rhs2 (s),
2064					     code2, op2a, op2b)))
2065	{
2066	  /* Handle the OR case, where we are reassociating:
2067	     (inner1 OR inner2) OR (op2a code2 op2b)
8236c8eb
JJ
2068	     => (inner1 OR t)
2069	     => (t OR partial)  */
2070	  if (is_or)
e89065a1
SL
2071	    {
2072	      if (integer_zerop (t))
2073		return inner1;
2074	      else if (integer_onep (t))
2075		return boolean_true_node;
8236c8eb
JJ
2076	      /* If both are the same, we can apply the identity
2077		 (x OR x) == x.  */
2078	      else if (partial && same_bool_result_p (t, partial))
2079		return t;
e89065a1
SL
2080	    }
2081
2082	  /* Handle the AND case, where we are redistributing:
2083	     (inner1 AND inner2) OR (op2a code2 op2b)
2084	     => (t AND (inner1 OR (op2a code2 op2b)))
2085	     => (t AND partial)  */
2086	  else
2087	    {
2088	      if (integer_zerop (t))
2089		return boolean_false_node;
2090	      else if (partial)
2091		{
2092		  /* We already got a simplification for the other
2093		     operand to the redistributed AND expression.  The
2094		     interesting case is when at least one is true.
2095		     Or, if both are the same, we can apply the identity
8236c8eb 2096		     (x AND x) == x.  */
e89065a1
SL
2097		  if (integer_onep (partial))
2098		    return t;
2099		  else if (integer_onep (t))
2100		    return partial;
2101		  else if (same_bool_result_p (t, partial))
8236c8eb 2102		    return t;
e89065a1
SL
2103		}
2104	    }
2105	}
2106    }
2107  return NULL_TREE;
2108}
2109
2110/* Try to simplify the OR of two comparisons defined by
2111   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2112   If this can be done without constructing an intermediate value,
2113   return the resulting tree; otherwise NULL_TREE is returned.
2114   This function is deliberately asymmetric as it recurses on SSA_DEFs
2115   in the first comparison but not the second.  */
2116
2117static tree
2118or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2119		  enum tree_code code2, tree op2a, tree op2b)
2120{
2121  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
2122  if (operand_equal_p (op1a, op2a, 0)
2123      && operand_equal_p (op1b, op2b, 0))
2124    {
eb9820c0 2125      /* Result will be either NULL_TREE, or a combined comparison.  */
e89065a1
SL
2126      tree t = combine_comparisons (UNKNOWN_LOCATION,
2127				    TRUTH_ORIF_EXPR, code1, code2,
2128				    boolean_type_node, op1a, op1b);
2129      if (t)
2130	return t;
2131    }
2132
2133  /* Likewise the swapped case of the above.  */
2134  if (operand_equal_p (op1a, op2b, 0)
2135      && operand_equal_p (op1b, op2a, 0))
2136    {
eb9820c0 2137      /* Result will be either NULL_TREE, or a combined comparison.  */
e89065a1
SL
2138      tree t = combine_comparisons (UNKNOWN_LOCATION,
2139				    TRUTH_ORIF_EXPR, code1,
2140				    swap_tree_comparison (code2),
2141				    boolean_type_node, op1a, op1b);
2142      if (t)
2143	return t;
2144    }
2145
2146  /* If both comparisons are of the same value against constants, we might
2147     be able to merge them.  */
2148  if (operand_equal_p (op1a, op2a, 0)
2149      && TREE_CODE (op1b) == INTEGER_CST
2150      && TREE_CODE (op2b) == INTEGER_CST)
2151    {
      /* CMP orders the two constants: <0 / 0 / >0 for op1b </==/> op2b.  */
2152      int cmp = tree_int_cst_compare (op1b, op2b);
2153
2154      /* If we have (op1a != op1b), we should either be able to
2155	 return that or TRUE, depending on whether the constant op1b
2156	 also satisfies the other comparison against op2b.  */
2157      if (code1 == NE_EXPR)
2158	{
2159	  bool done = true;
2160	  bool val;
2161	  switch (code2)
2162	    {
2163	    case EQ_EXPR: val = (cmp == 0); break;
2164	    case NE_EXPR: val = (cmp != 0); break;
2165	    case LT_EXPR: val = (cmp < 0); break;
2166	    case GT_EXPR: val = (cmp > 0); break;
2167	    case LE_EXPR: val = (cmp <= 0); break;
2168	    case GE_EXPR: val = (cmp >= 0); break;
2169	    default: done = false;
2170	    }
2171	  if (done)
2172	    {
2173	      if (val)
2174		return boolean_true_node;
2175	      else
2176		return fold_build2 (code1, boolean_type_node, op1a, op1b);
2177	    }
2178	}
2179      /* Likewise if the second comparison is a != comparison.  */
2180      else if (code2 == NE_EXPR)
2181	{
2182	  bool done = true;
2183	  bool val;
2184	  switch (code1)
2185	    {
2186	    case EQ_EXPR: val = (cmp == 0); break;
2187	    case NE_EXPR: val = (cmp != 0); break;
2188	    case LT_EXPR: val = (cmp > 0); break;
2189	    case GT_EXPR: val = (cmp < 0); break;
2190	    case LE_EXPR: val = (cmp >= 0); break;
2191	    case GE_EXPR: val = (cmp <= 0); break;
2192	    default: done = false;
2193	    }
2194	  if (done)
2195	    {
2196	      if (val)
2197		return boolean_true_node;
2198	      else
2199		return fold_build2 (code2, boolean_type_node, op2a, op2b);
2200	    }
2201	}
2202
2203      /* See if an equality test is redundant with the other comparison.  */
2204      else if (code1 == EQ_EXPR)
2205	{
2206	  bool val;
2207	  switch (code2)
2208	    {
2209	    case EQ_EXPR: val = (cmp == 0); break;
2210	    case NE_EXPR: val = (cmp != 0); break;
2211	    case LT_EXPR: val = (cmp < 0); break;
2212	    case GT_EXPR: val = (cmp > 0); break;
2213	    case LE_EXPR: val = (cmp <= 0); break;
2214	    case GE_EXPR: val = (cmp >= 0); break;
2215	    default:
2216	      val = false;
2217	    }
2218	  if (val)
2219	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
2220	}
2221      else if (code2 == EQ_EXPR)
2222	{
2223	  bool val;
2224	  switch (code1)
2225	    {
2226	    case EQ_EXPR: val = (cmp == 0); break;
2227	    case NE_EXPR: val = (cmp != 0); break;
2228	    case LT_EXPR: val = (cmp > 0); break;
2229	    case GT_EXPR: val = (cmp < 0); break;
2230	    case LE_EXPR: val = (cmp >= 0); break;
2231	    case GE_EXPR: val = (cmp <= 0); break;
2232	    default:
2233	      val = false;
2234	    }
2235	  if (val)
2236	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
2237	}
2238
2239      /* Choose the less restrictive of two < or <= comparisons.  */
2240      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2241	       && (code2 == LT_EXPR || code2 == LE_EXPR))
2242	{
2243	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2244	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
2245	  else
2246	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
2247	}
2248
2249      /* Likewise choose the less restrictive of two > or >= comparisons.  */
2250      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2251	       && (code2 == GT_EXPR || code2 == GE_EXPR))
2252	{
2253	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2254	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
2255	  else
2256	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
2257	}
2258
2259      /* Check for singleton ranges.  */
2260      else if (cmp == 0
2261	       && ((code1 == LT_EXPR && code2 == GT_EXPR)
2262		   || (code1 == GT_EXPR && code2 == LT_EXPR)))
2263	return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
2264
2265      /* Check for less/greater pairs that don't restrict the range at all.  */
2266      else if (cmp >= 0
2267	       && (code1 == LT_EXPR || code1 == LE_EXPR)
2268	       && (code2 == GT_EXPR || code2 == GE_EXPR))
2269	return boolean_true_node;
2270      else if (cmp <= 0
2271	       && (code1 == GT_EXPR || code1 == GE_EXPR)
2272	       && (code2 == LT_EXPR || code2 == LE_EXPR))
2273	return boolean_true_node;
2274    }
2275
2276  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2277     NAME's definition is a truth value.  See if there are any simplifications
2278     that can be done against the NAME's definition.  */
2279  if (TREE_CODE (op1a) == SSA_NAME
2280      && (code1 == NE_EXPR || code1 == EQ_EXPR)
2281      && (integer_zerop (op1b) || integer_onep (op1b)))
2282    {
2283      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2284		     || (code1 == NE_EXPR && integer_onep (op1b)));
2285      gimple stmt = SSA_NAME_DEF_STMT (op1a);
2286      switch (gimple_code (stmt))
2287	{
2288	case GIMPLE_ASSIGN:
2289	  /* Try to simplify by copy-propagating the definition.  */
2290	  return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
2291
2292	case GIMPLE_PHI:
2293	  /* If every argument to the PHI produces the same result when
2294	     ORed with the second comparison, we win.
2295	     Do not do this unless the type is bool since we need a bool
2296	     result here anyway.  */
2297	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2298	    {
2299	      tree result = NULL_TREE;
2300	      unsigned i;
2301	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
2302		{
2303		  tree arg = gimple_phi_arg_def (stmt, i);
2304
2305		  /* If this PHI has itself as an argument, ignore it.
2306		     If all the other args produce the same result,
2307		     we're still OK.  */
2308		  if (arg == gimple_phi_result (stmt))
2309		    continue;
2310		  else if (TREE_CODE (arg) == INTEGER_CST)
2311		    {
2312		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
2313			{
2314			  if (!result)
2315			    result = boolean_true_node;
2316			  else if (!integer_onep (result))
2317			    return NULL_TREE;
2318			}
2319		      else if (!result)
2320			result = fold_build2 (code2, boolean_type_node,
2321					      op2a, op2b);
2322		      else if (!same_bool_comparison_p (result,
2323							code2, op2a, op2b))
2324			return NULL_TREE;
2325		    }
0e8b84ec
JJ
2326		  else if (TREE_CODE (arg) == SSA_NAME
2327			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
e89065a1 2328		    {
6c66f733
JJ
2329		      tree temp;
2330		      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
2331		      /* In simple cases we can look through PHI nodes,
2332			 but we have to be careful with loops.
2333			 See PR49073.  */
2334		      if (! dom_info_available_p (CDI_DOMINATORS)
2335			  || gimple_bb (def_stmt) == gimple_bb (stmt)
2336			  || dominated_by_p (CDI_DOMINATORS,
2337					     gimple_bb (def_stmt),
2338					     gimple_bb (stmt)))
2339			return NULL_TREE;
2340		      temp = or_var_with_comparison (arg, invert, code2,
2341						     op2a, op2b);
e89065a1
SL
2342		      if (!temp)
2343			return NULL_TREE;
2344		      else if (!result)
2345			result = temp;
2346		      else if (!same_bool_result_p (result, temp))
2347			return NULL_TREE;
2348		    }
2349		  else
2350		    return NULL_TREE;
2351		}
2352	      return result;
2353	    }
2354
2355	default:
2356	  break;
2357	}
2358    }
2359  return NULL_TREE;
2360}
2361
2362/* Try to simplify the OR of two comparisons, specified by
2363 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2364 If this can be simplified to a single expression (without requiring
2365 introducing more SSA variables to hold intermediate values),
2366 return the resulting tree. Otherwise return NULL_TREE.
2367 If the result expression is non-null, it has boolean type. */
2368
2369tree
2370maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2371 enum tree_code code2, tree op2a, tree op2b)
2372{
2373 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2374 if (t)
2375 return t;
2376 else
2377 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2378}
cfef45c8
RG
2379
2380
2381/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2382
2383   Either NULL_TREE, a simplified but non-constant or a constant
2384   is returned.
2385
2386   ??? This should go into a gimple-fold-inline.h file to be eventually
2387   privatized with the single valueize function used in the various TUs
2388   to avoid the indirect function call overhead.  */
2389
2390tree
2391gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
2392{
  /* STMT's location is reused for every expression we build here.  */
2393  location_t loc = gimple_location (stmt);
2394  switch (gimple_code (stmt))
2395    {
2396    case GIMPLE_ASSIGN:
2397      {
2398        enum tree_code subcode = gimple_assign_rhs_code (stmt);
2399
2400        switch (get_gimple_rhs_class (subcode))
2401          {
2402          case GIMPLE_SINGLE_RHS:
2403            {
2404	      tree rhs = gimple_assign_rhs1 (stmt);
2405	      enum tree_code_class kind = TREE_CODE_CLASS (subcode);
2406
2407	      if (TREE_CODE (rhs) == SSA_NAME)
2408		{
2409		  /* If the RHS is an SSA_NAME, return its known constant value,
2410		     if any.  */
2411		  return (*valueize) (rhs);
2412		}
2413	      /* Handle propagating invariant addresses into address
2414		 operations.  */
2415	      else if (TREE_CODE (rhs) == ADDR_EXPR
2416		       && !is_gimple_min_invariant (rhs))
2417		{
d25c4172 2418		  HOST_WIDE_INT offset = 0;
cfef45c8
RG
2419		  tree base;
2420		  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
2421							  &offset,
2422							  valueize);
2423		  if (base
2424		      && (CONSTANT_CLASS_P (base)
2425			  || decl_address_invariant_p (base)))
2426		    return build_invariant_address (TREE_TYPE (rhs),
2427						    base, offset);
2428		}
	      /* A vector CONSTRUCTOR with one value per subpart can fold
		 to a VECTOR_CST if every valueized element is constant.  */
2429	      else if (TREE_CODE (rhs) == CONSTRUCTOR
2430		       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
2431		       && (CONSTRUCTOR_NELTS (rhs)
2432			   == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
2433		{
2434		  unsigned i;
d2a12ae7 2435		  tree val, *vec;
cfef45c8 2436
d2a12ae7
RG
2437		  vec = XALLOCAVEC (tree,
2438				    TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
cfef45c8
RG
2439		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
2440		    {
2441		      val = (*valueize) (val);
2442		      if (TREE_CODE (val) == INTEGER_CST
2443			  || TREE_CODE (val) == REAL_CST
2444			  || TREE_CODE (val) == FIXED_CST)
d2a12ae7 2445			vec[i] = val;
cfef45c8
RG
2446		      else
2447			return NULL_TREE;
2448		    }
2449
d2a12ae7 2450		  return build_vector (TREE_TYPE (rhs), vec);
cfef45c8
RG
2451		}
2452
2453	      if (kind == tcc_reference)
2454		{
2455		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
2456		       || TREE_CODE (rhs) == REALPART_EXPR
2457		       || TREE_CODE (rhs) == IMAGPART_EXPR)
2458		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2459		    {
2460		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2461		      return fold_unary_loc (EXPR_LOCATION (rhs),
2462					     TREE_CODE (rhs),
2463					     TREE_TYPE (rhs), val);
2464		    }
2465		  else if (TREE_CODE (rhs) == BIT_FIELD_REF
2466			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2467		    {
2468		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2469		      return fold_ternary_loc (EXPR_LOCATION (rhs),
2470					       TREE_CODE (rhs),
2471					       TREE_TYPE (rhs), val,
2472					       TREE_OPERAND (rhs, 1),
2473					       TREE_OPERAND (rhs, 2));
2474		    }
2475		  else if (TREE_CODE (rhs) == MEM_REF
2476			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2477		    {
2478		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2479		      if (TREE_CODE (val) == ADDR_EXPR
2480			  && is_gimple_min_invariant (val))
2481			{
2482			  tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
2483						  unshare_expr (val),
2484						  TREE_OPERAND (rhs, 1));
2485			  if (tem)
2486			    rhs = tem;
2487			}
2488		    }
2489		  return fold_const_aggregate_ref_1 (rhs, valueize);
2490		}
2491	      else if (kind == tcc_declaration)
2492		return get_symbol_constant_value (rhs);
2493	      return rhs;
2494	    }
2495
2496	  case GIMPLE_UNARY_RHS:
2497	    {
2498	      /* Handle unary operators that can appear in GIMPLE form.
2499		 Note that we know the single operand must be a constant,
2500		 so this should almost always return a simplified RHS.  */
315f5f1b 2501	      tree lhs = gimple_assign_lhs (stmt);
cfef45c8
RG
2502	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2503
2504	      /* Conversions are useless for CCP purposes if they are
2505		 value-preserving.  Thus the restrictions that
315f5f1b
RG
2506		 useless_type_conversion_p places for restrict qualification
2507		 of pointer types should not apply here.
2508		 Substitution later will only substitute to allowed places.  */
cfef45c8
RG
2509	      if (CONVERT_EXPR_CODE_P (subcode)
2510		  && POINTER_TYPE_P (TREE_TYPE (lhs))
315f5f1b 2511		  && POINTER_TYPE_P (TREE_TYPE (op0))
8f420307
EB
2512		  && TYPE_ADDR_SPACE (TREE_TYPE (lhs))
2513		     == TYPE_ADDR_SPACE (TREE_TYPE (op0))
2514		  && TYPE_MODE (TREE_TYPE (lhs))
2515		     == TYPE_MODE (TREE_TYPE (op0)))
315f5f1b 2516		return op0;
cfef45c8
RG
2517
2518	      return
2519		fold_unary_ignore_overflow_loc (loc, subcode,
2520						gimple_expr_type (stmt), op0);
2521	    }
2522
2523	  case GIMPLE_BINARY_RHS:
2524	    {
2525	      /* Handle binary operators that can appear in GIMPLE form.  */
2526	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2527	      tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2528
2529	      /* Translate &x + CST into an invariant form suitable for
2530		 further propagation.  */
2531	      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
2532		  && TREE_CODE (op0) == ADDR_EXPR
2533		  && TREE_CODE (op1) == INTEGER_CST)
2534		{
2535		  tree off = fold_convert (ptr_type_node, op1);
4d59a001
RG
2536		  return build_fold_addr_expr_loc
2537			   (loc,
2538			    fold_build2 (MEM_REF,
cfef45c8
RG
2539					 TREE_TYPE (TREE_TYPE (op0)),
2540					 unshare_expr (op0), off));
2541		}
2542
2543	      return fold_binary_loc (loc, subcode,
2544				      gimple_expr_type (stmt), op0, op1);
2545	    }
2546
2547	  case GIMPLE_TERNARY_RHS:
2548	    {
2549	      /* Handle ternary operators that can appear in GIMPLE form.  */
2550	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2551	      tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2552	      tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
2553
d3878abf
RG
2554	      /* Fold embedded expressions in ternary codes.  */
2555	      if ((subcode == COND_EXPR
2556		   || subcode == VEC_COND_EXPR)
2557		  && COMPARISON_CLASS_P (op0))
2558		{
2559		  tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
2560		  tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
2561		  tree tem = fold_binary_loc (loc, TREE_CODE (op0),
2562					      TREE_TYPE (op0), op00, op01);
2563		  if (tem)
2564		    op0 = tem;
2565		}
2566
cfef45c8
RG
2567	      return fold_ternary_loc (loc, subcode,
2568				       gimple_expr_type (stmt), op0, op1, op2);
2569	    }
2570
2571	  default:
2572	    gcc_unreachable ();
2573	  }
2574      }
2575
2576    case GIMPLE_CALL:
2577      {
25583c4f
RS
2578	tree fn;
2579
2580	if (gimple_call_internal_p (stmt))
2581	  /* No folding yet for these functions.  */
2582	  return NULL_TREE;
2583
2584	fn = (*valueize) (gimple_call_fn (stmt));
cfef45c8
RG
2585	if (TREE_CODE (fn) == ADDR_EXPR
2586	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
2587	    && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
2588	  {
2589	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
2590	    tree call, retval;
2591	    unsigned i;
2592	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
2593	      args[i] = (*valueize) (gimple_call_arg (stmt, i));
2594	    call = build_call_array_loc (loc,
2595					 gimple_call_return_type (stmt),
2596					 fn, gimple_call_num_args (stmt), args);
2597	    retval = fold_call_expr (EXPR_LOCATION (call), call, false);
2598	    if (retval)
2599	      /* fold_call_expr wraps the result inside a NOP_EXPR.  */
2600	      STRIP_NOPS (retval);
2601	    return retval;
2602	  }
2603	return NULL_TREE;
2604      }
2605
2606    default:
2607      return NULL_TREE;
2608    }
2609}
2610
2611/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2612 Returns NULL_TREE if folding to a constant is not possible, otherwise
2613 returns a constant according to is_gimple_min_invariant. */
2614
2615tree
2616gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
2617{
2618 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
2619 if (res && is_gimple_min_invariant (res))
2620 return res;
2621 return NULL_TREE;
2622}
2623
2624
2625/* The following set of functions are supposed to fold references using
2626 their constant initializers. */
2627
2628static tree fold_ctor_reference (tree type, tree ctor,
2629 unsigned HOST_WIDE_INT offset,
2630 unsigned HOST_WIDE_INT size);
2631
2632/* See if we can find constructor defining value of BASE.
2633 When we know the consructor with constant offset (such as
2634 base is array[40] and we do know constructor of array), then
2635 BIT_OFFSET is adjusted accordingly.
2636
2637 As a special case, return error_mark_node when constructor
2638 is not explicitly available, but it is known to be zero
2639 such as 'static const int a;'. */
2640static tree
2641get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
2642 tree (*valueize)(tree))
2643{
2644 HOST_WIDE_INT bit_offset2, size, max_size;
2645 if (TREE_CODE (base) == MEM_REF)
2646 {
2647 if (!integer_zerop (TREE_OPERAND (base, 1)))
2648 {
2649 if (!host_integerp (TREE_OPERAND (base, 1), 0))
2650 return NULL_TREE;
2651 *bit_offset += (mem_ref_offset (base).low
2652 * BITS_PER_UNIT);
2653 }
2654
2655 if (valueize
2656 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2657 base = valueize (TREE_OPERAND (base, 0));
2658 if (!base || TREE_CODE (base) != ADDR_EXPR)
2659 return NULL_TREE;
2660 base = TREE_OPERAND (base, 0);
2661 }
2662
2663 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
2664 DECL_INITIAL. If BASE is a nested reference into another
2665 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
2666 the inner reference. */
2667 switch (TREE_CODE (base))
2668 {
2669 case VAR_DECL:
2670 if (!const_value_known_p (base))
2671 return NULL_TREE;
2672
2673 /* Fallthru. */
2674 case CONST_DECL:
2675 if (!DECL_INITIAL (base)
2676 && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
2677 return error_mark_node;
2678 return DECL_INITIAL (base);
2679
2680 case ARRAY_REF:
2681 case COMPONENT_REF:
2682 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
2683 if (max_size == -1 || size != max_size)
2684 return NULL_TREE;
2685 *bit_offset += bit_offset2;
2686 return get_base_constructor (base, bit_offset, valueize);
2687
2688 case STRING_CST:
2689 case CONSTRUCTOR:
2690 return base;
2691
2692 default:
2693 return NULL_TREE;
2694 }
2695}
2696
2697/* CTOR is STRING_CST. Fold reference of type TYPE and size SIZE
2698 to the memory at bit OFFSET.
2699
2700 We do only simple job of folding byte accesses. */
2701
2702static tree
2703fold_string_cst_ctor_reference (tree type, tree ctor,
2704 unsigned HOST_WIDE_INT offset,
2705 unsigned HOST_WIDE_INT size)
2706{
2707 if (INTEGRAL_TYPE_P (type)
2708 && (TYPE_MODE (type)
2709 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2710 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2711 == MODE_INT)
2712 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
2713 && size == BITS_PER_UNIT
2714 && !(offset % BITS_PER_UNIT))
2715 {
2716 offset /= BITS_PER_UNIT;
2717 if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
2718 return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
2719 [offset]));
2720 /* Folding
2721 const char a[20]="hello";
2722 return a[10];
2723
2724 might lead to offset greater than string length. In this case we
2725 know value is either initialized to 0 or out of bounds. Return 0
2726 in both cases. */
2727 return build_zero_cst (type);
2728 }
2729 return NULL_TREE;
2730}
2731
2732/* CTOR is CONSTRUCTOR of an array type.  Fold reference of type TYPE and size
2733   SIZE to the memory at bit OFFSET.  */
2734
2735static tree
2736fold_array_ctor_reference (tree type, tree ctor,
2737			   unsigned HOST_WIDE_INT offset,
2738			   unsigned HOST_WIDE_INT size)
2739{
2740  unsigned HOST_WIDE_INT cnt;
2741  tree cfield, cval;
2742  double_int low_bound, elt_size;
2743  double_int index, max_index;
2744  double_int access_index;
b48e22b2 2745  tree domain_type = NULL_TREE, index_type = NULL_TREE;
cfef45c8
RG
2746  HOST_WIDE_INT inner_offset;
2747
2748  /* Compute low bound and elt size.  */
eb8f1123
RG
2749  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
2750    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
cfef45c8
RG
2751  if (domain_type && TYPE_MIN_VALUE (domain_type))
2752    {
2753      /* Static constructors for variably sized objects makes no sense.  */
2754      gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST)
b48e22b2 2755      index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
cfef45c8
RG
2756      low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
2757    }
2758  else
2759    low_bound = double_int_zero;
2760  /* Static constructors for variably sized objects makes no sense.  */
2761  gcc_assert (TREE_CODE(TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
2762	      == INTEGER_CST);
2763  elt_size =
2764    tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
2765
2766
2767  /* We can handle only constantly sized accesses that are known to not
2768     be larger than size of array element.  */
2769  if (!TYPE_SIZE_UNIT (type)
2770      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
2771      || double_int_cmp (elt_size,
2772			 tree_to_double_int (TYPE_SIZE_UNIT (type)), 0) < 0)
2773    return NULL_TREE;
2774
2775  /* Compute the array index we look for.  */
2776  access_index = double_int_udiv (uhwi_to_double_int (offset / BITS_PER_UNIT),
2777				  elt_size, TRUNC_DIV_EXPR)
2778  access_index = double_int_add (access_index, low_bound);
b48e22b2
EB
  /* Truncate/extend the index to the domain's index type so it can be
     compared against constructor indices below.  */
2779  if (index_type)
2780    access_index = double_int_ext (access_index,
2781				   TYPE_PRECISION (index_type),
2782				   TYPE_UNSIGNED (index_type));
cfef45c8
RG
2783
2784  /* And offset within the access.  */
2785  inner_offset = offset % (double_int_to_uhwi (elt_size) * BITS_PER_UNIT);
2786
2787  /* See if the array field is large enough to span whole access.  We do not
2788     care to fold accesses spanning multiple array indexes.  */
2789  if (inner_offset + size > double_int_to_uhwi (elt_size) * BITS_PER_UNIT)
2790    return NULL_TREE;
2791
  /* Start one below the low bound so a constructor entry with a NULL
     index (meaning "previous + 1") starts at the low bound.  */
2792  index = double_int_sub (low_bound, double_int_one);
b48e22b2
EB
2793  if (index_type)
2794    index = double_int_ext (index,
2795			    TYPE_PRECISION (index_type),
2796			    TYPE_UNSIGNED (index_type));
2797
cfef45c8
RG
2798  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
2799    {
2800      /* Array constructor might explicitly set index, or specify range
2801	 or leave index NULL meaning that it is next index after previous
2802	 one.  */
2803      if (cfield)
2804	{
2805	  if (TREE_CODE (cfield) == INTEGER_CST)
2806	    max_index = index = tree_to_double_int (cfield);
2807	  else
2808	    {
2809	      gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
2810	      index = tree_to_double_int (TREE_OPERAND (cfield, 0));
2811	      max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
2812	    }
2813	}
2814      else
b48e22b2
EB
2815	{
2816	  index = double_int_add (index, double_int_one);
2817	  if (index_type)
2818	    index = double_int_ext (index,
2819				    TYPE_PRECISION (index_type),
2820				    TYPE_UNSIGNED (index_type));
2821	  max_index = index;
2822	}
cfef45c8
RG
2823
2824      /* Do we have match?  */
2825      if (double_int_cmp (access_index, index, 1) >= 0
2826	  && double_int_cmp (access_index, max_index, 1) <= 0)
2827	return fold_ctor_reference (type, cval, inner_offset, size);
2828    }
2829  /* When memory is not explicitly mentioned in constructor,
2830     it is 0 (or out of range).  */
2831  return build_zero_cst (type);
2832}
2833
2834/* CTOR is CONSTRUCTOR of an aggregate or vector.
2835   Fold reference of type TYPE and size SIZE to the memory at bit OFFSET.  */
2836
2837static tree
2838fold_nonarray_ctor_reference (tree type, tree ctor,
2839			      unsigned HOST_WIDE_INT offset,
2840			      unsigned HOST_WIDE_INT size)
2841{
2842  unsigned HOST_WIDE_INT cnt;
2843  tree cfield, cval;
2844
  /* Scan all initialized fields for one overlapping the access.  */
2845  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
2846			    cval)
2847    {
2848      tree byte_offset = DECL_FIELD_OFFSET (cfield);
2849      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
2850      tree field_size = DECL_SIZE (cfield);
2851      double_int bitoffset;
2852      double_int byte_offset_cst = tree_to_double_int (byte_offset);
2853      double_int bits_per_unit_cst = uhwi_to_double_int (BITS_PER_UNIT);
b8b2b009 2854      double_int bitoffset_end, access_end;
cfef45c8
RG
2855
2856      /* Variable sized objects in static constructors makes no sense,
2857	 but field_size can be NULL for flexible array members.  */
2858      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
2859		  && TREE_CODE (byte_offset) == INTEGER_CST
2860		  && (field_size != NULL_TREE
2861		      ? TREE_CODE (field_size) == INTEGER_CST
2862		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
2863
2864      /* Compute bit offset of the field.  */
2865      bitoffset = double_int_add (tree_to_double_int (field_offset),
2866				  double_int_mul (byte_offset_cst,
2867						  bits_per_unit_cst));
2868      /* Compute bit offset where the field ends.  */
2869      if (field_size != NULL_TREE)
2870	bitoffset_end = double_int_add (bitoffset,
2871					tree_to_double_int (field_size));
2872      else
2873	bitoffset_end = double_int_zero;
2874
b8b2b009
JJ
2875      access_end = double_int_add (uhwi_to_double_int (offset),
2876				   uhwi_to_double_int (size));
2877
2878      /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
2879	 [BITOFFSET, BITOFFSET_END)?  */
2880      if (double_int_cmp (access_end, bitoffset, 0) > 0
cfef45c8
RG
2881	  && (field_size == NULL_TREE
2882	      || double_int_cmp (uhwi_to_double_int (offset),
2883				 bitoffset_end, 0) < 0))
2884	{
cfef45c8
RG
2885	  double_int inner_offset = double_int_sub (uhwi_to_double_int (offset),
2886						    bitoffset);
2887	  /* We do have overlap.  Now see if field is large enough to
2888	     cover the access.  Give up for accesses spanning multiple
2889	     fields.  */
2890	  if (double_int_cmp (access_end, bitoffset_end, 0) > 0)
2891	    return NULL_TREE;
b8b2b009
JJ
2892	  if (double_int_cmp (uhwi_to_double_int (offset), bitoffset, 0) < 0)
2893	    return NULL_TREE;
cfef45c8
RG
2894	  return fold_ctor_reference (type, cval,
2895				      double_int_to_uhwi (inner_offset), size);
2896	}
2897    }
2898  /* When memory is not explicitly mentioned in constructor, it is 0.  */
2899  return build_zero_cst (type);
2900}
2901
2902/* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
2903   to the memory at bit OFFSET.  */
2904
2905static tree
2906fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
2907		     unsigned HOST_WIDE_INT size)
2908{
2909  tree ret;
2910
2911  /* We found the field with exact match.  */
2912  if (useless_type_conversion_p (type, TREE_TYPE (ctor))
2913      && !offset)
2914    return canonicalize_constructor_val (ctor);
2915
2916  /* We are at the end of walk, see if we can view convert the
2917     result.  */
2918  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
2919      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
2920      && operand_equal_p (TYPE_SIZE (type),
2921			  TYPE_SIZE (TREE_TYPE (ctor)), 0))
2922    {
2923      ret = canonicalize_constructor_val (ctor);
2924      ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
2925      if (ret)
2926	STRIP_NOPS (ret);
2927      return ret;
2928    }
  /* Otherwise dispatch on the kind of initializer.  */
2929  if (TREE_CODE (ctor) == STRING_CST)
2930    return fold_string_cst_ctor_reference (type, ctor, offset, size);
2931  if (TREE_CODE (ctor) == CONSTRUCTOR)
2932    {
2933
eb8f1123
RG
2934      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
2935	  || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
cfef45c8
RG
2936	return fold_array_ctor_reference (type, ctor, offset, size);
2937      else
2938	return fold_nonarray_ctor_reference (type, ctor, offset, size);
2939    }
2940
2941  return NULL_TREE;
2942}
2943
2944/* Return the tree representing the element referenced by T if T is an
2945 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
2946 names using VALUEIZE. Return NULL_TREE otherwise. */
2947
2948tree
2949fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
2950{
2951 tree ctor, idx, base;
2952 HOST_WIDE_INT offset, size, max_size;
2953 tree tem;
2954
f8a7df45
RG
2955 if (TREE_THIS_VOLATILE (t))
2956 return NULL_TREE;
2957
cfef45c8
RG
2958 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
2959 return get_symbol_constant_value (t);
2960
2961 tem = fold_read_from_constant_string (t);
2962 if (tem)
2963 return tem;
2964
2965 switch (TREE_CODE (t))
2966 {
2967 case ARRAY_REF:
2968 case ARRAY_RANGE_REF:
2969 /* Constant indexes are handled well by get_base_constructor.
2970 Only special case variable offsets.
2971 FIXME: This code can't handle nested references with variable indexes
2972 (they will be handled only by iteration of ccp). Perhaps we can bring
2973 get_ref_base_and_extent here and make it use a valueize callback. */
2974 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
2975 && valueize
2976 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
b48e22b2 2977 && TREE_CODE (idx) == INTEGER_CST)
cfef45c8
RG
2978 {
2979 tree low_bound, unit_size;
b48e22b2 2980 double_int doffset;
cfef45c8
RG
2981
2982 /* If the resulting bit-offset is constant, track it. */
2983 if ((low_bound = array_ref_low_bound (t),
b48e22b2 2984 TREE_CODE (low_bound) == INTEGER_CST)
cfef45c8 2985 && (unit_size = array_ref_element_size (t),
b48e22b2
EB
2986 host_integerp (unit_size, 1))
2987 && (doffset = double_int_sext
2988 (double_int_sub (TREE_INT_CST (idx),
2989 TREE_INT_CST (low_bound)),
2990 TYPE_PRECISION (TREE_TYPE (idx))),
2991 double_int_fits_in_shwi_p (doffset)))
cfef45c8 2992 {
b48e22b2 2993 offset = double_int_to_shwi (doffset);
cfef45c8
RG
2994 offset *= TREE_INT_CST_LOW (unit_size);
2995 offset *= BITS_PER_UNIT;
2996
2997 base = TREE_OPERAND (t, 0);
2998 ctor = get_base_constructor (base, &offset, valueize);
2999 /* Empty constructor. Always fold to 0. */
3000 if (ctor == error_mark_node)
3001 return build_zero_cst (TREE_TYPE (t));
3002 /* Out of bound array access. Value is undefined,
3003 but don't fold. */
3004 if (offset < 0)
3005 return NULL_TREE;
3006 /* We can not determine ctor. */
3007 if (!ctor)
3008 return NULL_TREE;
3009 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
3010 TREE_INT_CST_LOW (unit_size)
3011 * BITS_PER_UNIT);
3012 }
3013 }
3014 /* Fallthru. */
3015
3016 case COMPONENT_REF:
3017 case BIT_FIELD_REF:
3018 case TARGET_MEM_REF:
3019 case MEM_REF:
3020 base = get_ref_base_and_extent (t, &offset, &size, &max_size);
3021 ctor = get_base_constructor (base, &offset, valueize);
3022
3023 /* Empty constructor. Always fold to 0. */
3024 if (ctor == error_mark_node)
3025 return build_zero_cst (TREE_TYPE (t));
3026 /* We do not know precise address. */
3027 if (max_size == -1 || max_size != size)
3028 return NULL_TREE;
3029 /* We can not determine ctor. */
3030 if (!ctor)
3031 return NULL_TREE;
3032
3033 /* Out of bound array access. Value is undefined, but don't fold. */
3034 if (offset < 0)
3035 return NULL_TREE;
3036
3037 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size);
3038
3039 case REALPART_EXPR:
3040 case IMAGPART_EXPR:
3041 {
3042 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
3043 if (c && TREE_CODE (c) == COMPLEX_CST)
3044 return fold_build1_loc (EXPR_LOCATION (t),
3045 TREE_CODE (t), TREE_TYPE (t), c);
3046 break;
3047 }
3048
3049 default:
3050 break;
3051 }
3052
3053 return NULL_TREE;
3054}
3055
3056tree
3057fold_const_aggregate_ref (tree t)
3058{
3059 return fold_const_aggregate_ref_1 (t, NULL);
3060}
06bc3ec7 3061
81fa35bd
MJ
3062/* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
3063 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
3064 KNOWN_BINFO carries the binfo describing the true type of
3065 OBJ_TYPE_REF_OBJECT(REF). */
3066
3067tree
3068gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo)
3069{
3070 unsigned HOST_WIDE_INT offset, size;
3071 tree v, fn;
3072
3073 v = BINFO_VTABLE (known_binfo);
3074 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
3075 if (!v)
3076 return NULL_TREE;
3077
3078 if (TREE_CODE (v) == POINTER_PLUS_EXPR)
3079 {
3080 offset = tree_low_cst (TREE_OPERAND (v, 1), 1) * BITS_PER_UNIT;
3081 v = TREE_OPERAND (v, 0);
3082 }
3083 else
3084 offset = 0;
3085
3086 if (TREE_CODE (v) != ADDR_EXPR)
3087 return NULL_TREE;
3088 v = TREE_OPERAND (v, 0);
3089
3090 if (TREE_CODE (v) != VAR_DECL
3091 || !DECL_VIRTUAL_P (v)
5548ca35
JH
3092 || !DECL_INITIAL (v)
3093 || DECL_INITIAL (v) == error_mark_node)
81fa35bd
MJ
3094 return NULL_TREE;
3095 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
3096 size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))), 1);
3097 offset += token * size;
3098 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), DECL_INITIAL (v),
3099 offset, size);
8e8483e6 3100 if (!fn || integer_zerop (fn))
81fa35bd
MJ
3101 return NULL_TREE;
3102 gcc_assert (TREE_CODE (fn) == ADDR_EXPR
3103 || TREE_CODE (fn) == FDESC_EXPR);
3104 fn = TREE_OPERAND (fn, 0);
3105 gcc_assert (TREE_CODE (fn) == FUNCTION_DECL);
3106
3107 /* When cgraph node is missing and function is not public, we cannot
3108 devirtualize. This can happen in WHOPR when the actual method
3109 ends up in other partition, because we found devirtualization
3110 possibility too late. */
3111 if (!can_refer_decl_in_current_unit_p (fn))
3112 return NULL_TREE;
3113
7501ca28
RG
3114 /* Make sure we create a cgraph node for functions we'll reference.
3115 They can be non-existent if the reference comes from an entry
3116 of an external vtable for example. */
3117 cgraph_get_create_node (fn);
3118
81fa35bd
MJ
3119 return fn;
3120}
3121
06bc3ec7
BS
3122/* Return true iff VAL is a gimple expression that is known to be
3123 non-negative. Restricted to floating-point inputs. */
3124
3125bool
3126gimple_val_nonnegative_real_p (tree val)
3127{
3128 gimple def_stmt;
3129
3130 gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));
3131
3132 /* Use existing logic for non-gimple trees. */
3133 if (tree_expr_nonnegative_p (val))
3134 return true;
3135
3136 if (TREE_CODE (val) != SSA_NAME)
3137 return false;
3138
3139 /* Currently we look only at the immediately defining statement
3140 to make this determination, since recursion on defining
3141 statements of operands can lead to quadratic behavior in the
3142 worst case. This is expected to catch almost all occurrences
3143 in practice. It would be possible to implement limited-depth
3144 recursion if important cases are lost. Alternatively, passes
3145 that need this information (such as the pow/powi lowering code
3146 in the cse_sincos pass) could be revised to provide it through
3147 dataflow propagation. */
3148
3149 def_stmt = SSA_NAME_DEF_STMT (val);
3150
3151 if (is_gimple_assign (def_stmt))
3152 {
3153 tree op0, op1;
3154
3155 /* See fold-const.c:tree_expr_nonnegative_p for additional
3156 cases that could be handled with recursion. */
3157
3158 switch (gimple_assign_rhs_code (def_stmt))
3159 {
3160 case ABS_EXPR:
3161 /* Always true for floating-point operands. */
3162 return true;
3163
3164 case MULT_EXPR:
3165 /* True if the two operands are identical (since we are
3166 restricted to floating-point inputs). */
3167 op0 = gimple_assign_rhs1 (def_stmt);
3168 op1 = gimple_assign_rhs2 (def_stmt);
3169
3170 if (op0 == op1
3171 || operand_equal_p (op0, op1, 0))
3172 return true;
3173
3174 default:
3175 return false;
3176 }
3177 }
3178 else if (is_gimple_call (def_stmt))
3179 {
3180 tree fndecl = gimple_call_fndecl (def_stmt);
3181 if (fndecl
3182 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3183 {
3184 tree arg1;
3185
3186 switch (DECL_FUNCTION_CODE (fndecl))
3187 {
3188 CASE_FLT_FN (BUILT_IN_ACOS):
3189 CASE_FLT_FN (BUILT_IN_ACOSH):
3190 CASE_FLT_FN (BUILT_IN_CABS):
3191 CASE_FLT_FN (BUILT_IN_COSH):
3192 CASE_FLT_FN (BUILT_IN_ERFC):
3193 CASE_FLT_FN (BUILT_IN_EXP):
3194 CASE_FLT_FN (BUILT_IN_EXP10):
3195 CASE_FLT_FN (BUILT_IN_EXP2):
3196 CASE_FLT_FN (BUILT_IN_FABS):
3197 CASE_FLT_FN (BUILT_IN_FDIM):
3198 CASE_FLT_FN (BUILT_IN_HYPOT):
3199 CASE_FLT_FN (BUILT_IN_POW10):
3200 return true;
3201
3202 CASE_FLT_FN (BUILT_IN_SQRT):
3203 /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
3204 nonnegative inputs. */
3205 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
3206 return true;
3207
3208 break;
3209
3210 CASE_FLT_FN (BUILT_IN_POWI):
3211 /* True if the second argument is an even integer. */
3212 arg1 = gimple_call_arg (def_stmt, 1);
3213
3214 if (TREE_CODE (arg1) == INTEGER_CST
3215 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
3216 return true;
3217
3218 break;
3219
3220 CASE_FLT_FN (BUILT_IN_POW):
3221 /* True if the second argument is an even integer-valued
3222 real. */
3223 arg1 = gimple_call_arg (def_stmt, 1);
3224
3225 if (TREE_CODE (arg1) == REAL_CST)
3226 {
3227 REAL_VALUE_TYPE c;
3228 HOST_WIDE_INT n;
3229
3230 c = TREE_REAL_CST (arg1);
3231 n = real_to_integer (&c);
3232
3233 if ((n & 1) == 0)
3234 {
3235 REAL_VALUE_TYPE cint;
3236 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3237 if (real_identical (&c, &cint))
3238 return true;
3239 }
3240 }
3241
3242 break;
3243
3244 default:
3245 return false;
3246 }
3247 }
3248 }
3249
3250 return false;
3251}