/* Statement simplification on GIMPLE.
   Copyright (C) 2010 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "target.h"
#include "gimple-fold.h"

/* Return true when DECL can be referenced from the current unit.
   We can get declarations that are not possible to reference for
   various reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to another compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to a reference
	to a method that was partitioned elsewhere.
	In this case we have a static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred to by external vtables that
	we devirtualize only during the final compilation stage.
	At this time we have already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */
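
/* A sketch of case 1: for a header-only class such as

     struct A { virtual int f () { return 0; } };

   the vtable initializer seen while analyzing another unit points at
   A::f, whose decl carries both TREE_STATIC and DECL_EXTERNAL there;
   the checks below must decide whether such a decl is referable.  */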

static bool
can_refer_decl_in_current_unit_p (tree decl)
{
  struct varpool_node *vnode;
  struct cgraph_node *node;

  if (!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
    return true;
  /* The external flag is set, so we deal with a C++ reference
     to a static object from another file.  */
  if (DECL_EXTERNAL (decl) && TREE_STATIC (decl)
      && TREE_CODE (decl) == VAR_DECL)
    {
      /* Just guard against the frontend setting the flags
	 incorrectly.  Those variables should never
	 be finalized.  */
      gcc_checking_assert (!(vnode = varpool_get_node (decl))
			   || !vnode->finalized);
      return false;
    }
  /* When a function is public, we can always introduce a new reference.
     The exception is COMDAT functions, where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We are not at the ltrans stage, so don't worry about WHOPR.
     Also, while still gimplifying, all referred COMDAT functions will
     be produced.
     ??? As observed in PR20991, for already optimized-out COMDAT virtual
     functions we need not necessarily give up, because the copy will be
     output elsewhere when the corresponding vtable is output.  */
  if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
    return true;
  /* If we already output the function body, we are safe.  */
  if (TREE_ASM_WRITTEN (decl))
    return true;
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      node = cgraph_get_node (decl);
      /* Check that we still have the function body and that we haven't
	 yet decided to eliminate the offline copy of the function.
	 The second point is important when devirtualization happens
	 during the final compilation stage, where making a new reference
	 no longer causes the callee to be compiled.  */
      if (!node || !node->analyzed || node->global.inlined_to)
	return false;
    }
  else if (TREE_CODE (decl) == VAR_DECL)
    {
      vnode = varpool_get_node (decl);
      if (!vnode || !vnode->finalized)
	return false;
    }
  return true;
}

/* CVAL is a value taken from the DECL_INITIAL of a variable.  Try to
   transform it into a form acceptable to is_gimple_min_invariant.  */
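
/* For example (a sketch, assuming 4-byte int), an initializer

     static int a[4];
     static int *p = &a[1];

   may reach us from the frontend as the POINTER_PLUS_EXPR &a p+ 4;
   the code below folds that back to the invariant &a[1].  */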

tree
canonicalize_constructor_val (tree cval)
{
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR)
    {
      tree t = maybe_fold_offset_to_address (EXPR_LOCATION (cval),
					     TREE_OPERAND (cval, 0),
					     TREE_OPERAND (cval, 1),
					     TREE_TYPE (cval));
      if (t)
	cval = t;
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (cval, 0));

      if (base
	  && (TREE_CODE (base) == VAR_DECL
	      || TREE_CODE (base) == FUNCTION_DECL)
	  && !can_refer_decl_in_current_unit_p (base))
	return NULL_TREE;
      if (cfun && base && TREE_CODE (base) == VAR_DECL)
	add_referenced_var (base);
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
    }
  return cval;
}

/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */
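
/* E.g. for

     static const int forty_two = 42;

   const_value_known_p holds and DECL_INITIAL is the INTEGER_CST 42, so
   uses of FORTY_TWO fold to 42; a 'const' scalar left without an
   initializer folds to zero when it cannot be overridden later.  */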

tree
get_symbol_constant_value (tree sym)
{
  if (const_value_known_p (sym))
    {
      tree val = DECL_INITIAL (sym);
      if (val)
	{
	  val = canonicalize_constructor_val (val);
	  if (val && is_gimple_min_invariant (val))
	    return val;
	  else
	    return NULL_TREE;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
	      || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
	return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}


/* Return true if we may propagate the address expression ADDR into the
   dereference DEREF and cancel them.  */
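
/* E.g. given int i; and a pointer p_1 = &i, this predicate decides
   whether &i may be substituted into the dereference *p_1 so the pair
   cancels to a plain use of I; with a volatile or incompletely typed
   operand the propagation is refused.  */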

bool
may_propagate_address_into_dereference (tree addr, tree deref)
{
  gcc_assert (TREE_CODE (deref) == MEM_REF
	      && TREE_CODE (addr) == ADDR_EXPR);

  /* Don't propagate if ADDR's operand has incomplete type.  */
  if (!COMPLETE_TYPE_P (TREE_TYPE (TREE_OPERAND (addr, 0))))
    return false;

  /* If the address is invariant then we do not need to preserve restrict
     qualifications.  But we do need to preserve volatile qualifiers until
     we can annotate the folded dereference itself properly.  */
  if (is_gimple_min_invariant (addr)
      && (!TREE_THIS_VOLATILE (deref)
	  || TYPE_VOLATILE (TREE_TYPE (addr))))
    return useless_type_conversion_p (TREE_TYPE (deref),
				      TREE_TYPE (TREE_OPERAND (addr, 0)));

  /* Else both the address substitution and the folding must result in
     a valid useless type conversion sequence.  */
  return (useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (deref, 0)),
				     TREE_TYPE (addr))
	  && useless_type_conversion_p (TREE_TYPE (deref),
					TREE_TYPE (TREE_OPERAND (addr, 0))));
}


/* A subroutine of fold_stmt.  Attempts to fold *(A+O) to A[X].
   BASE has array type.  OFFSET is a byte displacement.

   LOC is the location of the original expression.  */
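
/* For example, with 4-byte int and

     int a[8];

   the GIMPLE dereference *(&a p+ 8) (offset in bytes) folds to a[2]:
   8 divides exactly by the element size 4, and index 2 is in bounds.  */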

static tree
maybe_fold_offset_to_array_ref (location_t loc, tree base, tree offset)
{
  tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
  tree array_type, elt_type, elt_size;
  tree domain_type;

  /* If BASE is an ARRAY_REF, we can pick up another offset (this time
     measured in units of the size of the element type) from that
     ARRAY_REF.  We can't do anything if either is variable.

     The case we handle here is *(&A[N]+O).  */
  if (TREE_CODE (base) == ARRAY_REF)
    {
      tree low_bound = array_ref_low_bound (base);

      elt_offset = TREE_OPERAND (base, 1);
      if (TREE_CODE (low_bound) != INTEGER_CST
	  || TREE_CODE (elt_offset) != INTEGER_CST)
	return NULL_TREE;

      elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
      base = TREE_OPERAND (base, 0);
    }

  /* Ignore stupid user tricks of indexing non-array variables.  */
  array_type = TREE_TYPE (base);
  if (TREE_CODE (array_type) != ARRAY_TYPE)
    return NULL_TREE;
  elt_type = TREE_TYPE (array_type);

  /* Use signed size type for intermediate computation on the index.  */
  idx_type = ssizetype;

  /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
     element type (so we can use the alignment if it's not constant).
     Otherwise, compute the offset as an index by using a division.  If the
     division isn't exact, then don't do anything.  */
  elt_size = TYPE_SIZE_UNIT (elt_type);
  if (!elt_size)
    return NULL;
  if (integer_zerop (offset))
    {
      if (TREE_CODE (elt_size) != INTEGER_CST)
	elt_size = size_int (TYPE_ALIGN (elt_type));

      idx = build_int_cst (idx_type, 0);
    }
  else
    {
      unsigned HOST_WIDE_INT lquo, lrem;
      HOST_WIDE_INT hquo, hrem;
      double_int soffset;

      /* The final array offset should be signed, so we need
	 to sign-extend the (possibly pointer) offset here
	 and use signed division.  */
      soffset = double_int_sext (tree_to_double_int (offset),
				 TYPE_PRECISION (TREE_TYPE (offset)));
      if (TREE_CODE (elt_size) != INTEGER_CST
	  || div_and_round_double (TRUNC_DIV_EXPR, 0,
				   soffset.low, soffset.high,
				   TREE_INT_CST_LOW (elt_size),
				   TREE_INT_CST_HIGH (elt_size),
				   &lquo, &hquo, &lrem, &hrem)
	  || lrem || hrem)
	return NULL_TREE;

      idx = build_int_cst_wide (idx_type, lquo, hquo);
    }

  /* Assume the low bound is zero.  If there is a domain type, get the
     low bound, if any, convert the index into that type, and add the
     low bound.  */
  min_idx = build_int_cst (idx_type, 0);
  domain_type = TYPE_DOMAIN (array_type);
  if (domain_type)
    {
      idx_type = domain_type;
      if (TYPE_MIN_VALUE (idx_type))
	min_idx = TYPE_MIN_VALUE (idx_type);
      else
	min_idx = fold_convert (idx_type, min_idx);

      if (TREE_CODE (min_idx) != INTEGER_CST)
	return NULL_TREE;

      elt_offset = fold_convert (idx_type, elt_offset);
    }

  if (!integer_zerop (min_idx))
    idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
  if (!integer_zerop (elt_offset))
    idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);

  /* Make sure to possibly truncate late after offsetting.  */
  idx = fold_convert (idx_type, idx);

  /* We don't want to construct access past array bounds.  For example
       char *(c[4]);
       c[3][2];
     should not be simplified into (*c)[14] or tree-vrp will
     give false warnings.
     This is only an issue for multi-dimensional arrays.  */
  if (TREE_CODE (elt_type) == ARRAY_TYPE
      && domain_type)
    {
      if (TYPE_MAX_VALUE (domain_type)
	  && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST
	  && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type), idx))
	return NULL_TREE;
      else if (TYPE_MIN_VALUE (domain_type)
	       && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
	       && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
	return NULL_TREE;
      else if (compare_tree_int (idx, 0) < 0)
	return NULL_TREE;
    }

  {
    tree t = build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
    SET_EXPR_LOCATION (t, loc);
    return t;
  }
}


/* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE[index].
   LOC is the location of the original expression.

   Before attempting the conversion, strip off existing ADDR_EXPRs.  */
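
/* E.g. with int a[2]; and ORIG_TYPE int, the dereference *(&a p+ 4)
   is expressed as a[1]; with a zero offset and compatible types the
   stripped base itself is returned.  */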

tree
maybe_fold_offset_to_reference (location_t loc, tree base, tree offset,
				tree orig_type)
{
  tree ret;

  STRIP_NOPS (base);
  if (TREE_CODE (base) != ADDR_EXPR)
    return NULL_TREE;

  base = TREE_OPERAND (base, 0);
  if (types_compatible_p (orig_type, TREE_TYPE (base))
      && integer_zerop (offset))
    return base;

  ret = maybe_fold_offset_to_array_ref (loc, base, offset);
  if (ret && types_compatible_p (orig_type, TREE_TYPE (ret)))
    return ret;
  return NULL_TREE;
}

/* Attempt to express (ORIG_TYPE)ADDR+OFFSET as (*ADDR)[index].
   LOC is the location of the original expression.  */
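
/* The address form of the above: e.g. with int a[2]; the expression
   &a p+ 4 becomes &a[1], provided the resulting address converts
   uselessly to ORIG_TYPE.  */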

tree
maybe_fold_offset_to_address (location_t loc, tree addr, tree offset,
			      tree orig_type)
{
  tree base, ret;

  STRIP_NOPS (addr);
  if (TREE_CODE (addr) != ADDR_EXPR)
    return NULL_TREE;
  base = TREE_OPERAND (addr, 0);
  ret = maybe_fold_offset_to_array_ref (loc, base, offset);
  if (ret)
    {
      ret = build_fold_addr_expr (ret);
      if (!useless_type_conversion_p (orig_type, TREE_TYPE (ret)))
	return NULL_TREE;
      SET_EXPR_LOCATION (ret, loc);
    }

  return ret;
}


/* A quaint feature extant in our address arithmetic is that there
   can be hidden type changes here.  The type of the result need
   not be the same as the type of the input pointer.

   What we're after here is an expression of the form
	(T *)(&array + const)
   where array is OP0, const is OP1, RES_TYPE is T and
   the cast doesn't actually exist, but is implicit in the
   type of the POINTER_PLUS_EXPR.  We'd like to turn this into
	&array[x]
   which may be able to propagate further.  */
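
/* For instance, with

     static char buf[16];

   the POINTER_PLUS_EXPR (char *) &buf p+ 4 matches the pattern above
   and is rewritten to &buf[4], which can propagate further into
   dereferences and comparisons.  */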

tree
maybe_fold_stmt_addition (location_t loc, tree res_type, tree op0, tree op1)
{
  tree ptd_type;
  tree t;

  /* The first operand should be an ADDR_EXPR.  */
  if (TREE_CODE (op0) != ADDR_EXPR)
    return NULL_TREE;
  op0 = TREE_OPERAND (op0, 0);

  /* It had better be a constant.  */
  if (TREE_CODE (op1) != INTEGER_CST)
    {
      /* Or op0 should now be A[0] and the non-constant offset defined
	 via a multiplication by the array element size.  */
      if (TREE_CODE (op0) == ARRAY_REF
	  /* As we will end up creating a variable index array access
	     in the outermost array dimension make sure there isn't
	     a more inner array that the index could overflow to.  */
	  && TREE_CODE (TREE_OPERAND (op0, 0)) != ARRAY_REF
	  && integer_zerop (TREE_OPERAND (op0, 1))
	  && TREE_CODE (op1) == SSA_NAME)
	{
	  gimple offset_def = SSA_NAME_DEF_STMT (op1);
	  tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (op0));
	  if (!host_integerp (elsz, 1)
	      || !is_gimple_assign (offset_def))
	    return NULL_TREE;

	  /* Do not build array references of something that we can't
	     see the true number of array dimensions for.  */
	  if (!DECL_P (TREE_OPERAND (op0, 0))
	      && !handled_component_p (TREE_OPERAND (op0, 0)))
	    return NULL_TREE;

	  if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
	      && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
	      && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz))
	    return build_fold_addr_expr
			  (build4 (ARRAY_REF, TREE_TYPE (op0),
				   TREE_OPERAND (op0, 0),
				   gimple_assign_rhs1 (offset_def),
				   TREE_OPERAND (op0, 2),
				   TREE_OPERAND (op0, 3)));
	  else if (integer_onep (elsz)
		   && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
	    return build_fold_addr_expr
			  (build4 (ARRAY_REF, TREE_TYPE (op0),
				   TREE_OPERAND (op0, 0),
				   op1,
				   TREE_OPERAND (op0, 2),
				   TREE_OPERAND (op0, 3)));
	}
      else if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE
	       /* Ditto.  */
	       && TREE_CODE (TREE_TYPE (TREE_TYPE (op0))) != ARRAY_TYPE
	       && TREE_CODE (op1) == SSA_NAME)
	{
	  gimple offset_def = SSA_NAME_DEF_STMT (op1);
	  tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (op0)));
	  if (!host_integerp (elsz, 1)
	      || !is_gimple_assign (offset_def))
	    return NULL_TREE;

	  /* Do not build array references of something that we can't
	     see the true number of array dimensions for.  */
	  if (!DECL_P (op0)
	      && !handled_component_p (op0))
	    return NULL_TREE;

	  if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
	      && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
	      && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz))
	    return build_fold_addr_expr
			  (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)),
				   op0, gimple_assign_rhs1 (offset_def),
				   integer_zero_node, NULL_TREE));
	  else if (integer_onep (elsz)
		   && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
	    return build_fold_addr_expr
			  (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)),
				   op0, op1,
				   integer_zero_node, NULL_TREE));
	}

      return NULL_TREE;
    }

  /* If the first operand is an ARRAY_REF, expand it so that we can fold
     the offset into it.  */
  while (TREE_CODE (op0) == ARRAY_REF)
    {
      tree array_obj = TREE_OPERAND (op0, 0);
      tree array_idx = TREE_OPERAND (op0, 1);
      tree elt_type = TREE_TYPE (op0);
      tree elt_size = TYPE_SIZE_UNIT (elt_type);
      tree min_idx;

      if (TREE_CODE (array_idx) != INTEGER_CST)
	break;
      if (TREE_CODE (elt_size) != INTEGER_CST)
	break;

      /* Un-bias the index by the min index of the array type.  */
      min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
      if (min_idx)
	{
	  min_idx = TYPE_MIN_VALUE (min_idx);
	  if (min_idx)
	    {
	      if (TREE_CODE (min_idx) != INTEGER_CST)
		break;

	      array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
	      if (!integer_zerop (min_idx))
		array_idx = int_const_binop (MINUS_EXPR, array_idx,
					     min_idx, 0);
	    }
	}

      /* Convert the index to a byte offset.  */
      array_idx = fold_convert (sizetype, array_idx);
      array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);

      /* Update the operands for the next round, or for folding.  */
      op1 = int_const_binop (PLUS_EXPR,
			     array_idx, op1, 0);
      op0 = array_obj;
    }

  ptd_type = TREE_TYPE (res_type);
  /* If we want a pointer to void, reconstruct the reference from the
     array element type.  A pointer to that can be trivially converted
     to void *.  This happens as we fold (void *)(ptr p+ off).  */
  if (VOID_TYPE_P (ptd_type)
      && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
    ptd_type = TREE_TYPE (TREE_TYPE (op0));

  /* At which point we can try some of the same things as for indirects.  */
  t = maybe_fold_offset_to_array_ref (loc, op0, op1);
  if (t)
    {
      t = build_fold_addr_expr (t);
      if (!useless_type_conversion_p (res_type, TREE_TYPE (t)))
	return NULL_TREE;
      SET_EXPR_LOCATION (t, loc);
    }

  return t;
}

/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */
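
/* Typical input is the *&x left behind by constant propagation, e.g.

     int x;
     MEM[(int *) &x] = 0;

   which folds back to the direct store x = 0 (IS_LHS true in that
   case).  */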

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree *t = &expr;
  tree result;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
			   TREE_CODE (expr),
			   TREE_TYPE (expr),
			   TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
			     TREE_CODE (expr),
			     TREE_TYPE (expr),
			     TREE_OPERAND (expr, 0),
			     TREE_OPERAND (expr, 1),
			     TREE_OPERAND (expr, 2));

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM_REFs invariant address operand.  Do this first
     to avoid feeding non-canonical MEM_REFs elsewhere.  */
  if (TREE_CODE (*t) == MEM_REF
      && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
    {
      bool volatile_p = TREE_THIS_VOLATILE (*t);
      tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
			      TREE_OPERAND (*t, 0),
			      TREE_OPERAND (*t, 1));
      if (tem)
	{
	  TREE_THIS_VOLATILE (tem) = volatile_p;
	  *t = tem;
	  tem = maybe_fold_reference (expr, is_lhs);
	  if (tem)
	    return tem;
	  return expr;
	}
    }

  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  /* Fold back MEM_REFs to reference trees.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && (TREE_THIS_VOLATILE (*t)
	  == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
      && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
      && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
	  == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
      /* We have to look out here to not drop a required conversion
	 from the rhs to the lhs if is_lhs, but we don't have the
	 rhs here to verify that.  Thus require strict type
	 compatibility.  */
      && types_compatible_p (TREE_TYPE (*t),
			     TREE_TYPE (TREE_OPERAND
					  (TREE_OPERAND (*t, 0), 0))))
    {
      tree tem;
      *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tem = maybe_fold_reference (expr, is_lhs);
      if (tem)
	return tem;
      return expr;
    }
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  tem = maybe_fold_reference (expr, is_lhs);
	  if (tem)
	    return tem;
	  return expr;
	}
    }

  return NULL_TREE;
}


/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	/* Try to fold a conditional expression.  */
	if (TREE_CODE (rhs) == COND_EXPR)
	  {
	    tree op0 = COND_EXPR_COND (rhs);
	    tree tem;
	    bool set = false;
	    location_t cond_loc = EXPR_LOCATION (rhs);

	    if (COMPARISON_CLASS_P (op0))
	      {
		fold_defer_overflow_warnings ();
		tem = fold_binary_loc (cond_loc,
				       TREE_CODE (op0), TREE_TYPE (op0),
				       TREE_OPERAND (op0, 0),
				       TREE_OPERAND (op0, 1));
		/* This is actually a conditional expression, not a GIMPLE
		   conditional statement, however, the valid_gimple_rhs_p
		   test still applies.  */
		set = (tem && is_gimple_condexpr (tem)
		       && valid_gimple_rhs_p (tem));
		fold_undefer_overflow_warnings (set, stmt, 0);
	      }
	    else if (is_gimple_min_invariant (op0))
	      {
		tem = op0;
		set = true;
	      }
	    else
	      return NULL_TREE;

	    if (set)
	      result = fold_build3_loc (cond_loc, COND_EXPR, TREE_TYPE (rhs), tem,
					COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
	  }

	else if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		 && (CONSTRUCTOR_NELTS (rhs)
		     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (TREE_CODE (val) != INTEGER_CST
		  && TREE_CODE (val) != REAL_CST
		  && TREE_CODE (val) != FIXED_CST)
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return unshare_expr (get_symbol_constant_value (rhs));

	/* If we couldn't fold the RHS, hand over to the generic
	   fold routines.  */
	if (result == NULL_TREE)
	  result = fold (rhs);

	/* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
	   that may have been added by fold, and "useless" type
	   conversions that might now be apparent due to propagation.  */
	STRIP_USELESS_TYPE_CONVERSION (result);

	if (result != rhs && valid_gimple_rhs_p (result))
	  return result;

	return NULL_TREE;
      }
      break;

    case GIMPLE_UNARY_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
	if (result)
	  {
	    /* If the operation was a conversion do _not_ mark a
	       resulting constant with TREE_OVERFLOW if the original
	       constant was not.  These conversions have implementation
	       defined behavior and retaining the TREE_OVERFLOW flag
	       here would confuse later passes such as VRP.  */
	    if (CONVERT_EXPR_CODE_P (subcode)
		&& TREE_CODE (result) == INTEGER_CST
		&& TREE_CODE (rhs) == INTEGER_CST)
	      TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);

	    STRIP_USELESS_TYPE_CONVERSION (result);
	    if (valid_gimple_rhs_p (result))
	      return result;
	  }
	else if (CONVERT_EXPR_CODE_P (subcode)
		 && POINTER_TYPE_P (gimple_expr_type (stmt))
		 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
	  {
	    tree type = gimple_expr_type (stmt);
	    tree t = maybe_fold_offset_to_address (loc,
						   gimple_assign_rhs1 (stmt),
						   integer_zero_node, type);
	    if (t)
	      return t;
	  }
      }
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to fold pointer addition.  */
      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
	{
	  tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
	  if (TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
	    {
	      type = build_pointer_type (TREE_TYPE (TREE_TYPE (type)));
	      if (!useless_type_conversion_p
		    (TREE_TYPE (gimple_assign_lhs (stmt)), type))
		type = TREE_TYPE (gimple_assign_rhs1 (stmt));
	    }
	  result = maybe_fold_stmt_addition (gimple_location (stmt),
					     type,
					     gimple_assign_rhs1 (stmt),
					     gimple_assign_rhs2 (stmt));
	}

      if (!result)
	result = fold_binary_loc (loc, subcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)),
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;

	  /* Fold might have produced non-GIMPLE, so if we trust it blindly
	     we lose canonicalization opportunities.  Do not go again
	     through fold here though, or the same non-GIMPLE will be
	     produced.  */
	  if (commutative_tree_code (subcode)
	      && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
				       gimple_assign_rhs2 (stmt), false))
	    return build2 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
			   gimple_assign_rhs2 (stmt),
			   gimple_assign_rhs1 (stmt));
	}
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
				 TREE_TYPE (gimple_assign_lhs (stmt)),
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;

	  /* Fold might have produced non-GIMPLE, so if we trust it blindly
	     we lose canonicalization opportunities.  Do not go again
	     through fold here though, or the same non-GIMPLE will be
	     produced.  */
	  if (commutative_ternary_tree_code (subcode)
	      && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
				       gimple_assign_rhs2 (stmt), false))
	    return build3 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
			   gimple_assign_rhs2 (stmt),
			   gimple_assign_rhs1 (stmt),
			   gimple_assign_rhs3 (stmt));
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}

/* Attempt to fold a conditional statement.  Return true if any changes were
   made.  We only attempt to fold the condition expression, and do not perform
   any transformation that would require alteration of the cfg.  It is
   assumed that the operands have been previously folded.  */
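
/* E.g. after propagating constants, a GIMPLE_COND like

     if (2 > 1)

   folds to a constant and the condition is rewritten in place;
   removing the now-dead edge is left to the callers, since that
   would alter the cfg.  */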

static bool
fold_gimple_cond (gimple stmt)
{
  tree result = fold_binary_loc (gimple_location (stmt),
				 gimple_cond_code (stmt),
				 boolean_type_node,
				 gimple_cond_lhs (stmt),
				 gimple_cond_rhs (stmt));

  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
	{
	  gimple_cond_set_condition_from_tree (stmt, result);
	  return true;
	}
    }

  return false;
}

/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */
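
/* A sketch of the memcpy case: when the call

     # .MEM_3 = VDEF <.MEM_2>
     memcpy (&a, &b, 8);

   is folded to the assignment a = b, the store keeps .MEM_2 as its
   VUSE and .MEM_3 as its VDEF, so statements downstream still see a
   consistent virtual operand chain.  */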

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  tree tmp = NULL_TREE;  /* Silence warning.  */
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = gimple_seq_alloc ();
  struct gimplify_ctx gctx;
  gimple last = NULL;
  gimple laststore = NULL;
  tree reaching_vuse;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  lhs = gimple_call_lhs (stmt);
  reaching_vuse = gimple_vuse (stmt);

  push_gimplify_context (&gctx);

  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_remove (si_p, true);
	  return;
	}
    }
  else
    tmp = get_initialized_tmp_var (expr, &stmts, NULL);

  pop_gimplify_context (NULL);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* The replacement can expose previously unreferenced variables.  */
  for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
    {
      if (last)
	{
	  gsi_insert_before (si_p, last, GSI_NEW_STMT);
	  gsi_next (si_p);
	}
      new_stmt = gsi_stmt (i);
      if (gimple_in_ssa_p (cfun))
	{
	  find_new_referenced_vars (new_stmt);
	  mark_symbols_for_renaming (new_stmt);
	}
      /* If the new statement has a VUSE, update it with the exact SSA name
	 we know will reach this one.  */
      if (gimple_vuse (new_stmt))
	{
	  /* If we've also seen a previous store create a new VDEF for
	     the latter one, and make that the new reaching VUSE.  */
	  if (laststore)
	    {
	      reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
	      gimple_set_vdef (laststore, reaching_vuse);
	      update_stmt (laststore);
	      laststore = NULL;
	    }
	  gimple_set_vuse (new_stmt, reaching_vuse);
	  gimple_set_modified (new_stmt, true);
	}
      if (gimple_assign_single_p (new_stmt)
	  && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	{
	  laststore = new_stmt;
	}
      last = new_stmt;
    }

  if (lhs == NULL_TREE)
    {
      /* If we replace a call without LHS that has a VDEF and our new
	 sequence ends with a store we must make that store have the same
	 vdef in order not to break the sequencing.  This can happen
	 for instance when folding memcpy calls into assignments.  */
      if (gimple_vdef (stmt) && laststore)
	{
	  gimple_set_vdef (laststore, gimple_vdef (stmt));
	  if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	    SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
	  update_stmt (laststore);
	}
      else if (gimple_in_ssa_p (cfun))
	{
	  unlink_stmt_vdef (stmt);
	  release_defs (stmt);
	}
      new_stmt = last;
    }
  else
    {
      if (last)
	{
	  gsi_insert_before (si_p, last, GSI_NEW_STMT);
	  gsi_next (si_p);
	}
      if (laststore && is_gimple_reg (lhs))
	{
	  gimple_set_vdef (laststore, gimple_vdef (stmt));
	  update_stmt (laststore);
	  if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	    SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
	  laststore = NULL;
	}
      else if (laststore)
	{
	  reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
	  gimple_set_vdef (laststore, reaching_vuse);
	  update_stmt (laststore);
	  laststore = NULL;
	}
      new_stmt = gimple_build_assign (lhs, tmp);
      if (!is_gimple_reg (tmp))
	gimple_set_vuse (new_stmt, reaching_vuse);
      if (!is_gimple_reg (lhs))
	{
	  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
	  if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	    SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = new_stmt;
	}
      else if (reaching_vuse == gimple_vuse (stmt))
	unlink_stmt_vdef (stmt);
    }

  gimple_set_location (new_stmt, gimple_location (stmt));
  gsi_replace (si_p, new_stmt, false);
}

/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */
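
/* E.g. for

     const char *s = b_1 ? "ab" : "wxyz";

   a query with TYPE == 1 on S returns the maximum string length 4,
   while TYPE == 0 fails because the two possible lengths differ.  */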

static bool
get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      if (TREE_CODE (arg) == COND_EXPR)
	return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
	       && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      else if (TREE_CODE (arg) == ADDR_EXPR
	       && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	       && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_maxval_strlen (TREE_OPERAND (aop0, 0),
				      length, visited, type);
	}

      if (type == 2)
	{
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length)
	{
	  if (type > 0)
	    {
	      if (TREE_CODE (*length) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*length, val))
		*length = val;
	      return true;
	    }
	  else if (simple_cst_equal (val, *length) != 1)
	    return false;
	}

      *length = val;
      return true;
    }

  /* If we were already here, break the infinite cycle.  */
  if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
	 constant length or come from another SSA_NAME with a constant
	 length.  */
      if (gimple_assign_single_p (def_stmt)
	  || gimple_assign_unary_nop_p (def_stmt))
	{
	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  return get_maxval_strlen (rhs, length, visited, type);
	}
      return false;

    case GIMPLE_PHI:
      {
	/* All the arguments of the PHI node must have the same constant
	   length.  */
	unsigned i;

	for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	  {
	    tree arg = gimple_phi_arg (def_stmt, i)->def;

	    /* If this PHI has itself as an argument, we cannot
	       determine the string length of this argument.  However,
	       if we can find a constant string length for the other
	       PHI args then we can still be sure that this is a
	       constant string length.  So be optimistic and just
	       continue with the next argument.  */
	    if (arg == gimple_phi_result (def_stmt))
	      continue;

	    if (!get_maxval_strlen (arg, length, visited, type))
	      return false;
	  }
      }
      return true;

    default:
      return false;
    }
}


/* Fold a builtin call in statement STMT.  Returns a simplified tree.
   We may return a non-constant expression, including another call
   to a different function and with different arguments, e.g.,
   substituting memcpy for strcpy when the string length is known.
   Note that some builtins expand into inline code that may not
   be valid in GIMPLE.  Callers must take care.  */
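
/* E.g. the call

     strcpy (d, "abc");

   may come back as memcpy (d, "abc", 4) because the source length is
   known to be 3; the caller is responsible for gimplifying the
   replacement if it is not a valid GIMPLE call.  */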

tree
gimple_fold_builtin (gimple stmt)
{
  tree result, val[3];
  tree callee, a;
  int arg_idx, type;
  bitmap visited;
  bool ignore;
  int nargs;
  location_t loc = gimple_location (stmt);

  gcc_assert (is_gimple_call (stmt));

  ignore = (gimple_call_lhs (stmt) == NULL);

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = gimple_call_fndecl (stmt);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  nargs = gimple_call_num_args (stmt);
  if (nargs == 0)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      arg_idx = 0;
      type = 0;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
      arg_idx = 1;
      type = 0;
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
      arg_idx = 2;
      type = 2;
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      arg_idx = 1;
      type = 1;
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      arg_idx = 1;
      type = 2;
      break;
    default:
      return NULL_TREE;
    }

  if (arg_idx >= nargs)
    return NULL_TREE;

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);
  bitmap_clear (visited);

  memset (val, 0, sizeof (val));
  a = gimple_call_arg (stmt, arg_idx);
  if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
    val[arg_idx] = NULL_TREE;

  BITMAP_FREE (visited);

  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (val[0] && nargs == 1)
	{
	  tree new_val =
	      fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);

	  /* If the result is not a valid gimple value, or not a cast
	     of a valid gimple value, then we cannot use the result.  */
	  if (is_gimple_val (new_val)
	      || (CONVERT_EXPR_P (new_val)
		  && is_gimple_val (TREE_OPERAND (new_val, 0))))
	    return new_val;
	}
      break;

    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
	result = fold_builtin_strcpy (loc, callee,
				      gimple_call_arg (stmt, 0),
				      gimple_call_arg (stmt, 1),
				      val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_strncpy (loc, callee,
				       gimple_call_arg (stmt, 0),
				       gimple_call_arg (stmt, 1),
				       gimple_call_arg (stmt, 2),
				       val[1]);
      break;

    case BUILT_IN_FPUTS:
      if (nargs == 2)
	result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, false, val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      if (nargs == 2)
	result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, true, val[0]);
      break;

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_memory_chk (loc, callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  gimple_call_arg (stmt, 3),
					  val[2], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_stxcpy_chk (loc, callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  val[1], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_strncpy_chk (loc, gimple_call_arg (stmt, 0),
					   gimple_call_arg (stmt, 1),
					   gimple_call_arg (stmt, 2),
					   gimple_call_arg (stmt, 3),
					   val[2]);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
	result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
						   DECL_FUNCTION_CODE (callee));
      break;

    default:
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);
  return result;
}

/* Return a declaration of a function which an OBJ_TYPE_REF references.  TOKEN
   is the integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).  If a call to the function must be accompanied
   with a this adjustment, the constant which should be added to this pointer
   is stored to *DELTA.  If REFUSE_THUNKS is true, return NULL if the function
   is a thunk (other than a this adjustment which is dealt with by DELTA).  */
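
/* E.g. when analysis proves the object's dynamic type, TOKEN selects
   the vtable slot: TOKEN 0 names the first virtual method in
   KNOWN_BINFO, and the FNDECL stored there is returned so the caller
   can replace the indirect OBJ_TYPE_REF call by a direct call.  */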

tree
gimple_get_virt_mehtod_for_binfo (HOST_WIDE_INT token, tree known_binfo,
				  tree *delta, bool refuse_thunks)
{
  HOST_WIDE_INT i;
  tree v, fndecl;
  struct cgraph_node *node;

  v = BINFO_VIRTUALS (known_binfo);
  /* If there are no virtual methods, leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;
  i = 0;
  while (i != token)
    {
      i += (TARGET_VTABLE_USES_DESCRIPTORS
	    ? TARGET_VTABLE_USES_DESCRIPTORS : 1);
      v = TREE_CHAIN (v);
    }

  fndecl = TREE_VALUE (v);
  node = cgraph_get_node_or_alias (fndecl);
  if (refuse_thunks
      && (!node
	  /* Bail out if it is a thunk declaration.  Since simple this_adjusting
	     thunks are represented by a constant in TREE_PURPOSE of items in
	     BINFO_VIRTUALS, this is a more complicated type which we cannot
	     handle as yet.

	     FIXME: Remove the following condition once we are able to represent
	     thunk information on call graph edges.  */
	  || (node->same_body_alias && node->thunk.thunk_p)))
    return NULL_TREE;

  /* When the cgraph node is missing and the function is not public, we cannot
     devirtualize.  This can happen in WHOPR when the actual method
     ends up in another partition, because we found the devirtualization
     possibility too late.  */
  if (!can_refer_decl_in_current_unit_p (TREE_VALUE (v)))
    return NULL_TREE;

  *delta = TREE_PURPOSE (v);
  gcc_checking_assert (host_integerp (*delta, 0));
  return fndecl;
}

/* Generate code adjusting the first parameter of the call statement
   determined by GSI by DELTA.  */
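
/* I.e. for a devirtualized call f (this_1, ...) and DELTA 8 this
   emits

     tmp_2 = this_1 p+ 8;

   before the call and rewrites it to f (tmp_2, ...), performing the
   this-pointer adjustment for a method of a non-primary base.  */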

void
gimple_adjust_this_by_delta (gimple_stmt_iterator *gsi, tree delta)
{
  gimple call_stmt = gsi_stmt (*gsi);
  tree parm, tmp;
  gimple new_stmt;

  delta = fold_convert (sizetype, delta);
  gcc_assert (gimple_call_num_args (call_stmt) >= 1);
  parm = gimple_call_arg (call_stmt, 0);
  gcc_assert (POINTER_TYPE_P (TREE_TYPE (parm)));
  tmp = create_tmp_var (TREE_TYPE (parm), NULL);
  add_referenced_var (tmp);

  tmp = make_ssa_name (tmp, NULL);
  new_stmt = gimple_build_assign_with_ops (POINTER_PLUS_EXPR, tmp, parm, delta);
  SSA_NAME_DEF_STMT (tmp) = new_stmt;
  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
  gimple_call_set_arg (call_stmt, 0, tmp);
}

/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gimple stmt = gsi_stmt (*gsi);

  tree callee = gimple_call_fndecl (stmt);

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (!inplace && callee && DECL_BUILT_IN (callee))
    {
      tree result = gimple_fold_builtin (stmt);

      if (result)
	{
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  return true;
	}
    }
  return false;
}

/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  unsigned i;

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	unsigned old_num_ops = gimple_num_ops (stmt);
	tree new_rhs = fold_gimple_assign (gsi);
	tree lhs = gimple_assign_lhs (stmt);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    case GIMPLE_CALL:
      /* Fold *& in call arguments.  */
      for (i = 0; i < gimple_call_num_args (stmt); ++i)
	if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
	  {
	    tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	    if (tmp)
	      {
		gimple_call_set_arg (stmt, i, tmp);
		changed = true;
	      }
	  }
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
	{
	  tree link = gimple_asm_output_op (stmt, i);
	  tree op = TREE_VALUE (link);
	  if (REFERENCE_CLASS_P (op)
	      && (op = maybe_fold_reference (op, true)) != NULL_TREE)
	    {
	      TREE_VALUE (link) = op;
	      changed = true;
	    }
	}
      for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
	{
	  tree link = gimple_asm_input_op (stmt, i);
	  tree op = TREE_VALUE (link);
	  if (REFERENCE_CLASS_P (op)
	      && (op = maybe_fold_reference (op, false)) != NULL_TREE)
	    {
	      TREE_VALUE (link) = op;
	      changed = true;
	    }
	}
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  return changed;
}

/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in an unfolded state resulting from, for example, constant propagation,
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false);
}

/* Perform the minimal folding on statement STMT.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement STMT should be in valid gimple form but may
   be in an unfolded state resulting from, for example, constant propagation,
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple stmt)
{
  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
  bool changed = fold_stmt_1 (&gsi, true);
  gcc_assert (gsi_stmt (gsi) == stmt);
  return changed;
}

/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
   if EXPR is null or we don't know how.
   If non-null, the result always has boolean type.  */
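
/* E.g. for integer operands, canonicalize_bool (a_1 < b_2, true)
   yields a_1 >= b_2, while canonicalize_bool (x_3, false) for a
   non-boolean X_3 yields the boolean-typed comparison x_3 != 0.  */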

static tree
canonicalize_bool (tree expr, bool invert)
{
  if (!expr)
    return NULL_TREE;
  else if (invert)
    {
      if (integer_nonzerop (expr))
	return boolean_false_node;
      else if (integer_zerop (expr))
	return boolean_true_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (EQ_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
	return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
  else
    {
      if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
	return expr;
      if (integer_nonzerop (expr))
	return boolean_true_node;
      else if (integer_zerop (expr))
	return boolean_false_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (NE_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
	return fold_build2 (TREE_CODE (expr),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
}

/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */
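
/* E.g. with the definition D_1 = a_2 < b_3, both D_1 itself and the
   comparison D_1 != 0 are recognized below as equivalent to
   (a_2 < b_3).  */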

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}

/* Check to see if two boolean expressions OP1 and OP2 are logically
   equivalent.  */

static bool
same_bool_result_p (const_tree op1, const_tree op2)
{
  /* Simple cases first.  */
  if (operand_equal_p (op1, op2, 0))
    return true;

  /* Check the cases where at least one of the operands is a comparison.
     These are a bit smarter than operand_equal_p in that they apply some
     identities on SSA_NAMEs.  */
  if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
      && same_bool_comparison_p (op1, TREE_CODE (op2),
				 TREE_OPERAND (op2, 0),
				 TREE_OPERAND (op2, 1)))
    return true;
  if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
      && same_bool_comparison_p (op2, TREE_CODE (op1),
				 TREE_OPERAND (op1, 0),
				 TREE_OPERAND (op1, 1)))
    return true;

  /* Default case.  */
  return false;
}
1751
1752/* Forward declarations for some mutually recursive functions. */
1753
1754static tree
1755and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1756 enum tree_code code2, tree op2a, tree op2b);
1757static tree
1758and_var_with_comparison (tree var, bool invert,
1759 enum tree_code code2, tree op2a, tree op2b);
1760static tree
1761and_var_with_comparison_1 (gimple stmt,
1762 enum tree_code code2, tree op2a, tree op2b);
1763static tree
1764or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1765 enum tree_code code2, tree op2a, tree op2b);
1766static tree
1767or_var_with_comparison (tree var, bool invert,
1768 enum tree_code code2, tree op2a, tree op2b);
1769static tree
1770or_var_with_comparison_1 (gimple stmt,
1771 enum tree_code code2, tree op2a, tree op2b);
1772
1773/* Helper function for and_comparisons_1: try to simplify the AND of the
1774 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1775 If INVERT is true, invert the value of the VAR before doing the AND.
1776 Return NULL_TREE if we can't simplify this to a single expression. */
1777
1778static tree
1779and_var_with_comparison (tree var, bool invert,
1780 enum tree_code code2, tree op2a, tree op2b)
1781{
1782 tree t;
1783 gimple stmt = SSA_NAME_DEF_STMT (var);
1784
1785 /* We can only deal with variables whose definitions are assignments. */
1786 if (!is_gimple_assign (stmt))
1787 return NULL_TREE;
1788
1789 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1790 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1791 Then we only have to consider the simpler non-inverted cases. */
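/* For example (illustrative): !var AND (x < y) becomes
   !(var OR (x >= y)); we compute var OR (x >= y) below and let
   canonicalize_bool apply the final negation. */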
1792 if (invert)
1793 t = or_var_with_comparison_1 (stmt,
1794 invert_tree_comparison (code2, false),
1795 op2a, op2b);
1796 else
1797 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
1798 return canonicalize_bool (t, invert);
1799}
1800
1801/* Try to simplify the AND of the ssa variable defined by the assignment
1802 STMT with the comparison specified by (OP2A CODE2 OP2B).
1803 Return NULL_TREE if we can't simplify this to a single expression. */
1804
1805static tree
1806and_var_with_comparison_1 (gimple stmt,
1807 enum tree_code code2, tree op2a, tree op2b)
1808{
1809 tree var = gimple_assign_lhs (stmt);
1810 tree true_test_var = NULL_TREE;
1811 tree false_test_var = NULL_TREE;
1812 enum tree_code innercode = gimple_assign_rhs_code (stmt);
1813
1814 /* Check for identities like (var AND (var == 0)) => false. */
1815 if (TREE_CODE (op2a) == SSA_NAME
1816 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
1817 {
1818 if ((code2 == NE_EXPR && integer_zerop (op2b))
1819 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
1820 {
1821 true_test_var = op2a;
1822 if (var == true_test_var)
1823 return var;
1824 }
1825 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
1826 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
1827 {
1828 false_test_var = op2a;
1829 if (var == false_test_var)
1830 return boolean_false_node;
1831 }
1832 }
1833
1834 /* If the definition is a comparison, recurse on it. */
1835 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
1836 {
1837 tree t = and_comparisons_1 (innercode,
1838 gimple_assign_rhs1 (stmt),
1839 gimple_assign_rhs2 (stmt),
1840 code2,
1841 op2a,
1842 op2b);
1843 if (t)
1844 return t;
1845 }
1846
1847 /* If the definition is an AND or OR expression, we may be able to
1848 simplify by reassociating. */
1849 if (innercode == TRUTH_AND_EXPR
1850 || innercode == TRUTH_OR_EXPR
1851 || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
1852 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)))
1853 {
1854 tree inner1 = gimple_assign_rhs1 (stmt);
1855 tree inner2 = gimple_assign_rhs2 (stmt);
1856 gimple s;
1857 tree t;
1858 tree partial = NULL_TREE;
1859 bool is_and = (innercode == TRUTH_AND_EXPR || innercode == BIT_AND_EXPR);
1860
1861 /* Check for boolean identities that don't require recursive examination
1862 of inner1/inner2:
1863 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
1864 inner1 AND (inner1 OR inner2) => inner1
1865 !inner1 AND (inner1 AND inner2) => false
1866 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
1867 Likewise for similar cases involving inner2. */
1868 if (inner1 == true_test_var)
1869 return (is_and ? var : inner1);
1870 else if (inner2 == true_test_var)
1871 return (is_and ? var : inner2);
1872 else if (inner1 == false_test_var)
1873 return (is_and
1874 ? boolean_false_node
1875 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
1876 else if (inner2 == false_test_var)
1877 return (is_and
1878 ? boolean_false_node
1879 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
1880
1881 /* Next, redistribute/reassociate the AND across the inner tests.
1882 Compute the first partial result, (inner1 AND (op2a code2 op2b)) */
1883 if (TREE_CODE (inner1) == SSA_NAME
1884 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
1885 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1886 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1887 gimple_assign_rhs1 (s),
1888 gimple_assign_rhs2 (s),
1889 code2, op2a, op2b)))
1890 {
1891 /* Handle the AND case, where we are reassociating:
1892 (inner1 AND inner2) AND (op2a code2 op2b)
1893 => (t AND inner2)
1894 If the partial result t is a constant, we win. Otherwise
1895 continue on to try reassociating with the other inner test. */
1896 if (is_and)
1897 {
1898 if (integer_onep (t))
1899 return inner2;
1900 else if (integer_zerop (t))
1901 return boolean_false_node;
1902 }
1903
1904 /* Handle the OR case, where we are redistributing:
1905 (inner1 OR inner2) AND (op2a code2 op2b)
1906 => (t OR (inner2 AND (op2a code2 op2b))) */
1907 else if (integer_onep (t))
1908 return boolean_true_node;
1909
1910 /* Save partial result for later. */
1911 partial = t;
1912 }
1913
1914 /* Compute the second partial result, (inner2 AND (op2a code2 op2b)) */
1915 if (TREE_CODE (inner2) == SSA_NAME
1916 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
1917 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1918 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1919 gimple_assign_rhs1 (s),
1920 gimple_assign_rhs2 (s),
1921 code2, op2a, op2b)))
1922 {
1923 /* Handle the AND case, where we are reassociating:
1924 (inner1 AND inner2) AND (op2a code2 op2b)
1925 => (inner1 AND t) */
1926 if (is_and)
1927 {
1928 if (integer_onep (t))
1929 return inner1;
1930 else if (integer_zerop (t))
1931 return boolean_false_node;
1932 /* If both are the same, we can apply the identity
1933 (x AND x) == x. */
1934 else if (partial && same_bool_result_p (t, partial))
1935 return t;
1936 }
1937
1938 /* Handle the OR case, where we are redistributing:
1939 (inner1 OR inner2) AND (op2a code2 op2b)
1940 => (t OR (inner1 AND (op2a code2 op2b)))
1941 => (t OR partial) */
1942 else
1943 {
1944 if (integer_onep (t))
1945 return boolean_true_node;
1946 else if (partial)
1947 {
1948 /* We already got a simplification for the other
1949 operand to the redistributed OR expression. The
1950 interesting case is when at least one is false.
1951 Or, if both are the same, we can apply the identity
1952 (x OR x) == x. */
1953 if (integer_zerop (partial))
1954 return t;
1955 else if (integer_zerop (t))
1956 return partial;
1957 else if (same_bool_result_p (t, partial))
1958 return t;
1959 }
1960 }
1961 }
1962 }
1963 return NULL_TREE;
1964}
1965
1966/* Try to simplify the AND of two comparisons defined by
1967 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1968 If this can be done without constructing an intermediate value,
1969 return the resulting tree; otherwise NULL_TREE is returned.
1970 This function is deliberately asymmetric as it recurses on SSA_DEFs
1971 in the first comparison but not the second. */
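/* For example (illustrative): (x <= y) AND (x >= y) combines below via
   combine_comparisons into (x == y), and (x < 5) AND (x == 7) merges
   against the constants into boolean_false_node. */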
1972
1973static tree
1974and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1975 enum tree_code code2, tree op2a, tree op2b)
1976{
1977 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
1978 if (operand_equal_p (op1a, op2a, 0)
1979 && operand_equal_p (op1b, op2b, 0))
1980 {
1981 tree t = combine_comparisons (UNKNOWN_LOCATION,
1982 TRUTH_ANDIF_EXPR, code1, code2,
1983 boolean_type_node, op1a, op1b);
1984 if (t)
1985 return t;
1986 }
1987
1988 /* Likewise the swapped case of the above. */
1989 if (operand_equal_p (op1a, op2b, 0)
1990 && operand_equal_p (op1b, op2a, 0))
1991 {
1992 tree t = combine_comparisons (UNKNOWN_LOCATION,
1993 TRUTH_ANDIF_EXPR, code1,
1994 swap_tree_comparison (code2),
1995 boolean_type_node, op1a, op1b);
1996 if (t)
1997 return t;
1998 }
1999
2000 /* If both comparisons are of the same value against constants, we might
2001 be able to merge them. */
2002 if (operand_equal_p (op1a, op2a, 0)
2003 && TREE_CODE (op1b) == INTEGER_CST
2004 && TREE_CODE (op2b) == INTEGER_CST)
2005 {
2006 int cmp = tree_int_cst_compare (op1b, op2b);
2007
2008 /* If we have (op1a == op1b), we should either be able to
2009 return that or FALSE, depending on whether the constant op1b
2010 also satisfies the other comparison against op2b. */
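/* For example (illustrative): for (x == 5) AND (x < 7), cmp < 0 and
   code2 == LT_EXPR give val = true, so we return (x == 5); for
   (x == 5) AND (x < 3), val = false and we return boolean_false_node. */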
2011 if (code1 == EQ_EXPR)
2012 {
2013 bool done = true;
2014 bool val;
2015 switch (code2)
2016 {
2017 case EQ_EXPR: val = (cmp == 0); break;
2018 case NE_EXPR: val = (cmp != 0); break;
2019 case LT_EXPR: val = (cmp < 0); break;
2020 case GT_EXPR: val = (cmp > 0); break;
2021 case LE_EXPR: val = (cmp <= 0); break;
2022 case GE_EXPR: val = (cmp >= 0); break;
2023 default: done = false;
2024 }
2025 if (done)
2026 {
2027 if (val)
2028 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2029 else
2030 return boolean_false_node;
2031 }
2032 }
2033 /* Likewise if the second comparison is an == comparison. */
2034 else if (code2 == EQ_EXPR)
2035 {
2036 bool done = true;
2037 bool val;
2038 switch (code1)
2039 {
2040 case EQ_EXPR: val = (cmp == 0); break;
2041 case NE_EXPR: val = (cmp != 0); break;
2042 case LT_EXPR: val = (cmp > 0); break;
2043 case GT_EXPR: val = (cmp < 0); break;
2044 case LE_EXPR: val = (cmp >= 0); break;
2045 case GE_EXPR: val = (cmp <= 0); break;
2046 default: done = false;
2047 }
2048 if (done)
2049 {
2050 if (val)
2051 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2052 else
2053 return boolean_false_node;
2054 }
2055 }
2056
2057 /* Same business with inequality tests. */
2058 else if (code1 == NE_EXPR)
2059 {
2060 bool val;
2061 switch (code2)
2062 {
2063 case EQ_EXPR: val = (cmp != 0); break;
2064 case NE_EXPR: val = (cmp == 0); break;
2065 case LT_EXPR: val = (cmp >= 0); break;
2066 case GT_EXPR: val = (cmp <= 0); break;
2067 case LE_EXPR: val = (cmp > 0); break;
2068 case GE_EXPR: val = (cmp < 0); break;
2069 default:
2070 val = false;
2071 }
2072 if (val)
2073 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2074 }
2075 else if (code2 == NE_EXPR)
2076 {
2077 bool val;
2078 switch (code1)
2079 {
2080 case EQ_EXPR: val = (cmp == 0); break;
2081 case NE_EXPR: val = (cmp != 0); break;
2082 case LT_EXPR: val = (cmp <= 0); break;
2083 case GT_EXPR: val = (cmp >= 0); break;
2084 case LE_EXPR: val = (cmp < 0); break;
2085 case GE_EXPR: val = (cmp > 0); break;
2086 default:
2087 val = false;
2088 }
2089 if (val)
2090 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2091 }
2092
2093 /* Choose the more restrictive of two < or <= comparisons. */
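/* For example (illustrative): (x < 5) AND (x <= 5) folds to (x < 5),
   and (x <= 3) AND (x < 7) folds to (x <= 3). */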
2094 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2095 && (code2 == LT_EXPR || code2 == LE_EXPR))
2096 {
2097 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2098 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2099 else
2100 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2101 }
2102
2103 /* Likewise choose the more restrictive of two > or >= comparisons. */
2104 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2105 && (code2 == GT_EXPR || code2 == GE_EXPR))
2106 {
2107 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2108 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2109 else
2110 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2111 }
2112
2113 /* Check for singleton ranges. */
2114 else if (cmp == 0
2115 && ((code1 == LE_EXPR && code2 == GE_EXPR)
2116 || (code1 == GE_EXPR && code2 == LE_EXPR)))
2117 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
2118
2119 /* Check for disjoint ranges. */
2120 else if (cmp <= 0
2121 && (code1 == LT_EXPR || code1 == LE_EXPR)
2122 && (code2 == GT_EXPR || code2 == GE_EXPR))
2123 return boolean_false_node;
2124 else if (cmp >= 0
2125 && (code1 == GT_EXPR || code1 == GE_EXPR)
2126 && (code2 == LT_EXPR || code2 == LE_EXPR))
2127 return boolean_false_node;
2128 }
2129
2130 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2131 NAME's definition is a truth value. See if there are any simplifications
2132 that can be done against the NAME's definition. */
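/* For example (illustrative): if op1a is defined by b_1 = (x < 5), then
   (b_1 != 0) AND (x < 10) recurses through that definition and folds
   ((x < 5) AND (x < 10)) to (x < 5). */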
2133 if (TREE_CODE (op1a) == SSA_NAME
2134 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2135 && (integer_zerop (op1b) || integer_onep (op1b)))
2136 {
2137 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2138 || (code1 == NE_EXPR && integer_onep (op1b)));
2139 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2140 switch (gimple_code (stmt))
2141 {
2142 case GIMPLE_ASSIGN:
2143 /* Try to simplify by copy-propagating the definition. */
2144 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
2145
2146 case GIMPLE_PHI:
2147 /* If every argument to the PHI produces the same result when
2148 ANDed with the second comparison, we win.
2149 Do not do this unless the type is bool since we need a bool
2150 result here anyway. */
2151 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2152 {
2153 tree result = NULL_TREE;
2154 unsigned i;
2155 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2156 {
2157 tree arg = gimple_phi_arg_def (stmt, i);
2158
2159 /* If this PHI has itself as an argument, ignore it.
2160 If all the other args produce the same result,
2161 we're still OK. */
2162 if (arg == gimple_phi_result (stmt))
2163 continue;
2164 else if (TREE_CODE (arg) == INTEGER_CST)
2165 {
2166 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
2167 {
2168 if (!result)
2169 result = boolean_false_node;
2170 else if (!integer_zerop (result))
2171 return NULL_TREE;
2172 }
2173 else if (!result)
2174 result = fold_build2 (code2, boolean_type_node,
2175 op2a, op2b);
2176 else if (!same_bool_comparison_p (result,
2177 code2, op2a, op2b))
2178 return NULL_TREE;
2179 }
2180 else if (TREE_CODE (arg) == SSA_NAME)
2181 {
2182 tree temp = and_var_with_comparison (arg, invert,
2183 code2, op2a, op2b);
2184 if (!temp)
2185 return NULL_TREE;
2186 else if (!result)
2187 result = temp;
2188 else if (!same_bool_result_p (result, temp))
2189 return NULL_TREE;
2190 }
2191 else
2192 return NULL_TREE;
2193 }
2194 return result;
2195 }
2196
2197 default:
2198 break;
2199 }
2200 }
2201 return NULL_TREE;
2202}
2203
2204/* Try to simplify the AND of two comparisons, specified by
2205 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2206 If this can be simplified to a single expression (without
2207 introducing more SSA variables to hold intermediate values),
2208 return the resulting tree. Otherwise return NULL_TREE.
2209 If the result expression is non-null, it has boolean type. */
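/* For example (illustrative, with five and three INTEGER_CSTs):
   maybe_fold_and_comparisons (GT_EXPR, x, five, GT_EXPR, x, three)
   returns the tree for (x > 5), the more restrictive bound. */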
2210
2211tree
2212maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
2213 enum tree_code code2, tree op2a, tree op2b)
2214{
2215 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2216 if (t)
2217 return t;
2218 else
2219 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2220}
2221
2222/* Helper function for or_comparisons_1: try to simplify the OR of the
2223 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
2224 If INVERT is true, invert the value of VAR before doing the OR.
2225 Return NULL_TREE if we can't simplify this to a single expression. */
2226
2227static tree
2228or_var_with_comparison (tree var, bool invert,
2229 enum tree_code code2, tree op2a, tree op2b)
2230{
2231 tree t;
2232 gimple stmt = SSA_NAME_DEF_STMT (var);
2233
2234 /* We can only deal with variables whose definitions are assignments. */
2235 if (!is_gimple_assign (stmt))
2236 return NULL_TREE;
2237
2238 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
2239 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
2240 Then we only have to consider the simpler non-inverted cases. */
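/* For example (illustrative): !var OR (x == y) becomes
   !(var AND (x != y)); we compute var AND (x != y) below and negate
   the result in canonicalize_bool. */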
2241 if (invert)
2242 t = and_var_with_comparison_1 (stmt,
2243 invert_tree_comparison (code2, false),
2244 op2a, op2b);
2245 else
2246 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
2247 return canonicalize_bool (t, invert);
2248}
2249
2250/* Try to simplify the OR of the ssa variable defined by the assignment
2251 STMT with the comparison specified by (OP2A CODE2 OP2B).
2252 Return NULL_TREE if we can't simplify this to a single expression. */
2253
2254static tree
2255or_var_with_comparison_1 (gimple stmt,
2256 enum tree_code code2, tree op2a, tree op2b)
2257{
2258 tree var = gimple_assign_lhs (stmt);
2259 tree true_test_var = NULL_TREE;
2260 tree false_test_var = NULL_TREE;
2261 enum tree_code innercode = gimple_assign_rhs_code (stmt);
2262
2263 /* Check for identities like (var OR (var != 0)) => true. */
2264 if (TREE_CODE (op2a) == SSA_NAME
2265 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
2266 {
2267 if ((code2 == NE_EXPR && integer_zerop (op2b))
2268 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
2269 {
2270 true_test_var = op2a;
2271 if (var == true_test_var)
2272 return var;
2273 }
2274 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
2275 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
2276 {
2277 false_test_var = op2a;
2278 if (var == false_test_var)
2279 return boolean_true_node;
2280 }
2281 }
2282
2283 /* If the definition is a comparison, recurse on it. */
2284 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
2285 {
2286 tree t = or_comparisons_1 (innercode,
2287 gimple_assign_rhs1 (stmt),
2288 gimple_assign_rhs2 (stmt),
2289 code2,
2290 op2a,
2291 op2b);
2292 if (t)
2293 return t;
2294 }
2295
2296 /* If the definition is an AND or OR expression, we may be able to
2297 simplify by reassociating. */
2298 if (innercode == TRUTH_AND_EXPR
2299 || innercode == TRUTH_OR_EXPR
2300 || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
2301 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)))
2302 {
2303 tree inner1 = gimple_assign_rhs1 (stmt);
2304 tree inner2 = gimple_assign_rhs2 (stmt);
2305 gimple s;
2306 tree t;
2307 tree partial = NULL_TREE;
2308 bool is_or = (innercode == TRUTH_OR_EXPR || innercode == BIT_IOR_EXPR);
2309
2310 /* Check for boolean identities that don't require recursive examination
2311 of inner1/inner2:
2312 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2313 inner1 OR (inner1 AND inner2) => inner1
2314 !inner1 OR (inner1 OR inner2) => true
2315 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2316 */
2317 if (inner1 == true_test_var)
2318 return (is_or ? var : inner1);
2319 else if (inner2 == true_test_var)
2320 return (is_or ? var : inner2);
2321 else if (inner1 == false_test_var)
2322 return (is_or
2323 ? boolean_true_node
2324 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
2325 else if (inner2 == false_test_var)
2326 return (is_or
2327 ? boolean_true_node
2328 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
2329
2330 /* Next, redistribute/reassociate the OR across the inner tests.
2331 Compute the first partial result, (inner1 OR (op2a code2 op2b)) */
2332 if (TREE_CODE (inner1) == SSA_NAME
2333 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
2334 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2335 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2336 gimple_assign_rhs1 (s),
2337 gimple_assign_rhs2 (s),
2338 code2, op2a, op2b)))
2339 {
2340 /* Handle the OR case, where we are reassociating:
2341 (inner1 OR inner2) OR (op2a code2 op2b)
2342 => (t OR inner2)
2343 If the partial result t is a constant, we win. Otherwise
2344 continue on to try reassociating with the other inner test. */
2345 if (is_or)
2346 {
2347 if (integer_onep (t))
2348 return boolean_true_node;
2349 else if (integer_zerop (t))
2350 return inner2;
2351 }
2352
2353 /* Handle the AND case, where we are redistributing:
2354 (inner1 AND inner2) OR (op2a code2 op2b)
2355 => (t AND (inner2 OR (op2a code2 op2b))) */
2356 else if (integer_zerop (t))
2357 return boolean_false_node;
2358
2359 /* Save partial result for later. */
2360 partial = t;
2361 }
2362
2363 /* Compute the second partial result, (inner2 OR (op2a code2 op2b)) */
2364 if (TREE_CODE (inner2) == SSA_NAME
2365 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2366 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2367 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2368 gimple_assign_rhs1 (s),
2369 gimple_assign_rhs2 (s),
2370 code2, op2a, op2b)))
2371 {
2372 /* Handle the OR case, where we are reassociating:
2373 (inner1 OR inner2) OR (op2a code2 op2b)
2374 => (inner1 OR t)
2375 => (t OR partial) */
2376 if (is_or)
2377 {
2378 if (integer_zerop (t))
2379 return inner1;
2380 else if (integer_onep (t))
2381 return boolean_true_node;
2382 /* If both are the same, we can apply the identity
2383 (x OR x) == x. */
2384 else if (partial && same_bool_result_p (t, partial))
2385 return t;
2386 }
2387
2388 /* Handle the AND case, where we are redistributing:
2389 (inner1 AND inner2) OR (op2a code2 op2b)
2390 => (t AND (inner1 OR (op2a code2 op2b)))
2391 => (t AND partial) */
2392 else
2393 {
2394 if (integer_zerop (t))
2395 return boolean_false_node;
2396 else if (partial)
2397 {
2398 /* We already got a simplification for the other
2399 operand to the redistributed AND expression. The
2400 interesting case is when at least one is true.
2401 Or, if both are the same, we can apply the identity
2402 (x AND x) == x. */
2403 if (integer_onep (partial))
2404 return t;
2405 else if (integer_onep (t))
2406 return partial;
2407 else if (same_bool_result_p (t, partial))
2408 return t;
2409 }
2410 }
2411 }
2412 }
2413 return NULL_TREE;
2414}
2415
2416/* Try to simplify the OR of two comparisons defined by
2417 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2418 If this can be done without constructing an intermediate value,
2419 return the resulting tree; otherwise NULL_TREE is returned.
2420 This function is deliberately asymmetric as it recurses on SSA_DEFs
2421 in the first comparison but not the second. */
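/* For example (illustrative): (x < y) OR (x == y) combines below via
   combine_comparisons into (x <= y), and (x != 5) OR (x < 7) merges
   against the constants into boolean_true_node, since 5 < 7. */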
2422
2423static tree
2424or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2425 enum tree_code code2, tree op2a, tree op2b)
2426{
2427 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
2428 if (operand_equal_p (op1a, op2a, 0)
2429 && operand_equal_p (op1b, op2b, 0))
2430 {
2431 tree t = combine_comparisons (UNKNOWN_LOCATION,
2432 TRUTH_ORIF_EXPR, code1, code2,
2433 boolean_type_node, op1a, op1b);
2434 if (t)
2435 return t;
2436 }
2437
2438 /* Likewise the swapped case of the above. */
2439 if (operand_equal_p (op1a, op2b, 0)
2440 && operand_equal_p (op1b, op2a, 0))
2441 {
2442 tree t = combine_comparisons (UNKNOWN_LOCATION,
2443 TRUTH_ORIF_EXPR, code1,
2444 swap_tree_comparison (code2),
2445 boolean_type_node, op1a, op1b);
2446 if (t)
2447 return t;
2448 }
2449
2450 /* If both comparisons are of the same value against constants, we might
2451 be able to merge them. */
2452 if (operand_equal_p (op1a, op2a, 0)
2453 && TREE_CODE (op1b) == INTEGER_CST
2454 && TREE_CODE (op2b) == INTEGER_CST)
2455 {
2456 int cmp = tree_int_cst_compare (op1b, op2b);
2457
2458 /* If we have (op1a != op1b), we should either be able to
2459 return that or TRUE, depending on whether the constant op1b
2460 also satisfies the other comparison against op2b. */
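/* For example (illustrative): for (x != 5) OR (x > 3), cmp > 0 and
   code2 == GT_EXPR give val = true, so we return boolean_true_node;
   for (x != 5) OR (x > 7), val = false and we return (x != 5). */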
2461 if (code1 == NE_EXPR)
2462 {
2463 bool done = true;
2464 bool val;
2465 switch (code2)
2466 {
2467 case EQ_EXPR: val = (cmp == 0); break;
2468 case NE_EXPR: val = (cmp != 0); break;
2469 case LT_EXPR: val = (cmp < 0); break;
2470 case GT_EXPR: val = (cmp > 0); break;
2471 case LE_EXPR: val = (cmp <= 0); break;
2472 case GE_EXPR: val = (cmp >= 0); break;
2473 default: done = false;
2474 }
2475 if (done)
2476 {
2477 if (val)
2478 return boolean_true_node;
2479 else
2480 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2481 }
2482 }
2483 /* Likewise if the second comparison is a != comparison. */
2484 else if (code2 == NE_EXPR)
2485 {
2486 bool done = true;
2487 bool val;
2488 switch (code1)
2489 {
2490 case EQ_EXPR: val = (cmp == 0); break;
2491 case NE_EXPR: val = (cmp != 0); break;
2492 case LT_EXPR: val = (cmp > 0); break;
2493 case GT_EXPR: val = (cmp < 0); break;
2494 case LE_EXPR: val = (cmp >= 0); break;
2495 case GE_EXPR: val = (cmp <= 0); break;
2496 default: done = false;
2497 }
2498 if (done)
2499 {
2500 if (val)
2501 return boolean_true_node;
2502 else
2503 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2504 }
2505 }
2506
2507 /* See if an equality test is redundant with the other comparison. */
2508 else if (code1 == EQ_EXPR)
2509 {
2510 bool val;
2511 switch (code2)
2512 {
2513 case EQ_EXPR: val = (cmp == 0); break;
2514 case NE_EXPR: val = (cmp != 0); break;
2515 case LT_EXPR: val = (cmp < 0); break;
2516 case GT_EXPR: val = (cmp > 0); break;
2517 case LE_EXPR: val = (cmp <= 0); break;
2518 case GE_EXPR: val = (cmp >= 0); break;
2519 default:
2520 val = false;
2521 }
2522 if (val)
2523 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2524 }
2525 else if (code2 == EQ_EXPR)
2526 {
2527 bool val;
2528 switch (code1)
2529 {
2530 case EQ_EXPR: val = (cmp == 0); break;
2531 case NE_EXPR: val = (cmp != 0); break;
2532 case LT_EXPR: val = (cmp > 0); break;
2533 case GT_EXPR: val = (cmp < 0); break;
2534 case LE_EXPR: val = (cmp >= 0); break;
2535 case GE_EXPR: val = (cmp <= 0); break;
2536 default:
2537 val = false;
2538 }
2539 if (val)
2540 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2541 }
2542
2543 /* Choose the less restrictive of two < or <= comparisons. */
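/* For example (illustrative): (x < 3) OR (x < 7) folds to (x < 7),
   the less restrictive bound. */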
2544 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2545 && (code2 == LT_EXPR || code2 == LE_EXPR))
2546 {
2547 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2548 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2549 else
2550 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2551 }
2552
2553 /* Likewise choose the less restrictive of two > or >= comparisons. */
2554 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2555 && (code2 == GT_EXPR || code2 == GE_EXPR))
2556 {
2557 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2558 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2559 else
2560 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2561 }
2562
2563 /* Check for singleton ranges. */
2564 else if (cmp == 0
2565 && ((code1 == LT_EXPR && code2 == GT_EXPR)
2566 || (code1 == GT_EXPR && code2 == LT_EXPR)))
2567 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
2568
2569 /* Check for less/greater pairs that don't restrict the range at all. */
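/* For example (illustrative): (x < 5) OR (x > 3) is true for every x,
   as is (x >= 3) OR (x <= 5). */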
2570 else if (cmp >= 0
2571 && (code1 == LT_EXPR || code1 == LE_EXPR)
2572 && (code2 == GT_EXPR || code2 == GE_EXPR))
2573 return boolean_true_node;
2574 else if (cmp <= 0
2575 && (code1 == GT_EXPR || code1 == GE_EXPR)
2576 && (code2 == LT_EXPR || code2 == LE_EXPR))
2577 return boolean_true_node;
2578 }
2579
2580 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2581 NAME's definition is a truth value. See if there are any simplifications
2582 that can be done against the NAME's definition. */
2583 if (TREE_CODE (op1a) == SSA_NAME
2584 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2585 && (integer_zerop (op1b) || integer_onep (op1b)))
2586 {
2587 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2588 || (code1 == NE_EXPR && integer_onep (op1b)));
2589 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2590 switch (gimple_code (stmt))
2591 {
2592 case GIMPLE_ASSIGN:
2593 /* Try to simplify by copy-propagating the definition. */
2594 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
2595
2596 case GIMPLE_PHI:
2597 /* If every argument to the PHI produces the same result when
2598 ORed with the second comparison, we win.
2599 Do not do this unless the type is bool since we need a bool
2600 result here anyway. */
2601 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2602 {
2603 tree result = NULL_TREE;
2604 unsigned i;
2605 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2606 {
2607 tree arg = gimple_phi_arg_def (stmt, i);
2608
2609 /* If this PHI has itself as an argument, ignore it.
2610 If all the other args produce the same result,
2611 we're still OK. */
2612 if (arg == gimple_phi_result (stmt))
2613 continue;
2614 else if (TREE_CODE (arg) == INTEGER_CST)
2615 {
2616 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
2617 {
2618 if (!result)
2619 result = boolean_true_node;
2620 else if (!integer_onep (result))
2621 return NULL_TREE;
2622 }
2623 else if (!result)
2624 result = fold_build2 (code2, boolean_type_node,
2625 op2a, op2b);
2626 else if (!same_bool_comparison_p (result,
2627 code2, op2a, op2b))
2628 return NULL_TREE;
2629 }
2630 else if (TREE_CODE (arg) == SSA_NAME)
2631 {
2632 tree temp = or_var_with_comparison (arg, invert,
2633 code2, op2a, op2b);
2634 if (!temp)
2635 return NULL_TREE;
2636 else if (!result)
2637 result = temp;
2638 else if (!same_bool_result_p (result, temp))
2639 return NULL_TREE;
2640 }
2641 else
2642 return NULL_TREE;
2643 }
2644 return result;
2645 }
2646
2647 default:
2648 break;
2649 }
2650 }
2651 return NULL_TREE;
2652}
2653
2654/* Try to simplify the OR of two comparisons, specified by
2655 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2656 If this can be simplified to a single expression (without
2657 introducing more SSA variables to hold intermediate values),
2658 return the resulting tree. Otherwise return NULL_TREE.
2659 If the result expression is non-null, it has boolean type. */
2660
2661tree
2662maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2663 enum tree_code code2, tree op2a, tree op2b)
2664{
2665 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2666 if (t)
2667 return t;
2668 else
2669 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2670}
2671
2672
2673/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2674
2675 Either NULL_TREE, a simplified but non-constant tree, or a constant
2676 is returned.
2677
2678 ??? This should go into a gimple-fold-inline.h file to be eventually
2679 privatized with the single valueize function used in the various TUs
2680 to avoid the indirect function call overhead. */
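/* For example (illustrative): for the statement tmp_2 = a_1 + 7, a
   caller whose VALUEIZE maps a_1 to the constant 3 gets back the
   folded constant 10. */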
2681
2682tree
2683gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
2684{
2685 location_t loc = gimple_location (stmt);
2686 switch (gimple_code (stmt))
2687 {
2688 case GIMPLE_ASSIGN:
2689 {
2690 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2691
2692 switch (get_gimple_rhs_class (subcode))
2693 {
2694 case GIMPLE_SINGLE_RHS:
2695 {
2696 tree rhs = gimple_assign_rhs1 (stmt);
2697 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
2698
2699 if (TREE_CODE (rhs) == SSA_NAME)
2700 {
2701 /* If the RHS is an SSA_NAME, return its known constant value,
2702 if any. */
2703 return (*valueize) (rhs);
2704 }
2705 /* Handle propagating invariant addresses into address
2706 operations. */
2707 else if (TREE_CODE (rhs) == ADDR_EXPR
2708 && !is_gimple_min_invariant (rhs))
2709 {
2710 HOST_WIDE_INT offset;
2711 tree base;
2712 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
2713 &offset,
2714 valueize);
2715 if (base
2716 && (CONSTANT_CLASS_P (base)
2717 || decl_address_invariant_p (base)))
2718 return build_invariant_address (TREE_TYPE (rhs),
2719 base, offset);
2720 }
2721 else if (TREE_CODE (rhs) == CONSTRUCTOR
2722 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
2723 && (CONSTRUCTOR_NELTS (rhs)
2724 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
2725 {
2726 unsigned i;
2727 tree val, list;
2728
2729 list = NULL_TREE;
2730 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
2731 {
2732 val = (*valueize) (val);
2733 if (TREE_CODE (val) == INTEGER_CST
2734 || TREE_CODE (val) == REAL_CST
2735 || TREE_CODE (val) == FIXED_CST)
2736 list = tree_cons (NULL_TREE, val, list);
2737 else
2738 return NULL_TREE;
2739 }
2740
2741 return build_vector (TREE_TYPE (rhs), nreverse (list));
2742 }
2743
2744 if (kind == tcc_reference)
2745 {
2746 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
2747 || TREE_CODE (rhs) == REALPART_EXPR
2748 || TREE_CODE (rhs) == IMAGPART_EXPR)
2749 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2750 {
2751 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2752 return fold_unary_loc (EXPR_LOCATION (rhs),
2753 TREE_CODE (rhs),
2754 TREE_TYPE (rhs), val);
2755 }
2756 else if (TREE_CODE (rhs) == BIT_FIELD_REF
2757 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2758 {
2759 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2760 return fold_ternary_loc (EXPR_LOCATION (rhs),
2761 TREE_CODE (rhs),
2762 TREE_TYPE (rhs), val,
2763 TREE_OPERAND (rhs, 1),
2764 TREE_OPERAND (rhs, 2));
2765 }
2766 else if (TREE_CODE (rhs) == MEM_REF
2767 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2768 {
2769 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2770 if (TREE_CODE (val) == ADDR_EXPR
2771 && is_gimple_min_invariant (val))
2772 {
2773 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
2774 unshare_expr (val),
2775 TREE_OPERAND (rhs, 1));
2776 if (tem)
2777 rhs = tem;
2778 }
2779 }
2780 return fold_const_aggregate_ref_1 (rhs, valueize);
2781 }
2782 else if (kind == tcc_declaration)
2783 return get_symbol_constant_value (rhs);
2784 return rhs;
2785 }
2786
2787 case GIMPLE_UNARY_RHS:
2788 {
2789 /* Handle unary operators that can appear in GIMPLE form.
2790 Note that we know the single operand must be a constant,
2791 so this should almost always return a simplified RHS. */
2792 tree lhs = gimple_assign_lhs (stmt);
2793 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2794
2795 /* Conversions are useless for CCP purposes if they are
2796 value-preserving. Thus the restrictions that
2797 useless_type_conversion_p places for pointer type conversions
2798 do not apply here. Substitution later will only substitute to
2799 allowed places. */
2800 if (CONVERT_EXPR_CODE_P (subcode)
2801 && POINTER_TYPE_P (TREE_TYPE (lhs))
2802 && POINTER_TYPE_P (TREE_TYPE (op0)))
2803 {
2804 tree tem;
2805 /* Try to re-construct array references on-the-fly. */
2806 if (!useless_type_conversion_p (TREE_TYPE (lhs),
2807 TREE_TYPE (op0))
2808 && ((tem = maybe_fold_offset_to_address
2809 (loc,
2810 op0, integer_zero_node, TREE_TYPE (lhs)))
2811 != NULL_TREE))
2812 return tem;
2813 return op0;
2814 }
2815
2816 return
2817 fold_unary_ignore_overflow_loc (loc, subcode,
2818 gimple_expr_type (stmt), op0);
2819 }
2820
2821 case GIMPLE_BINARY_RHS:
2822 {
2823 /* Handle binary operators that can appear in GIMPLE form. */
2824 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2825 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2826
2827 /* Translate &x + CST into an invariant form suitable for
2828 further propagation. */
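/* For example (illustrative): &a p+ 16 is rebuilt as &MEM_REF[&a, 16],
   an invariant address that later substitution can propagate. */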
2829 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
2830 && TREE_CODE (op0) == ADDR_EXPR
2831 && TREE_CODE (op1) == INTEGER_CST)
2832 {
2833 tree off = fold_convert (ptr_type_node, op1);
2834 return build_fold_addr_expr
2835 (fold_build2 (MEM_REF,
2836 TREE_TYPE (TREE_TYPE (op0)),
2837 unshare_expr (op0), off));
2838 }
2839
2840 return fold_binary_loc (loc, subcode,
2841 gimple_expr_type (stmt), op0, op1);
2842 }
2843
2844 case GIMPLE_TERNARY_RHS:
2845 {
2846 /* Handle ternary operators that can appear in GIMPLE form. */
2847 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2848 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2849 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
2850
2851 return fold_ternary_loc (loc, subcode,
2852 gimple_expr_type (stmt), op0, op1, op2);
2853 }
2854
2855 default:
2856 gcc_unreachable ();
2857 }
2858 }
2859
2860 case GIMPLE_CALL:
2861 {
2862 tree fn = (*valueize) (gimple_call_fn (stmt));
2863 if (TREE_CODE (fn) == ADDR_EXPR
2864 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
2865 && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
2866 {
2867 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
2868 tree call, retval;
2869 unsigned i;
2870 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2871 args[i] = (*valueize) (gimple_call_arg (stmt, i));
2872 call = build_call_array_loc (loc,
2873 gimple_call_return_type (stmt),
2874 fn, gimple_call_num_args (stmt), args);
2875 retval = fold_call_expr (EXPR_LOCATION (call), call, false);
2876 if (retval)
2877 /* fold_call_expr wraps the result inside a NOP_EXPR. */
2878 STRIP_NOPS (retval);
2879 return retval;
2880 }
2881 return NULL_TREE;
2882 }
2883
2884 default:
2885 return NULL_TREE;
2886 }
2887}
2888
2889/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2890 Returns NULL_TREE if folding to a constant is not possible, otherwise
2891 returns a constant according to is_gimple_min_invariant. */
2892
2893tree
2894gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
2895{
2896 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
2897 if (res && is_gimple_min_invariant (res))
2898 return res;
2899 return NULL_TREE;
2900}
2901
2902
2903/* The following set of functions are supposed to fold references using
2904 their constant initializers. */
2905
2906static tree fold_ctor_reference (tree type, tree ctor,
2907 unsigned HOST_WIDE_INT offset,
2908 unsigned HOST_WIDE_INT size);
2909
2910/* See if we can find constructor defining value of BASE.
2911 When we know the constructor with constant offset (such as when
2912 base is array[40] and we know the constructor of array),
2913 BIT_OFFSET is adjusted accordingly.
2914
2915 As a special case, return error_mark_node when constructor
2916 is not explicitly available, but it is known to be zero
2917 such as 'static const int a;'. */
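/* For example (illustrative): for BASE of the form MEM_REF[&arr, 8],
   where arr is a VAR_DECL with a known DECL_INITIAL, *BIT_OFFSET grows
   by 8 * BITS_PER_UNIT and the initializer CONSTRUCTOR of arr is
   returned. */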
2918static tree
2919get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
2920 tree (*valueize)(tree))
2921{
2922 HOST_WIDE_INT bit_offset2, size, max_size;
2923 if (TREE_CODE (base) == MEM_REF)
2924 {
2925 if (!integer_zerop (TREE_OPERAND (base, 1)))
2926 {
2927 if (!host_integerp (TREE_OPERAND (base, 1), 0))
2928 return NULL_TREE;
2929 *bit_offset += (mem_ref_offset (base).low
2930 * BITS_PER_UNIT);
2931 }
2932
2933 if (valueize
2934 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2935 base = valueize (TREE_OPERAND (base, 0));
2936 if (!base || TREE_CODE (base) != ADDR_EXPR)
2937 return NULL_TREE;
2938 base = TREE_OPERAND (base, 0);
2939 }
2940
2941 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
2942 DECL_INITIAL. If BASE is a nested reference into another
2943 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
2944 the inner reference. */
2945 switch (TREE_CODE (base))
2946 {
2947 case VAR_DECL:
2948 if (!const_value_known_p (base))
2949 return NULL_TREE;
2950
2951 /* Fallthru. */
2952 case CONST_DECL:
2953 if (!DECL_INITIAL (base)
2954 && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
2955 return error_mark_node;
2956 return DECL_INITIAL (base);
2957
2958 case ARRAY_REF:
2959 case COMPONENT_REF:
2960 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
2961 if (max_size == -1 || size != max_size)
2962 return NULL_TREE;
2963 *bit_offset += bit_offset2;
2964 return get_base_constructor (base, bit_offset, valueize);
2965
2966 case STRING_CST:
2967 case CONSTRUCTOR:
2968 return base;
2969
2970 default:
2971 return NULL_TREE;
2972 }
2973}
2974
2975 /* CTOR is a STRING_CST. Fold reference of type TYPE and size SIZE
2976 to the memory at bit OFFSET.
2977
2978 We only do the simple job of folding byte accesses. */
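/* For example (illustrative): a one-byte read at bit OFFSET 8 from the
   STRING_CST "hello" yields 'e' as an INTEGER_CST of TYPE. */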
2979
2980static tree
2981fold_string_cst_ctor_reference (tree type, tree ctor,
2982 unsigned HOST_WIDE_INT offset,
2983 unsigned HOST_WIDE_INT size)
2984{
2985 if (INTEGRAL_TYPE_P (type)
2986 && (TYPE_MODE (type)
2987 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2988 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2989 == MODE_INT)
2990 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
2991 && size == BITS_PER_UNIT
2992 && !(offset % BITS_PER_UNIT))
2993 {
2994 offset /= BITS_PER_UNIT;
2995 if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
2996 return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
2997 [offset]));
2998 /* Folding
2999 const char a[20]="hello";
3000 return a[10];
3001
3002 might lead to offset greater than string length. In this case we
3003 know value is either initialized to 0 or out of bounds. Return 0
3004 in both cases. */
3005 return build_zero_cst (type);
3006 }
3007 return NULL_TREE;
3008}
3009
3010 /* CTOR is a CONSTRUCTOR of an array type. Fold reference of type TYPE and size
3011 SIZE to the memory at bit OFFSET. */
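/* For example (illustrative): with 4-byte elements and OFFSET = 100
   bits, the access index computed below is 100 / 8 / 4 = 3 (plus the
   domain's low bound) and the offset within that element is
   100 % 32 = 4 bits. */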
3012
3013static tree
3014fold_array_ctor_reference (tree type, tree ctor,
3015 unsigned HOST_WIDE_INT offset,
3016 unsigned HOST_WIDE_INT size)
3017{
3018 unsigned HOST_WIDE_INT cnt;
3019 tree cfield, cval;
3020 double_int low_bound, elt_size;
3021 double_int index, max_index;
3022 double_int access_index;
3023 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
3024 HOST_WIDE_INT inner_offset;
3025
3026 /* Compute low bound and elt size. */
3027 if (domain_type && TYPE_MIN_VALUE (domain_type))
3028 {
3029 /* Static constructors for variably sized objects make no sense. */
3030 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
3031 low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
3032 }
3033 else
3034 low_bound = double_int_zero;
3035 /* Static constructors for variably sized objects make no sense. */
3036 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
3037 == INTEGER_CST);
3038 elt_size =
3039 tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
3040
3041
3042 /* We can handle only constant-sized accesses that are known not to
3043 be larger than the size of an array element. */
3044 if (!TYPE_SIZE_UNIT (type)
3045 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
3046 || double_int_cmp (elt_size,
3047 tree_to_double_int (TYPE_SIZE_UNIT (type)), 0) < 0)
3048 return NULL_TREE;
3049
3050 /* Compute the array index we look for. */
3051 access_index = double_int_udiv (uhwi_to_double_int (offset / BITS_PER_UNIT),
3052 elt_size, TRUNC_DIV_EXPR);
3053 access_index = double_int_add (access_index, low_bound);
3054
3055 /* And offset within the access. */
3056 inner_offset = offset % (double_int_to_uhwi (elt_size) * BITS_PER_UNIT);
3057
3058 /* See if the array element is large enough to span the whole access. We
3059 do not care to fold accesses spanning multiple array indexes. */
3060 if (inner_offset + size > double_int_to_uhwi (elt_size) * BITS_PER_UNIT)
3061 return NULL_TREE;
3062
3063 index = double_int_sub (low_bound, double_int_one);
3064 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
3065 {
3066 /* An array constructor might explicitly set an index, specify a range,
3067 or leave the index NULL, meaning that it is the next index after the
3068 previous one. */
3069 if (cfield)
3070 {
3071 if (TREE_CODE (cfield) == INTEGER_CST)
3072 max_index = index = tree_to_double_int (cfield);
3073 else
3074 {
3075 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
3076 index = tree_to_double_int (TREE_OPERAND (cfield, 0));
3077 max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
3078 }
3079 }
3080 else
3081 max_index = index = double_int_add (index, double_int_one);
3082
3083 /* Do we have a match? */
3084 if (double_int_cmp (access_index, index, 1) >= 0
3085 && double_int_cmp (access_index, max_index, 1) <= 0)
3086 return fold_ctor_reference (type, cval, inner_offset, size);
3087 }
3088 /* When memory is not explicitly mentioned in the constructor,
3089 it is 0 (or out of range). */
3090 return build_zero_cst (type);
3091}
3092
3093 /* CTOR is a CONSTRUCTOR of an aggregate or vector.
3094 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
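/* For example (illustrative): a field with DECL_FIELD_OFFSET = 4 bytes
   and DECL_FIELD_BIT_OFFSET = 0 starts at bit 32, so a 32-bit access at
   OFFSET = 32 falls within it and recurses with inner offset 0. */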
3095
3096static tree
3097fold_nonarray_ctor_reference (tree type, tree ctor,
3098 unsigned HOST_WIDE_INT offset,
3099 unsigned HOST_WIDE_INT size)
3100{
3101 unsigned HOST_WIDE_INT cnt;
3102 tree cfield, cval;
3103
3104 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
3105 cval)
3106 {
3107 tree byte_offset = DECL_FIELD_OFFSET (cfield);
3108 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
3109 tree field_size = DECL_SIZE (cfield);
3110 double_int bitoffset;
3111 double_int byte_offset_cst = tree_to_double_int (byte_offset);
3112 double_int bits_per_unit_cst = uhwi_to_double_int (BITS_PER_UNIT);
3113 double_int bitoffset_end;
3114
3115 /* Variable-sized objects in static constructors make no sense,
3116 but field_size can be NULL for flexible array members. */
3117 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
3118 && TREE_CODE (byte_offset) == INTEGER_CST
3119 && (field_size != NULL_TREE
3120 ? TREE_CODE (field_size) == INTEGER_CST
3121 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
3122
3123 /* Compute bit offset of the field. */
3124 bitoffset = double_int_add (tree_to_double_int (field_offset),
3125 double_int_mul (byte_offset_cst,
3126 bits_per_unit_cst));
3127 /* Compute bit offset where the field ends. */
3128 if (field_size != NULL_TREE)
3129 bitoffset_end = double_int_add (bitoffset,
3130 tree_to_double_int (field_size));
3131 else
3132 bitoffset_end = double_int_zero;
3133
3134 /* Is OFFSET in the range (BITOFFSET, BITOFFSET_END)? */
3135 if (double_int_cmp (uhwi_to_double_int (offset), bitoffset, 0) >= 0
3136 && (field_size == NULL_TREE
3137 || double_int_cmp (uhwi_to_double_int (offset),
3138 bitoffset_end, 0) < 0))
3139 {
3140 double_int access_end = double_int_add (uhwi_to_double_int (offset),
3141 uhwi_to_double_int (size));
3142 double_int inner_offset = double_int_sub (uhwi_to_double_int (offset),
3143 bitoffset);
3144 /* We do have overlap. Now see if field is large enough to
3145 cover the access. Give up for accesses spanning multiple
3146 fields. */
3147 if (double_int_cmp (access_end, bitoffset_end, 0) > 0)
3148 return NULL_TREE;
3149 return fold_ctor_reference (type, cval,
3150 double_int_to_uhwi (inner_offset), size);
3151 }
3152 }
3153 /* When memory is not explicitly mentioned in the constructor, it is 0. */
3154 return build_zero_cst (type);
3155}
3156
3157/* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
3158 to the memory at bit OFFSET. */
3159
3160static tree
3161fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
3162 unsigned HOST_WIDE_INT size)
3163{
3164 tree ret;
3165
3166 /* We found the field with exact match. */
3167 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
3168 && !offset)
3169 return canonicalize_constructor_val (ctor);
3170
3171 /* We are at the end of the walk; see if we can view-convert the
3172 result. */
3173 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
3174 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
3175 && operand_equal_p (TYPE_SIZE (type),
3176 TYPE_SIZE (TREE_TYPE (ctor)), 0))
3177 {
3178 ret = canonicalize_constructor_val (ctor);
3179 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
3180 if (ret)
3181 STRIP_NOPS (ret);
3182 return ret;
3183 }
3184 if (TREE_CODE (ctor) == STRING_CST)
3185 return fold_string_cst_ctor_reference (type, ctor, offset, size);
3186 if (TREE_CODE (ctor) == CONSTRUCTOR)
3187 {
3188
3189 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
3190 return fold_array_ctor_reference (type, ctor, offset, size);
3191 else
3192 return fold_nonarray_ctor_reference (type, ctor, offset, size);
3193 }
3194
3195 return NULL_TREE;
3196}
3197
3198/* Return the tree representing the element referenced by T if T is an
3199 ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
3200 names using VALUEIZE. Return NULL_TREE otherwise. */
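/* For example (illustrative): given static const int a[4] = {1, 2, 3, 4},
   the reference a[2] folds to the INTEGER_CST 3; with a valueizing
   callback, a[i_1] folds the same way once i_1 is known to be 2. */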
3201
3202tree
3203fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
3204{
3205 tree ctor, idx, base;
3206 HOST_WIDE_INT offset, size, max_size;
3207 tree tem;
3208
3209 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
3210 return get_symbol_constant_value (t);
3211
3212 tem = fold_read_from_constant_string (t);
3213 if (tem)
3214 return tem;
3215
3216 switch (TREE_CODE (t))
3217 {
3218 case ARRAY_REF:
3219 case ARRAY_RANGE_REF:
3220 /* Constant indexes are handled well by get_base_constructor.
3221 Only special case variable offsets.
3222 FIXME: This code can't handle nested references with variable indexes
3223 (they will be handled only by iteration of ccp). Perhaps we can bring
3224 get_ref_base_and_extent here and make it use a valueize callback. */
3225 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
3226 && valueize
3227 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
3228 && host_integerp (idx, 0))
3229 {
3230 tree low_bound, unit_size;
3231
3232 /* If the resulting bit-offset is constant, track it. */
3233 if ((low_bound = array_ref_low_bound (t),
3234 host_integerp (low_bound, 0))
3235 && (unit_size = array_ref_element_size (t),
3236 host_integerp (unit_size, 1)))
3237 {
3238 offset = TREE_INT_CST_LOW (idx);
3239 offset -= TREE_INT_CST_LOW (low_bound);
3240 offset *= TREE_INT_CST_LOW (unit_size);
3241 offset *= BITS_PER_UNIT;
3242
3243 base = TREE_OPERAND (t, 0);
3244 ctor = get_base_constructor (base, &offset, valueize);
3245 /* Empty constructor. Always fold to 0. */
3246 if (ctor == error_mark_node)
3247 return build_zero_cst (TREE_TYPE (t));
3248 /* Out-of-bounds array access. Value is undefined,
3249 but don't fold. */
3250 if (offset < 0)
3251 return NULL_TREE;
3252 /* We cannot determine the ctor. */
3253 if (!ctor)
3254 return NULL_TREE;
3255 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
3256 TREE_INT_CST_LOW (unit_size)
3257 * BITS_PER_UNIT);
3258 }
3259 }
3260 /* Fallthru. */
3261
3262 case COMPONENT_REF:
3263 case BIT_FIELD_REF:
3264 case TARGET_MEM_REF:
3265 case MEM_REF:
3266 base = get_ref_base_and_extent (t, &offset, &size, &max_size);
3267 ctor = get_base_constructor (base, &offset, valueize);
3268
3269 /* Empty constructor. Always fold to 0. */
3270 if (ctor == error_mark_node)
3271 return build_zero_cst (TREE_TYPE (t));
3272 /* We do not know the precise address. */
3273 if (max_size == -1 || max_size != size)
3274 return NULL_TREE;
3275 /* We cannot determine the ctor. */
3276 if (!ctor)
3277 return NULL_TREE;
3278
3279 /* Out-of-bounds array access. Value is undefined, but don't fold. */
3280 if (offset < 0)
3281 return NULL_TREE;
3282
3283 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size);
3284
3285 case REALPART_EXPR:
3286 case IMAGPART_EXPR:
3287 {
3288 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
3289 if (c && TREE_CODE (c) == COMPLEX_CST)
3290 return fold_build1_loc (EXPR_LOCATION (t),
3291 TREE_CODE (t), TREE_TYPE (t), c);
3292 break;
3293 }
3294
3295 default:
3296 break;
3297 }
3298
3299 return NULL_TREE;
3300}
3301
3302tree
3303fold_const_aggregate_ref (tree t)
3304{
3305 return fold_const_aggregate_ref_1 (t, NULL);
3306}