/* Statement simplification on GIMPLE.
   Copyright (C) 2010 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "tm.h"
25#include "tree.h"
26#include "flags.h"
cbdd87d4 27#include "function.h"
cbdd87d4
RG
28#include "tree-dump.h"
29#include "tree-flow.h"
30#include "tree-pass.h"
31#include "tree-ssa-propagate.h"
cbdd87d4 32#include "target.h"
cfef45c8 33#include "gimple-fold.h"
cbdd87d4 34
/* Return true when DECL can be referenced from the current unit.
   We can get declarations that are not possible to reference for
   various reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to another compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to a reference
	to a method that was partitioned elsewhere.
	In this case we have a static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred to by external vtables that
	we devirtualize only during the final compilation stage.
	At this time we have already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl)
{
  struct varpool_node *vnode;
  struct cgraph_node *node;

  if (!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
    return true;
  /* The external flag is set, so we are dealing with a C++ reference
     to a static object from another file.  */
  if (DECL_EXTERNAL (decl) && TREE_STATIC (decl)
      && TREE_CODE (decl) == VAR_DECL)
    {
      /* Just be sure the front end did not set the flags incorrectly
	 on a big variable.  Those variables should never be
	 finalized.  */
      gcc_checking_assert (!(vnode = varpool_get_node (decl))
			   || !vnode->finalized);
      return false;
    }
  /* When a function is public, we can always introduce a new reference.
     The exceptions are COMDAT functions, where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We are not at the ltrans stage, so don't worry about WHOPR.
     Also, while still gimplifying, all referred-to comdat functions
     will be produced.
     ??? As observed in PR20991 for already optimized out comdat virtual
     functions we may not necessarily have to give up, because the copy
     will be output elsewhere when the corresponding vtable is output.  */
  if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
    return true;
  /* If we already output the function body, we are safe.  */
  if (TREE_ASM_WRITTEN (decl))
    return true;
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      node = cgraph_get_node (decl);
      /* Check that we still have the function body and that we didn't
	 yet take the decision to eliminate the offline copy of the
	 function.  The second point is important when devirtualization
	 happens during the final compilation stage, when making a new
	 reference no longer causes the callee to be compiled.  */
      if (!node || !node->analyzed || node->global.inlined_to)
	return false;
    }
  else if (TREE_CODE (decl) == VAR_DECL)
    {
      vnode = varpool_get_node (decl);
      if (!vnode || !vnode->finalized)
	return false;
    }
  return true;
}

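/* Illustration (a sketch of case 2 above, with a hypothetical class S):
   if one unit defines "struct S { virtual void f (); };" and S::f ends
   up in a different WHOPR partition than a caller that is later
   devirtualized to call S::f directly, the caller's partition sees a
   FUNCTION_DECL with no callgraph node declaring the body, and this
   predicate returns false for it.  */
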
/* CVAL is a value taken from DECL_INITIAL of a variable.  Try to transform
   it into an acceptable form for is_gimple_min_invariant.  */

tree
canonicalize_constructor_val (tree cval)
{
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR)
    {
      tree t = maybe_fold_offset_to_address (EXPR_LOCATION (cval),
					     TREE_OPERAND (cval, 0),
					     TREE_OPERAND (cval, 1),
					     TREE_TYPE (cval));
      if (t)
	cval = t;
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (cval, 0));

      if (base
	  && (TREE_CODE (base) == VAR_DECL
	      || TREE_CODE (base) == FUNCTION_DECL)
	  && !can_refer_decl_in_current_unit_p (base))
	return NULL_TREE;
      if (cfun && base && TREE_CODE (base) == VAR_DECL)
	add_referenced_var (base);
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
    }
  return cval;
}

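/* Illustration (hypothetical initializer): a global address constant
   can reach here as an ADDR_EXPR whose recorded pointer type no longer
   matches the type of its operand, e.g. after folding casts in
   "int a[4]; int *p = (int *) &a;".  The type fixup above rebuilds the
   address with build_fold_addr_expr so that the canonical form
   satisfies is_gimple_min_invariant.  */
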
/* If SYM is a constant variable with a known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  if (const_value_known_p (sym))
    {
      tree val = DECL_INITIAL (sym);
      if (val)
	{
	  val = canonicalize_constructor_val (val);
	  if (val && is_gimple_min_invariant (val))
	    return val;
	  else
	    return NULL_TREE;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
	      || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
	return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}

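/* Illustration (hypothetical declarations): for
   "static const int answer = 42;" the initializer is already an
   INTEGER_CST, so a read of ANSWER folds to 42; a "const int zeros;"
   whose value cannot be overridden at link or run time has no
   DECL_INITIAL and folds to build_zero_cst (int), i.e. 0.  */
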
/* Return true if we may propagate the address expression ADDR into the
   dereference DEREF and cancel them.  */

bool
may_propagate_address_into_dereference (tree addr, tree deref)
{
  gcc_assert (TREE_CODE (deref) == MEM_REF
	      && TREE_CODE (addr) == ADDR_EXPR);

  /* Don't propagate if ADDR's operand has incomplete type.  */
  if (!COMPLETE_TYPE_P (TREE_TYPE (TREE_OPERAND (addr, 0))))
    return false;

  /* If the address is invariant then we do not need to preserve restrict
     qualifications.  But we do need to preserve volatile qualifiers until
     we can annotate the folded dereference itself properly.  */
  if (is_gimple_min_invariant (addr)
      && (!TREE_THIS_VOLATILE (deref)
	  || TYPE_VOLATILE (TREE_TYPE (addr))))
    return useless_type_conversion_p (TREE_TYPE (deref),
				      TREE_TYPE (TREE_OPERAND (addr, 0)));

  /* Else both the address substitution and the folding must result in
     a valid useless type conversion sequence.  */
  return (useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (deref, 0)),
				     TREE_TYPE (addr))
	  && useless_type_conversion_p (TREE_TYPE (deref),
					TREE_TYPE (TREE_OPERAND (addr, 0))));
}

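/* Illustration: after copy propagation a dereference like *p with
   p = &x can appear as *&x, i.e. a MEM_REF of an ADDR_EXPR.  When x
   has complete type and the types convert uselessly, the predicate
   above allows cancelling the pair to a plain access of x.  */
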
/* A subroutine of fold_stmt.  Attempts to fold *(A+O) to A[X].
   BASE is an array type.  OFFSET is a byte displacement.

   LOC is the location of the original expression.  */

static tree
maybe_fold_offset_to_array_ref (location_t loc, tree base, tree offset)
{
  tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
  tree array_type, elt_type, elt_size;
  tree domain_type;

  /* If BASE is an ARRAY_REF, we can pick up another offset (this time
     measured in units of the size of the element type) from that ARRAY_REF.
     We can't do anything if either is variable.

     The case we handle here is *(&A[N]+O).  */
  if (TREE_CODE (base) == ARRAY_REF)
    {
      tree low_bound = array_ref_low_bound (base);

      elt_offset = TREE_OPERAND (base, 1);
      if (TREE_CODE (low_bound) != INTEGER_CST
	  || TREE_CODE (elt_offset) != INTEGER_CST)
	return NULL_TREE;

      elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
      base = TREE_OPERAND (base, 0);
    }

  /* Ignore stupid user tricks of indexing non-array variables.  */
  array_type = TREE_TYPE (base);
  if (TREE_CODE (array_type) != ARRAY_TYPE)
    return NULL_TREE;
  elt_type = TREE_TYPE (array_type);

  /* Use signed size type for intermediate computation on the index.  */
  idx_type = ssizetype;

  /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
     element type (so we can use the alignment if it's not constant).
     Otherwise, compute the offset as an index by using a division.  If the
     division isn't exact, then don't do anything.  */
  elt_size = TYPE_SIZE_UNIT (elt_type);
  if (!elt_size)
    return NULL;
  if (integer_zerop (offset))
    {
      if (TREE_CODE (elt_size) != INTEGER_CST)
	elt_size = size_int (TYPE_ALIGN (elt_type));

      idx = build_int_cst (idx_type, 0);
    }
  else
    {
      unsigned HOST_WIDE_INT lquo, lrem;
      HOST_WIDE_INT hquo, hrem;
      double_int soffset;

      /* The final array offset should be signed, so we need
	 to sign-extend the (possibly pointer) offset here
	 and use signed division.  */
      soffset = double_int_sext (tree_to_double_int (offset),
				 TYPE_PRECISION (TREE_TYPE (offset)));
      if (TREE_CODE (elt_size) != INTEGER_CST
	  || div_and_round_double (TRUNC_DIV_EXPR, 0,
				   soffset.low, soffset.high,
				   TREE_INT_CST_LOW (elt_size),
				   TREE_INT_CST_HIGH (elt_size),
				   &lquo, &hquo, &lrem, &hrem)
	  || lrem || hrem)
	return NULL_TREE;

      idx = build_int_cst_wide (idx_type, lquo, hquo);
    }

  /* Assume the low bound is zero.  If there is a domain type, get the
     low bound, if any, convert the index into that type, and add the
     low bound.  */
  min_idx = build_int_cst (idx_type, 0);
  domain_type = TYPE_DOMAIN (array_type);
  if (domain_type)
    {
      idx_type = domain_type;
      if (TYPE_MIN_VALUE (idx_type))
	min_idx = TYPE_MIN_VALUE (idx_type);
      else
	min_idx = fold_convert (idx_type, min_idx);

      if (TREE_CODE (min_idx) != INTEGER_CST)
	return NULL_TREE;

      elt_offset = fold_convert (idx_type, elt_offset);
    }

  if (!integer_zerop (min_idx))
    idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
  if (!integer_zerop (elt_offset))
    idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);

  /* Make sure to possibly truncate late after offsetting.  */
  idx = fold_convert (idx_type, idx);

  /* We don't want to construct accesses past array bounds.  For example
       char *(c[4]);
       c[3][2];
     should not be simplified into (*c)[14] or tree-vrp will
     give false warnings.
     This is only an issue for multi-dimensional arrays.  */
  if (TREE_CODE (elt_type) == ARRAY_TYPE
      && domain_type)
    {
      if (TYPE_MAX_VALUE (domain_type)
	  && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST
	  && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type), idx))
	return NULL_TREE;
      else if (TYPE_MIN_VALUE (domain_type)
	       && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
	       && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
	return NULL_TREE;
      else if (compare_tree_int (idx, 0) < 0)
	return NULL_TREE;
    }

  {
    tree t = build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
    SET_EXPR_LOCATION (t, loc);
    return t;
  }
}
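
/* Worked example (hypothetical array, assuming 4-byte int): for
   "int a[8];" a dereference *(&a[1] + 8) arrives with BASE == a[1]
   and OFFSET == 8.  ELT_OFFSET becomes 1, dividing the byte offset by
   TYPE_SIZE_UNIT (int) gives index 2, and the result is a[3].  An
   offset such as 6 that does not divide exactly makes the function
   return NULL_TREE.  */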


/* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE[index].
   LOC is the location of the original expression.

   Before attempting the conversion strip off existing ADDR_EXPRs.  */

tree
maybe_fold_offset_to_reference (location_t loc, tree base, tree offset,
				tree orig_type)
{
  tree ret;

  STRIP_NOPS (base);
  if (TREE_CODE (base) != ADDR_EXPR)
    return NULL_TREE;

  base = TREE_OPERAND (base, 0);
  if (types_compatible_p (orig_type, TREE_TYPE (base))
      && integer_zerop (offset))
    return base;

  ret = maybe_fold_offset_to_array_ref (loc, base, offset);
  if (ret && types_compatible_p (orig_type, TREE_TYPE (ret)))
    return ret;
  return NULL_TREE;
}

/* Attempt to express (ORIG_TYPE)ADDR+OFFSET as (*ADDR)[index].
   LOC is the location of the original expression.  */

tree
maybe_fold_offset_to_address (location_t loc, tree addr, tree offset,
			      tree orig_type)
{
  tree base, ret;

  STRIP_NOPS (addr);
  if (TREE_CODE (addr) != ADDR_EXPR)
    return NULL_TREE;
  base = TREE_OPERAND (addr, 0);
  ret = maybe_fold_offset_to_array_ref (loc, base, offset);
  if (ret)
    {
      ret = build_fold_addr_expr (ret);
      if (!useless_type_conversion_p (orig_type, TREE_TYPE (ret)))
	return NULL_TREE;
      SET_EXPR_LOCATION (ret, loc);
    }

  return ret;
}
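
/* Illustration (hypothetical buffer): for "char buf[16];" the address
   computation (char *) &buf + 4 can be expressed as &buf[4]; the
   result is kept only if its type converts uselessly to ORIG_TYPE,
   otherwise NULL_TREE is returned.  */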


/* A quaint feature extant in our address arithmetic is that there
   can be hidden type changes here.  The type of the result need
   not be the same as the type of the input pointer.

   What we're after here is an expression of the form
	(T *)(&array + const)
   where array is OP0, const is OP1, RES_TYPE is T and
   the cast doesn't actually exist, but is implicit in the
   type of the POINTER_PLUS_EXPR.  We'd like to turn this into
	&array[x]
   which may be able to propagate further.  */

tree
maybe_fold_stmt_addition (location_t loc, tree res_type, tree op0, tree op1)
{
  tree ptd_type;
  tree t;

  /* The first operand should be an ADDR_EXPR.  */
  if (TREE_CODE (op0) != ADDR_EXPR)
    return NULL_TREE;
  op0 = TREE_OPERAND (op0, 0);

  /* It had better be a constant.  */
  if (TREE_CODE (op1) != INTEGER_CST)
    {
      /* Or op0 should now be A[0] and the non-constant offset defined
	 via a multiplication by the array element size.  */
      if (TREE_CODE (op0) == ARRAY_REF
	  /* As we will end up creating a variable index array access
	     in the outermost array dimension, make sure there isn't
	     a more inner array that the index could overflow to.  */
	  && TREE_CODE (TREE_OPERAND (op0, 0)) != ARRAY_REF
	  && integer_zerop (TREE_OPERAND (op0, 1))
	  && TREE_CODE (op1) == SSA_NAME)
	{
	  gimple offset_def = SSA_NAME_DEF_STMT (op1);
	  tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (op0));
	  if (!host_integerp (elsz, 1)
	      || !is_gimple_assign (offset_def))
	    return NULL_TREE;

	  /* Do not build array references of something that we can't
	     see the true number of array dimensions for.  */
	  if (!DECL_P (TREE_OPERAND (op0, 0))
	      && !handled_component_p (TREE_OPERAND (op0, 0)))
	    return NULL_TREE;

	  if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
	      && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
	      && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz))
	    return build_fold_addr_expr
			  (build4 (ARRAY_REF, TREE_TYPE (op0),
				   TREE_OPERAND (op0, 0),
				   gimple_assign_rhs1 (offset_def),
				   TREE_OPERAND (op0, 2),
				   TREE_OPERAND (op0, 3)));
	  else if (integer_onep (elsz)
		   && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
	    return build_fold_addr_expr
			  (build4 (ARRAY_REF, TREE_TYPE (op0),
				   TREE_OPERAND (op0, 0),
				   op1,
				   TREE_OPERAND (op0, 2),
				   TREE_OPERAND (op0, 3)));
	}
      else if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE
	       /* Ditto.  */
	       && TREE_CODE (TREE_TYPE (TREE_TYPE (op0))) != ARRAY_TYPE
	       && TREE_CODE (op1) == SSA_NAME)
	{
	  gimple offset_def = SSA_NAME_DEF_STMT (op1);
	  tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (op0)));
	  if (!host_integerp (elsz, 1)
	      || !is_gimple_assign (offset_def))
	    return NULL_TREE;

	  /* Do not build array references of something that we can't
	     see the true number of array dimensions for.  */
	  if (!DECL_P (op0)
	      && !handled_component_p (op0))
	    return NULL_TREE;

	  if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
	      && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
	      && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz))
	    return build_fold_addr_expr
			  (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)),
				   op0, gimple_assign_rhs1 (offset_def),
				   integer_zero_node, NULL_TREE));
	  else if (integer_onep (elsz)
		   && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
	    return build_fold_addr_expr
			  (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)),
				   op0, op1,
				   integer_zero_node, NULL_TREE));
	}

      return NULL_TREE;
    }

  /* If the first operand is an ARRAY_REF, expand it so that we can fold
     the offset into it.  */
  while (TREE_CODE (op0) == ARRAY_REF)
    {
      tree array_obj = TREE_OPERAND (op0, 0);
      tree array_idx = TREE_OPERAND (op0, 1);
      tree elt_type = TREE_TYPE (op0);
      tree elt_size = TYPE_SIZE_UNIT (elt_type);
      tree min_idx;

      if (TREE_CODE (array_idx) != INTEGER_CST)
	break;
      if (TREE_CODE (elt_size) != INTEGER_CST)
	break;

      /* Un-bias the index by the min index of the array type.  */
      min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
      if (min_idx)
	{
	  min_idx = TYPE_MIN_VALUE (min_idx);
	  if (min_idx)
	    {
	      if (TREE_CODE (min_idx) != INTEGER_CST)
		break;

	      array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
	      if (!integer_zerop (min_idx))
		array_idx = int_const_binop (MINUS_EXPR, array_idx,
					     min_idx, 0);
	    }
	}

      /* Convert the index to a byte offset.  */
      array_idx = fold_convert (sizetype, array_idx);
      array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);

      /* Update the operands for the next round, or for folding.  */
      op1 = int_const_binop (PLUS_EXPR,
			     array_idx, op1, 0);
      op0 = array_obj;
    }

  ptd_type = TREE_TYPE (res_type);
  /* If we want a pointer to void, reconstruct the reference from the
     array element type.  A pointer to that can be trivially converted
     to void *.  This happens as we fold (void *)(ptr p+ off).  */
  if (VOID_TYPE_P (ptd_type)
      && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
    ptd_type = TREE_TYPE (TREE_TYPE (op0));

  /* At which point we can try some of the same things as for indirects.  */
  t = maybe_fold_offset_to_array_ref (loc, op0, op1);
  if (t)
    {
      t = build_fold_addr_expr (t);
      if (!useless_type_conversion_p (res_type, TREE_TYPE (t)))
	return NULL_TREE;
      SET_EXPR_LOCATION (t, loc);
    }

  return t;
}
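
/* Worked example (hypothetical array, assuming 2-byte short): for
   "short a[4][4];" the statement "p_1 = &a p+ 8" has OP0 == a and
   OP1 == 8.  The byte offset equals one row, TYPE_SIZE_UNIT of the
   row type short[4] being 8, so maybe_fold_offset_to_array_ref yields
   a[1] and the result is &a[1].  A non-constant offset defined by a
   MULT_EXPR against the element size is handled by the SSA_NAME paths
   above instead.  */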

/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree *t = &expr;
  tree result;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
			   TREE_CODE (expr),
			   TREE_TYPE (expr),
			   TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
			     TREE_CODE (expr),
			     TREE_TYPE (expr),
			     TREE_OPERAND (expr, 0),
			     TREE_OPERAND (expr, 1),
			     TREE_OPERAND (expr, 2));

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM_REFs with an invariant address operand.  Do this
     first to avoid feeding non-canonical MEM_REFs elsewhere.  */
  if (TREE_CODE (*t) == MEM_REF
      && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
    {
      bool volatile_p = TREE_THIS_VOLATILE (*t);
      tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
			      TREE_OPERAND (*t, 0),
			      TREE_OPERAND (*t, 1));
      if (tem)
	{
	  TREE_THIS_VOLATILE (tem) = volatile_p;
	  *t = tem;
	  tem = maybe_fold_reference (expr, is_lhs);
	  if (tem)
	    return tem;
	  return expr;
	}
    }

  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  /* Fold back MEM_REFs to reference trees.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && (TREE_THIS_VOLATILE (*t)
	  == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
      && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
      && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
	  == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
      /* We have to look out here to not drop a required conversion
	 from the rhs to the lhs if is_lhs, but we don't have the
	 rhs here to verify that.  Thus require strict type
	 compatibility.  */
      && types_compatible_p (TREE_TYPE (*t),
			     TREE_TYPE (TREE_OPERAND
					  (TREE_OPERAND (*t, 0), 0))))
    {
      tree tem;
      *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tem = maybe_fold_reference (expr, is_lhs);
      if (tem)
	return tem;
      return expr;
    }
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  tem = maybe_fold_reference (expr, is_lhs);
	  if (tem)
	    return tem;
	  return expr;
	}
    }

  return NULL_TREE;
}
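
/* Illustration: with p_1 = &x propagated, a load "tmp_2 = *p_1"
   appears as a MEM_REF of &x with offset 0; when volatility and the
   type checks above agree, it is folded back to the reference tree
   "x".  A TARGET_MEM_REF gets the equivalent treatment through
   maybe_fold_tmr.  */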


/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	/* Try to fold a conditional expression.  */
	if (TREE_CODE (rhs) == COND_EXPR)
	  {
	    tree op0 = COND_EXPR_COND (rhs);
	    tree tem;
	    bool set = false;
	    location_t cond_loc = EXPR_LOCATION (rhs);

	    if (COMPARISON_CLASS_P (op0))
	      {
		fold_defer_overflow_warnings ();
		tem = fold_binary_loc (cond_loc,
				       TREE_CODE (op0), TREE_TYPE (op0),
				       TREE_OPERAND (op0, 0),
				       TREE_OPERAND (op0, 1));
		/* This is actually a conditional expression, not a GIMPLE
		   conditional statement, however, the valid_gimple_rhs_p
		   test still applies.  */
		set = (tem && is_gimple_condexpr (tem)
		       && valid_gimple_rhs_p (tem));
		fold_undefer_overflow_warnings (set, stmt, 0);
	      }
	    else if (is_gimple_min_invariant (op0))
	      {
		tem = op0;
		set = true;
	      }
	    else
	      return NULL_TREE;

	    if (set)
	      result = fold_build3_loc (cond_loc, COND_EXPR, TREE_TYPE (rhs),
					tem, COND_EXPR_THEN (rhs),
					COND_EXPR_ELSE (rhs));
	  }

	else if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		 && (CONSTRUCTOR_NELTS (rhs)
		     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (TREE_CODE (val) != INTEGER_CST
		  && TREE_CODE (val) != REAL_CST
		  && TREE_CODE (val) != FIXED_CST)
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return unshare_expr (get_symbol_constant_value (rhs));

	/* If we couldn't fold the RHS, hand over to the generic
	   fold routines.  */
	if (result == NULL_TREE)
	  result = fold (rhs);

	/* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
	   that may have been added by fold, and "useless" type
	   conversions that might now be apparent due to propagation.  */
	STRIP_USELESS_TYPE_CONVERSION (result);

	if (result != rhs && valid_gimple_rhs_p (result))
	  return result;

	return NULL_TREE;
      }
      break;

    case GIMPLE_UNARY_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
	if (result)
	  {
	    /* If the operation was a conversion do _not_ mark a
	       resulting constant with TREE_OVERFLOW if the original
	       constant was not.  These conversions have implementation
	       defined behavior and retaining the TREE_OVERFLOW flag
	       here would confuse later passes such as VRP.  */
	    if (CONVERT_EXPR_CODE_P (subcode)
		&& TREE_CODE (result) == INTEGER_CST
		&& TREE_CODE (rhs) == INTEGER_CST)
	      TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);

	    STRIP_USELESS_TYPE_CONVERSION (result);
	    if (valid_gimple_rhs_p (result))
	      return result;
	  }
	else if (CONVERT_EXPR_CODE_P (subcode)
		 && POINTER_TYPE_P (gimple_expr_type (stmt))
		 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
	  {
	    tree type = gimple_expr_type (stmt);
	    tree t = maybe_fold_offset_to_address (loc,
						   gimple_assign_rhs1 (stmt),
						   integer_zero_node, type);
	    if (t)
	      return t;
	  }
      }
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to fold pointer addition.  */
      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
	{
	  tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
	  if (TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
	    {
	      type = build_pointer_type (TREE_TYPE (TREE_TYPE (type)));
	      if (!useless_type_conversion_p
		    (TREE_TYPE (gimple_assign_lhs (stmt)), type))
		type = TREE_TYPE (gimple_assign_rhs1 (stmt));
	    }
	  result = maybe_fold_stmt_addition (gimple_location (stmt),
					     type,
					     gimple_assign_rhs1 (stmt),
					     gimple_assign_rhs2 (stmt));
	}

      if (!result)
	result = fold_binary_loc (loc, subcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)),
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;

	  /* Fold might have produced non-GIMPLE, so if we trust it blindly
	     we lose canonicalization opportunities.  Do not go again
	     through fold here though, or the same non-GIMPLE will be
	     produced.  */
	  if (commutative_tree_code (subcode)
	      && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
				       gimple_assign_rhs2 (stmt), false))
	    return build2 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
			   gimple_assign_rhs2 (stmt),
			   gimple_assign_rhs1 (stmt));
	}
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
				 TREE_TYPE (gimple_assign_lhs (stmt)),
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;

	  /* Fold might have produced non-GIMPLE, so if we trust it blindly
	     we lose canonicalization opportunities.  Do not go again
	     through fold here though, or the same non-GIMPLE will be
	     produced.  */
	  if (commutative_ternary_tree_code (subcode)
	      && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
				       gimple_assign_rhs2 (stmt), false))
	    return build3 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
			   gimple_assign_rhs2 (stmt),
			   gimple_assign_rhs1 (stmt),
			   gimple_assign_rhs3 (stmt));
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
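
/* Illustration (hypothetical SSA names): an assignment
   "a_1 = 2 < 3 ? b_2 : c_3" has a COND_EXPR rhs whose condition folds
   to true, so fold_build3_loc produces b_2; a binary rhs such as
   "d_4 = x_5 + 0" folds through fold_binary_loc to x_5.  Both results
   pass valid_gimple_rhs_p and are handed back as the replacement
   rhs.  */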

/* Attempt to fold a conditional statement.  Return true if any changes were
   made.  We only attempt to fold the condition expression, and do not perform
   any transformation that would require alteration of the cfg.  It is
   assumed that the operands have been previously folded.  */

static bool
fold_gimple_cond (gimple stmt)
{
  tree result = fold_binary_loc (gimple_location (stmt),
				 gimple_cond_code (stmt),
				 boolean_type_node,
				 gimple_cond_lhs (stmt),
				 gimple_cond_rhs (stmt));

  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
	{
	  gimple_cond_set_condition_from_tree (stmt, result);
	  return true;
	}
    }

  return false;
}
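
/* Illustration: once constant propagation turns "if (n_1 > 3)" into
   "if (5 > 3)", fold_binary_loc reduces the condition to
   boolean_true_node and the GIMPLE_COND is rewritten as a trivially
   true condition; removing the dead edge is left to CFG cleanup.  */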

/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  tree tmp = NULL_TREE; /* Silence warning.  */
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = gimple_seq_alloc ();
  struct gimplify_ctx gctx;
  gimple last = NULL;
  gimple laststore = NULL;
  tree reaching_vuse;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  lhs = gimple_call_lhs (stmt);
  reaching_vuse = gimple_vuse (stmt);

  push_gimplify_context (&gctx);

  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up folding a memcpy into an empty-class assignment,
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_remove (si_p, true);
	  return;
	}
    }
  else
    tmp = get_initialized_tmp_var (expr, &stmts, NULL);

  pop_gimplify_context (NULL);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* The replacement can expose previously unreferenced variables.  */
  for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
    {
      if (last)
	{
	  gsi_insert_before (si_p, last, GSI_NEW_STMT);
	  gsi_next (si_p);
	}
      new_stmt = gsi_stmt (i);
      if (gimple_in_ssa_p (cfun))
	{
	  find_new_referenced_vars (new_stmt);
	  mark_symbols_for_renaming (new_stmt);
	}
      /* If the new statement has a VUSE, update it with the exact SSA
	 name we know will reach this one.  */
      if (gimple_vuse (new_stmt))
	{
	  /* If we've also seen a previous store, create a new VDEF for
	     the latter one, and make that the new reaching VUSE.  */
	  if (laststore)
	    {
	      reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
	      gimple_set_vdef (laststore, reaching_vuse);
	      update_stmt (laststore);
	      laststore = NULL;
	    }
	  gimple_set_vuse (new_stmt, reaching_vuse);
	  gimple_set_modified (new_stmt, true);
	}
      if (gimple_assign_single_p (new_stmt)
	  && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	{
	  laststore = new_stmt;
	}
      last = new_stmt;
    }

  if (lhs == NULL_TREE)
    {
      /* If we replace a call without LHS that has a VDEF and our new
	 sequence ends with a store we must make that store have the same
	 vdef in order not to break the sequencing.  This can happen
	 for instance when folding memcpy calls into assignments.  */
      if (gimple_vdef (stmt) && laststore)
	{
	  gimple_set_vdef (laststore, gimple_vdef (stmt));
	  if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	    SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
	  update_stmt (laststore);
	}
      else if (gimple_in_ssa_p (cfun))
	{
	  unlink_stmt_vdef (stmt);
	  release_defs (stmt);
	}
      new_stmt = last;
    }
  else
    {
      if (last)
	{
	  gsi_insert_before (si_p, last, GSI_NEW_STMT);
	  gsi_next (si_p);
	}
      if (laststore && is_gimple_reg (lhs))
	{
	  gimple_set_vdef (laststore, gimple_vdef (stmt));
	  update_stmt (laststore);
	  if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	    SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
	  laststore = NULL;
	}
      else if (laststore)
	{
	  reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
	  gimple_set_vdef (laststore, reaching_vuse);
	  update_stmt (laststore);
	  laststore = NULL;
	}
      new_stmt = gimple_build_assign (lhs, tmp);
      if (!is_gimple_reg (tmp))
	gimple_set_vuse (new_stmt, reaching_vuse);
      if (!is_gimple_reg (lhs))
	{
	  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
	  if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	    SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = new_stmt;
	}
      else if (reaching_vuse == gimple_vuse (stmt))
	unlink_stmt_vdef (stmt);
    }

  gimple_set_location (new_stmt, gimple_location (stmt));
  gsi_replace (si_p, new_stmt, false);
}
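
/* Illustration (a sketch of the memcpy case mentioned above): when a
   call "memcpy (&dst, &src, 4);" without lhs is folded to an
   assignment copying *&src to *&dst, the gimplified store at the end
   of the sequence inherits the call's VDEF, and REACHING_VUSE threads
   the original VUSE through any intermediate statements, keeping the
   virtual operand chain intact.  */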

/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */

static bool
get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      if (TREE_CODE (arg) == COND_EXPR)
	return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
	       && get_maxval_strlen (COND_EXPR_ELSE (arg), length,
				     visited, type);
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      else if (TREE_CODE (arg) == ADDR_EXPR
	       && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	       && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_maxval_strlen (TREE_OPERAND (aop0, 0),
				      length, visited, type);
	}

      if (type == 2)
	{
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length)
	{
	  if (type > 0)
	    {
	      if (TREE_CODE (*length) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*length, val))
		*length = val;
	      return true;
	    }
	  else if (simple_cst_equal (val, *length) != 1)
	    return false;
	}

      *length = val;
      return true;
    }

  /* If we were already here, break the infinite cycle.  */
  if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
      case GIMPLE_ASSIGN:
	/* The RHS of the statement defining VAR must either have a
	   constant length or come from another SSA_NAME with a constant
	   length.  */
	if (gimple_assign_single_p (def_stmt)
	    || gimple_assign_unary_nop_p (def_stmt))
	  {
	    tree rhs = gimple_assign_rhs1 (def_stmt);
	    return get_maxval_strlen (rhs, length, visited, type);
	  }
	return false;

      case GIMPLE_PHI:
	{
	  /* All the arguments of the PHI node must have the same constant
	     length.  */
	  unsigned i;

	  for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	    {
	      tree arg = gimple_phi_arg (def_stmt, i)->def;

	      /* If this PHI has itself as an argument, we cannot
		 determine the string length of this argument.  However,
		 if we can find a constant string length for the other
		 PHI args then we can still be sure that this is a
		 constant string length.  So be optimistic and just
		 continue with the next argument.  */
	      if (arg == gimple_phi_result (def_stmt))
		continue;

	      if (!get_maxval_strlen (arg, length, visited, type))
		return false;
	    }
	}
	return true;

      default:
	return false;
    }
}
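
/* Illustration (hypothetical SSA names): for

     s_3 = PHI <"abc"(2), "xyz"(3)>
     n_4 = strlen (s_3);

   both PHI arguments have c_strlen equal to 3, so with TYPE == 0 the
   walk sets *LENGTH to 3; had the lengths differed, simple_cst_equal
   would fail and the function would return false.  */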


/* Fold builtin call in statement STMT.  Returns a simplified tree.
   We may return a non-constant expression, including another call
   to a different function and with different arguments, e.g.,
   substituting memcpy for strcpy when the string length is known.
   Note that some builtins expand into inline code that may not
   be valid in GIMPLE.  Callers must take care.  */

tree
gimple_fold_builtin (gimple stmt)
{
  tree result, val[3];
  tree callee, a;
  int arg_idx, type;
  bitmap visited;
  bool ignore;
  int nargs;
  location_t loc = gimple_location (stmt);

  gcc_assert (is_gimple_call (stmt));

  ignore = (gimple_call_lhs (stmt) == NULL);

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = gimple_call_fndecl (stmt);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  nargs = gimple_call_num_args (stmt);
  if (nargs == 0)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      arg_idx = 0;
      type = 0;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
      arg_idx = 1;
      type = 0;
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
      arg_idx = 2;
      type = 2;
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      arg_idx = 1;
      type = 1;
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      arg_idx = 1;
      type = 2;
      break;
    default:
      return NULL_TREE;
    }

  if (arg_idx >= nargs)
    return NULL_TREE;

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);
  bitmap_clear (visited);

  memset (val, 0, sizeof (val));
  a = gimple_call_arg (stmt, arg_idx);
  if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
    val[arg_idx] = NULL_TREE;

  BITMAP_FREE (visited);

  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (val[0] && nargs == 1)
	{
	  tree new_val =
	      fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);

	  /* If the result is not a valid gimple value, or not a cast
	     of a valid gimple value, then we cannot use the result.  */
	  if (is_gimple_val (new_val)
	      || (CONVERT_EXPR_P (new_val)
		  && is_gimple_val (TREE_OPERAND (new_val, 0))))
	    return new_val;
	}
      break;

    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
	result = fold_builtin_strcpy (loc, callee,
				      gimple_call_arg (stmt, 0),
				      gimple_call_arg (stmt, 1),
				      val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_strncpy (loc, callee,
				       gimple_call_arg (stmt, 0),
				       gimple_call_arg (stmt, 1),
				       gimple_call_arg (stmt, 2),
				       val[1]);
      break;

    case BUILT_IN_FPUTS:
      if (nargs == 2)
	result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, false, val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      if (nargs == 2)
	result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, true, val[0]);
      break;

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_memory_chk (loc, callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  gimple_call_arg (stmt, 3),
					  val[2], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_stxcpy_chk (loc, callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  val[1], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_strncpy_chk (loc, gimple_call_arg (stmt, 0),
					   gimple_call_arg (stmt, 1),
					   gimple_call_arg (stmt, 2),
					   gimple_call_arg (stmt, 3),
					   val[2]);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
	result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
						   DECL_FUNCTION_CODE (callee));
      break;

    default:
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);
  return result;
}
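
/* Illustration (hypothetical call): for "char buf[8];" the call
   strcpy (buf, "abc") has a source of known length 3, so
   fold_builtin_strcpy can rewrite it as memcpy (buf, "abc", 4),
   copying the terminating NUL as well.  */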

/* Return a declaration of a function which an OBJ_TYPE_REF references.  TOKEN
   is the integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).  If a call to the function must be accompanied
   by a this adjustment, the constant which should be added to the this
   pointer is stored to *DELTA.  If REFUSE_THUNKS is true, return NULL if the
   function is a thunk (other than a this adjustment which is dealt with by
   DELTA).  */

tree
gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
				  tree *delta, bool refuse_thunks)
{
  HOST_WIDE_INT i;
  tree v, fndecl;
  struct cgraph_node *node;

  v = BINFO_VIRTUALS (known_binfo);
  /* If there are no virtual methods, leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;
  i = 0;
  while (i != token)
    {
      i += (TARGET_VTABLE_USES_DESCRIPTORS
	    ? TARGET_VTABLE_USES_DESCRIPTORS : 1);
      v = TREE_CHAIN (v);
    }

  /* If BV_VCALL_INDEX is non-NULL, give up.  */
  if (TREE_TYPE (v))
    return NULL_TREE;

  fndecl = TREE_VALUE (v);
  node = cgraph_get_node_or_alias (fndecl);
  if (refuse_thunks
      && (!node
	  /* Bail out if it is a thunk declaration.  Since simple
	     this_adjusting thunks are represented by a constant in
	     TREE_PURPOSE of items in BINFO_VIRTUALS, this is a more
	     complicated type which we cannot handle as yet.

	     FIXME: Remove the following condition once we are able to
	     represent thunk information on call graph edges.  */
	  || (node->same_body_alias && node->thunk.thunk_p)))
    return NULL_TREE;

  /* When the cgraph node is missing and the function is not public, we
     cannot devirtualize.  This can happen in WHOPR when the actual
     method ends up in another partition, because we found the
     devirtualization possibility too late.  */
  if (!can_refer_decl_in_current_unit_p (TREE_VALUE (v)))
    return NULL_TREE;

  *delta = TREE_PURPOSE (v);
  gcc_checking_assert (host_integerp (*delta, 0));
  return fndecl;
}
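
/* Illustration (hypothetical class): if type analysis proves that the
   object in OBJ_TYPE_REF_OBJECT is of class S, KNOWN_BINFO is S's
   binfo and TOKEN selects the entry in BINFO_VIRTUALS; the walk above
   returns the FUNCTION_DECL of that virtual method of S together with
   any this-adjusting DELTA recorded in TREE_PURPOSE.  */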

/* Generate code adjusting the first parameter of a call statement determined
   by GSI by DELTA.  */

void
gimple_adjust_this_by_delta (gimple_stmt_iterator *gsi, tree delta)
{
  gimple call_stmt = gsi_stmt (*gsi);
  tree parm, tmp;
  gimple new_stmt;

  delta = fold_convert (sizetype, delta);
  gcc_assert (gimple_call_num_args (call_stmt) >= 1);
  parm = gimple_call_arg (call_stmt, 0);
  gcc_assert (POINTER_TYPE_P (TREE_TYPE (parm)));
  tmp = create_tmp_var (TREE_TYPE (parm), NULL);
  add_referenced_var (tmp);

  tmp = make_ssa_name (tmp, NULL);
  new_stmt = gimple_build_assign_with_ops (POINTER_PLUS_EXPR, tmp, parm, delta);
  SSA_NAME_DEF_STMT (tmp) = new_stmt;
  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
  gimple_call_set_arg (call_stmt, 0, tmp);
}

/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gimple stmt = gsi_stmt (*gsi);
  tree callee;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  callee = gimple_call_fndecl (stmt);
  if (!inplace && callee && DECL_BUILT_IN (callee))
    {
      tree result = gimple_fold_builtin (stmt);

      if (result)
	{
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  return true;
	}
    }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (TREE_CODE (callee) == OBJ_TYPE_REF
      && gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
    {
      gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
      return true;
    }

  return false;
}

/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  unsigned i;

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	unsigned old_num_ops = gimple_num_ops (stmt);
	tree new_rhs = fold_gimple_assign (gsi);
	tree lhs = gimple_assign_lhs (stmt);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    case GIMPLE_CALL:
      /* Fold *& in call arguments.  */
      for (i = 0; i < gimple_call_num_args (stmt); ++i)
	if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
	  {
	    tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	    if (tmp)
	      {
		gimple_call_set_arg (stmt, i, tmp);
		changed = true;
	      }
	  }
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
	{
	  tree link = gimple_asm_output_op (stmt, i);
	  tree op = TREE_VALUE (link);
	  if (REFERENCE_CLASS_P (op)
	      && (op = maybe_fold_reference (op, true)) != NULL_TREE)
	    {
	      TREE_VALUE (link) = op;
	      changed = true;
	    }
	}
      for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
	{
	  tree link = gimple_asm_input_op (stmt, i);
	  tree op = TREE_VALUE (link);
	  if (REFERENCE_CLASS_P (op)
	      && (op = maybe_fold_reference (op, false)) != NULL_TREE)
	    {
	      TREE_VALUE (link) = op;
	      changed = true;
	    }
	}
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  return changed;
}

/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false);
}

/* Perform the minimal folding on statement STMT.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement STMT should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple stmt)
{
  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
  bool changed = fold_stmt_1 (&gsi, true);
  gcc_assert (gsi_stmt (gsi) == stmt);
  return changed;
}
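
/* Illustration: propagating p_1 = &x into "*p_1 = 0" leaves the
   statement "*&x = 0"; fold_stmt_inplace folds the lhs through
   maybe_fold_reference to plain "x = 0" without replacing the
   statement object, as the inplace contract requires.  */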

/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
   if EXPR is null or we don't know how.
   If non-null, the result always has boolean type.  */

static tree
canonicalize_bool (tree expr, bool invert)
{
  if (!expr)
    return NULL_TREE;
  else if (invert)
    {
      if (integer_nonzerop (expr))
	return boolean_false_node;
      else if (integer_zerop (expr))
	return boolean_true_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (EQ_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
	return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
  else
    {
      if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
	return expr;
      if (integer_nonzerop (expr))
	return boolean_true_node;
      else if (integer_zerop (expr))
	return boolean_false_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (NE_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
	return fold_build2 (TREE_CODE (expr),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
}
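
/* Illustration: with INVERT true, an SSA name flag_1 canonicalizes to
   the comparison "flag_1 == 0" of boolean type, and a comparison
   "a_2 < b_3" becomes "a_2 >= b_3" via invert_tree_comparison; in
   every successful case the result has boolean type.  */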
1679
1680/* Check to see if a boolean expression EXPR is logically equivalent to the
1681 comparison (OP1 CODE OP2). Check for various identities involving
1682 SSA_NAMEs. */
1683
1684static bool
1685same_bool_comparison_p (const_tree expr, enum tree_code code,
1686 const_tree op1, const_tree op2)
1687{
1688 gimple s;
1689
1690 /* The obvious case. */
1691 if (TREE_CODE (expr) == code
1692 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
1693 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
1694 return true;
1695
1696 /* Check for comparing (name, name != 0) and the case where expr
1697 is an SSA_NAME with a definition matching the comparison. */
1698 if (TREE_CODE (expr) == SSA_NAME
1699 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1700 {
1701 if (operand_equal_p (expr, op1, 0))
1702 return ((code == NE_EXPR && integer_zerop (op2))
1703 || (code == EQ_EXPR && integer_nonzerop (op2)));
1704 s = SSA_NAME_DEF_STMT (expr);
1705 if (is_gimple_assign (s)
1706 && gimple_assign_rhs_code (s) == code
1707 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
1708 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
1709 return true;
1710 }
1711
1712 /* If op1 is of the form (name != 0) or (name == 0), and the definition
1713 of name is a comparison, recurse. */
1714 if (TREE_CODE (op1) == SSA_NAME
1715 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
1716 {
1717 s = SSA_NAME_DEF_STMT (op1);
1718 if (is_gimple_assign (s)
1719 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
1720 {
1721 enum tree_code c = gimple_assign_rhs_code (s);
1722 if ((c == NE_EXPR && integer_zerop (op2))
1723 || (c == EQ_EXPR && integer_nonzerop (op2)))
1724 return same_bool_comparison_p (expr, c,
1725 gimple_assign_rhs1 (s),
1726 gimple_assign_rhs2 (s));
1727 if ((c == EQ_EXPR && integer_zerop (op2))
1728 || (c == NE_EXPR && integer_nonzerop (op2)))
1729 return same_bool_comparison_p (expr,
1730 invert_tree_comparison (c, false),
1731 gimple_assign_rhs1 (s),
1732 gimple_assign_rhs2 (s));
1733 }
1734 }
1735 return false;
1736}
1737
1738/* Check to see if two boolean expressions OP1 and OP2 are logically
1739 equivalent. */
1740
1741static bool
1742same_bool_result_p (const_tree op1, const_tree op2)
1743{
1744 /* Simple cases first. */
1745 if (operand_equal_p (op1, op2, 0))
1746 return true;
1747
1748 /* Check the cases where at least one of the operands is a comparison.
1749 These are a bit smarter than operand_equal_p in that they apply some
1750 identities on SSA_NAMEs. */
1751 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
1752 && same_bool_comparison_p (op1, TREE_CODE (op2),
1753 TREE_OPERAND (op2, 0),
1754 TREE_OPERAND (op2, 1)))
1755 return true;
1756 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
1757 && same_bool_comparison_p (op2, TREE_CODE (op1),
1758 TREE_OPERAND (op1, 0),
1759 TREE_OPERAND (op1, 1)))
1760 return true;
1761
1762 /* Default case. */
1763 return false;
1764}
1765
1766/* Forward declarations for some mutually recursive functions. */
1767
1768static tree
1769and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1770 enum tree_code code2, tree op2a, tree op2b);
1771static tree
1772and_var_with_comparison (tree var, bool invert,
1773 enum tree_code code2, tree op2a, tree op2b);
1774static tree
1775and_var_with_comparison_1 (gimple stmt,
1776 enum tree_code code2, tree op2a, tree op2b);
1777static tree
1778or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1779 enum tree_code code2, tree op2a, tree op2b);
1780static tree
1781or_var_with_comparison (tree var, bool invert,
1782 enum tree_code code2, tree op2a, tree op2b);
1783static tree
1784or_var_with_comparison_1 (gimple stmt,
1785 enum tree_code code2, tree op2a, tree op2b);
1786
1787/* Helper function for and_comparisons_1: try to simplify the AND of the
1788 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1789 If INVERT is true, invert the value of VAR before doing the AND.
1790 Return NULL_TREE if we can't simplify this to a single expression. */
1791
1792static tree
1793and_var_with_comparison (tree var, bool invert,
1794 enum tree_code code2, tree op2a, tree op2b)
1795{
1796 tree t;
1797 gimple stmt = SSA_NAME_DEF_STMT (var);
1798
1799 /* We can only deal with variables whose definitions are assignments. */
1800 if (!is_gimple_assign (stmt))
1801 return NULL_TREE;
1802
1803 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1804 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1805 Then we only have to consider the simpler non-inverted cases. */
1806 if (invert)
1807 t = or_var_with_comparison_1 (stmt,
1808 invert_tree_comparison (code2, false),
1809 op2a, op2b);
1810 else
1811 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
1812 return canonicalize_bool (t, invert);
1813}
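
Editor's aside (illustrative, not GCC code): the DeMorgan rewrite above can be verified exhaustively; !var AND c is indeed !(var OR !c), which is why only the non-inverted OR case needs dedicated handling.

#include <assert.h>

int
main (void)
{
  for (int var = 0; var <= 1; var++)
    for (int c = 0; c <= 1; c++)
      /* !var AND c  =>  !(var OR !c)  */
      assert ((!var && c) == !(var || !c));
  return 0;
}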
1814
1815/* Try to simplify the AND of the ssa variable defined by the assignment
1816 STMT with the comparison specified by (OP2A CODE2 OP2B).
1817 Return NULL_TREE if we can't simplify this to a single expression. */
1818
1819static tree
1820and_var_with_comparison_1 (gimple stmt,
1821 enum tree_code code2, tree op2a, tree op2b)
1822{
1823 tree var = gimple_assign_lhs (stmt);
1824 tree true_test_var = NULL_TREE;
1825 tree false_test_var = NULL_TREE;
1826 enum tree_code innercode = gimple_assign_rhs_code (stmt);
1827
1828 /* Check for identities like (var AND (var == 0)) => false. */
1829 if (TREE_CODE (op2a) == SSA_NAME
1830 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
1831 {
1832 if ((code2 == NE_EXPR && integer_zerop (op2b))
1833 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
1834 {
1835 true_test_var = op2a;
1836 if (var == true_test_var)
1837 return var;
1838 }
1839 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
1840 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
1841 {
1842 false_test_var = op2a;
1843 if (var == false_test_var)
1844 return boolean_false_node;
1845 }
1846 }
1847
1848 /* If the definition is a comparison, recurse on it. */
1849 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
1850 {
1851 tree t = and_comparisons_1 (innercode,
1852 gimple_assign_rhs1 (stmt),
1853 gimple_assign_rhs2 (stmt),
1854 code2,
1855 op2a,
1856 op2b);
1857 if (t)
1858 return t;
1859 }
1860
1861 /* If the definition is an AND or OR expression, we may be able to
1862 simplify by reassociating. */
1863 if (innercode == TRUTH_AND_EXPR
1864 || innercode == TRUTH_OR_EXPR
1865 || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
1866 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)))
1867 {
1868 tree inner1 = gimple_assign_rhs1 (stmt);
1869 tree inner2 = gimple_assign_rhs2 (stmt);
1870 gimple s;
1871 tree t;
1872 tree partial = NULL_TREE;
1873 bool is_and = (innercode == TRUTH_AND_EXPR || innercode == BIT_AND_EXPR);
1874
1875 /* Check for boolean identities that don't require recursive examination
1876 of inner1/inner2:
1877 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
1878 inner1 AND (inner1 OR inner2) => inner1
1879 !inner1 AND (inner1 AND inner2) => false
1880 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
1881 Likewise for similar cases involving inner2. */
1882 if (inner1 == true_test_var)
1883 return (is_and ? var : inner1);
1884 else if (inner2 == true_test_var)
1885 return (is_and ? var : inner2);
1886 else if (inner1 == false_test_var)
1887 return (is_and
1888 ? boolean_false_node
1889 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
1890 else if (inner2 == false_test_var)
1891 return (is_and
1892 ? boolean_false_node
1893 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
1894
1895 /* Next, redistribute/reassociate the AND across the inner tests.
1896 Compute the first partial result, (inner1 AND (op2a code op2b)) */
1897 if (TREE_CODE (inner1) == SSA_NAME
1898 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
1899 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1900 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1901 gimple_assign_rhs1 (s),
1902 gimple_assign_rhs2 (s),
1903 code2, op2a, op2b)))
1904 {
1905 /* Handle the AND case, where we are reassociating:
1906 (inner1 AND inner2) AND (op2a code2 op2b)
1907 => (t AND inner2)
1908 If the partial result t is a constant, we win. Otherwise
1909 continue on to try reassociating with the other inner test. */
1910 if (is_and)
1911 {
1912 if (integer_onep (t))
1913 return inner2;
1914 else if (integer_zerop (t))
1915 return boolean_false_node;
1916 }
1917
1918 /* Handle the OR case, where we are redistributing:
1919 (inner1 OR inner2) AND (op2a code2 op2b)
1920 => (t OR (inner2 AND (op2a code2 op2b))) */
1921 else if (integer_onep (t))
1922 return boolean_true_node;
1923
1924 /* Save partial result for later. */
1925 partial = t;
1926 }
1927
1928 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
1929 if (TREE_CODE (inner2) == SSA_NAME
1930 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
1931 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1932 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1933 gimple_assign_rhs1 (s),
1934 gimple_assign_rhs2 (s),
1935 code2, op2a, op2b)))
1936 {
1937 /* Handle the AND case, where we are reassociating:
1938 (inner1 AND inner2) AND (op2a code2 op2b)
1939 => (inner1 AND t) */
1940 if (is_and)
1941 {
1942 if (integer_onep (t))
1943 return inner1;
1944 else if (integer_zerop (t))
1945 return boolean_false_node;
1946 /* If both are the same, we can apply the identity
1947 (x AND x) == x. */
1948 else if (partial && same_bool_result_p (t, partial))
1949 return t;
1950 }
1951
1952 /* Handle the OR case, where we are redistributing:
1953 (inner1 OR inner2) AND (op2a code2 op2b)
1954 => (t OR (inner1 AND (op2a code2 op2b)))
1955 => (t OR partial) */
1956 else
1957 {
1958 if (integer_onep (t))
1959 return boolean_true_node;
1960 else if (partial)
1961 {
1962 /* We already got a simplification for the other
1963 operand to the redistributed OR expression. The
1964 interesting case is when at least one is false.
1965 Or, if both are the same, we can apply the identity
1966 (x OR x) == x. */
1967 if (integer_zerop (partial))
1968 return t;
1969 else if (integer_zerop (t))
1970 return partial;
1971 else if (same_bool_result_p (t, partial))
1972 return t;
1973 }
1974 }
1975 }
1976 }
1977 return NULL_TREE;
1978}
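
Editor's aside (illustrative, not GCC code): both rewrites used above are plain boolean laws — reassociation of AND and distribution of AND over OR; this standalone check enumerates all eight assignments.

#include <assert.h>

int
main (void)
{
  for (int i1 = 0; i1 <= 1; i1++)
    for (int i2 = 0; i2 <= 1; i2++)
      for (int c = 0; c <= 1; c++)
        {
          /* (inner1 AND inner2) AND c  =>  (inner1 AND c) AND inner2  */
          assert (((i1 && i2) && c) == ((i1 && c) && i2));
          /* (inner1 OR inner2) AND c   =>  (inner1 AND c) OR (inner2 AND c)  */
          assert (((i1 || i2) && c) == ((i1 && c) || (i2 && c)));
        }
  return 0;
}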
1979
1980/* Try to simplify the AND of two comparisons defined by
1981 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1982 If this can be done without constructing an intermediate value,
1983 return the resulting tree; otherwise NULL_TREE is returned.
1984 This function is deliberately asymmetric as it recurses on SSA_DEFs
1985 in the first comparison but not the second. */
1986
1987static tree
1988and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1989 enum tree_code code2, tree op2a, tree op2b)
1990{
1991 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
1992 if (operand_equal_p (op1a, op2a, 0)
1993 && operand_equal_p (op1b, op2b, 0))
1994 {
1995 tree t = combine_comparisons (UNKNOWN_LOCATION,
1996 TRUTH_ANDIF_EXPR, code1, code2,
1997 boolean_type_node, op1a, op1b);
1998 if (t)
1999 return t;
2000 }
2001
2002 /* Likewise the swapped case of the above. */
2003 if (operand_equal_p (op1a, op2b, 0)
2004 && operand_equal_p (op1b, op2a, 0))
2005 {
2006 tree t = combine_comparisons (UNKNOWN_LOCATION,
2007 TRUTH_ANDIF_EXPR, code1,
2008 swap_tree_comparison (code2),
2009 boolean_type_node, op1a, op1b);
2010 if (t)
2011 return t;
2012 }
2013
2014 /* If both comparisons are of the same value against constants, we might
2015 be able to merge them. */
2016 if (operand_equal_p (op1a, op2a, 0)
2017 && TREE_CODE (op1b) == INTEGER_CST
2018 && TREE_CODE (op2b) == INTEGER_CST)
2019 {
2020 int cmp = tree_int_cst_compare (op1b, op2b);
2021
2022 /* If we have (op1a == op1b), we should either be able to
2023 return that or FALSE, depending on whether the constant op1b
2024 also satisfies the other comparison against op2b. */
2025 if (code1 == EQ_EXPR)
2026 {
2027 bool done = true;
2028 bool val;
2029 switch (code2)
2030 {
2031 case EQ_EXPR: val = (cmp == 0); break;
2032 case NE_EXPR: val = (cmp != 0); break;
2033 case LT_EXPR: val = (cmp < 0); break;
2034 case GT_EXPR: val = (cmp > 0); break;
2035 case LE_EXPR: val = (cmp <= 0); break;
2036 case GE_EXPR: val = (cmp >= 0); break;
2037 default: done = false;
2038 }
2039 if (done)
2040 {
2041 if (val)
2042 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2043 else
2044 return boolean_false_node;
2045 }
2046 }
2047 /* Likewise if the second comparison is an == comparison. */
2048 else if (code2 == EQ_EXPR)
2049 {
2050 bool done = true;
2051 bool val;
2052 switch (code1)
2053 {
2054 case EQ_EXPR: val = (cmp == 0); break;
2055 case NE_EXPR: val = (cmp != 0); break;
2056 case LT_EXPR: val = (cmp > 0); break;
2057 case GT_EXPR: val = (cmp < 0); break;
2058 case LE_EXPR: val = (cmp >= 0); break;
2059 case GE_EXPR: val = (cmp <= 0); break;
2060 default: done = false;
2061 }
2062 if (done)
2063 {
2064 if (val)
2065 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2066 else
2067 return boolean_false_node;
2068 }
2069 }
2070
2071 /* Same business with inequality tests. */
2072 else if (code1 == NE_EXPR)
2073 {
2074 bool val;
2075 switch (code2)
2076 {
2077 case EQ_EXPR: val = (cmp != 0); break;
2078 case NE_EXPR: val = (cmp == 0); break;
2079 case LT_EXPR: val = (cmp >= 0); break;
2080 case GT_EXPR: val = (cmp <= 0); break;
2081 case LE_EXPR: val = (cmp > 0); break;
2082 case GE_EXPR: val = (cmp < 0); break;
2083 default:
2084 val = false;
2085 }
2086 if (val)
2087 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2088 }
2089 else if (code2 == NE_EXPR)
2090 {
2091 bool val;
2092 switch (code1)
2093 {
2094 case EQ_EXPR: val = (cmp == 0); break;
2095 case NE_EXPR: val = (cmp != 0); break;
2096 case LT_EXPR: val = (cmp <= 0); break;
2097 case GT_EXPR: val = (cmp >= 0); break;
2098 case LE_EXPR: val = (cmp < 0); break;
2099 case GE_EXPR: val = (cmp > 0); break;
2100 default:
2101 val = false;
2102 }
2103 if (val)
2104 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2105 }
2106
2107 /* Choose the more restrictive of two < or <= comparisons. */
2108 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2109 && (code2 == LT_EXPR || code2 == LE_EXPR))
2110 {
2111 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2112 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2113 else
2114 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2115 }
2116
2117 /* Likewise choose the more restrictive of two > or >= comparisons. */
2118 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2119 && (code2 == GT_EXPR || code2 == GE_EXPR))
2120 {
2121 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2122 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2123 else
2124 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2125 }
2126
2127 /* Check for singleton ranges. */
2128 else if (cmp == 0
2129 && ((code1 == LE_EXPR && code2 == GE_EXPR)
2130 || (code1 == GE_EXPR && code2 == LE_EXPR)))
2131 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
2132
2133 /* Check for disjoint ranges. */
2134 else if (cmp <= 0
2135 && (code1 == LT_EXPR || code1 == LE_EXPR)
2136 && (code2 == GT_EXPR || code2 == GE_EXPR))
2137 return boolean_false_node;
2138 else if (cmp >= 0
2139 && (code1 == GT_EXPR || code1 == GE_EXPR)
2140 && (code2 == LT_EXPR || code2 == LE_EXPR))
2141 return boolean_false_node;
2142 }
2143
2144 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2145 NAME's definition is a truth value. See if there are any simplifications
2146 that can be done against NAME's definition. */
2147 if (TREE_CODE (op1a) == SSA_NAME
2148 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2149 && (integer_zerop (op1b) || integer_onep (op1b)))
2150 {
2151 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2152 || (code1 == NE_EXPR && integer_onep (op1b)));
2153 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2154 switch (gimple_code (stmt))
2155 {
2156 case GIMPLE_ASSIGN:
2157 /* Try to simplify by copy-propagating the definition. */
2158 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
2159
2160 case GIMPLE_PHI:
2161 /* If every argument to the PHI produces the same result when
2162 ANDed with the second comparison, we win.
2163 Do not do this unless the type is bool since we need a bool
2164 result here anyway. */
2165 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2166 {
2167 tree result = NULL_TREE;
2168 unsigned i;
2169 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2170 {
2171 tree arg = gimple_phi_arg_def (stmt, i);
2172
2173 /* If this PHI has itself as an argument, ignore it.
2174 If all the other args produce the same result,
2175 we're still OK. */
2176 if (arg == gimple_phi_result (stmt))
2177 continue;
2178 else if (TREE_CODE (arg) == INTEGER_CST)
2179 {
2180 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
2181 {
2182 if (!result)
2183 result = boolean_false_node;
2184 else if (!integer_zerop (result))
2185 return NULL_TREE;
2186 }
2187 else if (!result)
2188 result = fold_build2 (code2, boolean_type_node,
2189 op2a, op2b);
2190 else if (!same_bool_comparison_p (result,
2191 code2, op2a, op2b))
2192 return NULL_TREE;
2193 }
2194 else if (TREE_CODE (arg) == SSA_NAME)
2195 {
2196 tree temp = and_var_with_comparison (arg, invert,
2197 code2, op2a, op2b);
2198 if (!temp)
2199 return NULL_TREE;
2200 else if (!result)
2201 result = temp;
2202 else if (!same_bool_result_p (result, temp))
2203 return NULL_TREE;
2204 }
2205 else
2206 return NULL_TREE;
2207 }
2208 return result;
2209 }
2210
2211 default:
2212 break;
2213 }
2214 }
2215 return NULL_TREE;
2216}
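
Editor's aside (illustrative, not GCC code): at the source level the constant-merging cases above collapse conjunctions of comparisons against the same value; the comments name the fold each case would produce, and the standalone checks confirm the equivalences.

#include <assert.h>

static int and_eq_lt (int x) { return x == 5 && x < 3; }   /* disjoint: folds to 0     */
static int and_lt_lt (int x) { return x < 5 && x < 3; }    /* more restrictive: x < 3  */
static int and_le_ge (int x) { return x <= 4 && x >= 4; }  /* singleton range: x == 4  */

int
main (void)
{
  for (int x = -10; x <= 10; x++)
    {
      assert (and_eq_lt (x) == 0);
      assert (and_lt_lt (x) == (x < 3));
      assert (and_le_ge (x) == (x == 4));
    }
  return 0;
}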
2217
2218/* Try to simplify the AND of two comparisons, specified by
2219 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2220 If this can be simplified to a single expression (without requiring
2221 introducing more SSA variables to hold intermediate values),
2222 return the resulting tree. Otherwise return NULL_TREE.
2223 If the result expression is non-null, it has boolean type. */
2224
2225tree
2226maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
2227 enum tree_code code2, tree op2a, tree op2b)
2228{
2229 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2230 if (t)
2231 return t;
2232 else
2233 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2234}
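
Editor's aside, a hypothetical call site (sketch only; OP_A stands for a tree already in scope, and USE_RESULT is a stand-in, not a real GCC function): asking whether (a > 5) && (a > 3) simplifies. With the constant-merging rules above, the result would be the tree for a > 5.

  tree five = build_int_cst (TREE_TYPE (op_a), 5);
  tree three = build_int_cst (TREE_TYPE (op_a), 3);
  tree folded = maybe_fold_and_comparisons (GT_EXPR, op_a, five,
                                            GT_EXPR, op_a, three);
  if (folded)
    /* FOLDED has boolean type and can replace the conjunction.  */
    use_result (folded);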
2235
2236/* Helper function for or_comparisons_1: try to simplify the OR of the
2237 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
2238 If INVERT is true, invert the value of VAR before doing the OR.
2239 Return NULL_TREE if we can't simplify this to a single expression. */
2240
2241static tree
2242or_var_with_comparison (tree var, bool invert,
2243 enum tree_code code2, tree op2a, tree op2b)
2244{
2245 tree t;
2246 gimple stmt = SSA_NAME_DEF_STMT (var);
2247
2248 /* We can only deal with variables whose definitions are assignments. */
2249 if (!is_gimple_assign (stmt))
2250 return NULL_TREE;
2251
2252 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
2253 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
2254 Then we only have to consider the simpler non-inverted cases. */
2255 if (invert)
2256 t = and_var_with_comparison_1 (stmt,
2257 invert_tree_comparison (code2, false),
2258 op2a, op2b);
2259 else
2260 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
2261 return canonicalize_bool (t, invert);
2262}
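
Editor's aside (illustrative, not GCC code): the dual DeMorgan rewrite checks out the same way; !var OR c is !(var AND !c), so only the non-inverted AND case needs dedicated handling.

#include <assert.h>

int
main (void)
{
  for (int var = 0; var <= 1; var++)
    for (int c = 0; c <= 1; c++)
      /* !var OR c  =>  !(var AND !c)  */
      assert ((!var || c) == !(var && !c));
  return 0;
}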
2263
2264/* Try to simplify the OR of the ssa variable defined by the assignment
2265 STMT with the comparison specified by (OP2A CODE2 OP2B).
2266 Return NULL_TREE if we can't simplify this to a single expression. */
2267
2268static tree
2269or_var_with_comparison_1 (gimple stmt,
2270 enum tree_code code2, tree op2a, tree op2b)
2271{
2272 tree var = gimple_assign_lhs (stmt);
2273 tree true_test_var = NULL_TREE;
2274 tree false_test_var = NULL_TREE;
2275 enum tree_code innercode = gimple_assign_rhs_code (stmt);
2276
2277 /* Check for identities like (var OR (var != 0)) => true. */
2278 if (TREE_CODE (op2a) == SSA_NAME
2279 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
2280 {
2281 if ((code2 == NE_EXPR && integer_zerop (op2b))
2282 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
2283 {
2284 true_test_var = op2a;
2285 if (var == true_test_var)
2286 return var;
2287 }
2288 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
2289 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
2290 {
2291 false_test_var = op2a;
2292 if (var == false_test_var)
2293 return boolean_true_node;
2294 }
2295 }
2296
2297 /* If the definition is a comparison, recurse on it. */
2298 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
2299 {
2300 tree t = or_comparisons_1 (innercode,
2301 gimple_assign_rhs1 (stmt),
2302 gimple_assign_rhs2 (stmt),
2303 code2,
2304 op2a,
2305 op2b);
2306 if (t)
2307 return t;
2308 }
2309
2310 /* If the definition is an AND or OR expression, we may be able to
2311 simplify by reassociating. */
2312 if (innercode == TRUTH_AND_EXPR
2313 || innercode == TRUTH_OR_EXPR
2314 || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
2315 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)))
2316 {
2317 tree inner1 = gimple_assign_rhs1 (stmt);
2318 tree inner2 = gimple_assign_rhs2 (stmt);
2319 gimple s;
2320 tree t;
2321 tree partial = NULL_TREE;
2322 bool is_or = (innercode == TRUTH_OR_EXPR || innercode == BIT_IOR_EXPR);
2323
2324 /* Check for boolean identities that don't require recursive examination
2325 of inner1/inner2:
2326 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2327 inner1 OR (inner1 AND inner2) => inner1
2328 !inner1 OR (inner1 OR inner2) => true
2329 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2330 */
2331 if (inner1 == true_test_var)
2332 return (is_or ? var : inner1);
2333 else if (inner2 == true_test_var)
2334 return (is_or ? var : inner2);
2335 else if (inner1 == false_test_var)
2336 return (is_or
2337 ? boolean_true_node
2338 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
2339 else if (inner2 == false_test_var)
2340 return (is_or
2341 ? boolean_true_node
2342 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
2343
2344 /* Next, redistribute/reassociate the OR across the inner tests.
2345 Compute the first partial result, (inner1 OR (op2a code op2b)) */
2346 if (TREE_CODE (inner1) == SSA_NAME
2347 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
2348 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2349 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2350 gimple_assign_rhs1 (s),
2351 gimple_assign_rhs2 (s),
2352 code2, op2a, op2b)))
2353 {
2354 /* Handle the OR case, where we are reassociating:
2355 (inner1 OR inner2) OR (op2a code2 op2b)
2356 => (t OR inner2)
2357 If the partial result t is a constant, we win. Otherwise
2358 continue on to try reassociating with the other inner test. */
2359 if (is_or)
2360 {
2361 if (integer_onep (t))
2362 return boolean_true_node;
2363 else if (integer_zerop (t))
2364 return inner2;
2365 }
2366
2367 /* Handle the AND case, where we are redistributing:
2368 (inner1 AND inner2) OR (op2a code2 op2b)
2369 => (t AND (inner2 OR (op2a code op2b))) */
2370 else if (integer_zerop (t))
2371 return boolean_false_node;
2372
2373 /* Save partial result for later. */
2374 partial = t;
2375 }
2376
2377 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
2378 if (TREE_CODE (inner2) == SSA_NAME
2379 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2380 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2381 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2382 gimple_assign_rhs1 (s),
2383 gimple_assign_rhs2 (s),
2384 code2, op2a, op2b)))
2385 {
2386 /* Handle the OR case, where we are reassociating:
2387 (inner1 OR inner2) OR (op2a code2 op2b)
2388 => (inner1 OR t)
2389 => (t OR partial) */
2390 if (is_or)
2391 {
2392 if (integer_zerop (t))
2393 return inner1;
2394 else if (integer_onep (t))
2395 return boolean_true_node;
2396 /* If both are the same, we can apply the identity
2397 (x OR x) == x. */
2398 else if (partial && same_bool_result_p (t, partial))
2399 return t;
2400 }
2401
2402 /* Handle the AND case, where we are redistributing:
2403 (inner1 AND inner2) OR (op2a code2 op2b)
2404 => (t AND (inner1 OR (op2a code2 op2b)))
2405 => (t AND partial) */
2406 else
2407 {
2408 if (integer_zerop (t))
2409 return boolean_false_node;
2410 else if (partial)
2411 {
2412 /* We already got a simplification for the other
2413 operand to the redistributed AND expression. The
2414 interesting case is when at least one is true.
2415 Or, if both are the same, we can apply the identity
2416 (x AND x) == x. */
2417 if (integer_onep (partial))
2418 return t;
2419 else if (integer_onep (t))
2420 return partial;
2421 else if (same_bool_result_p (t, partial))
2422 return t;
2423 }
2424 }
2425 }
2426 }
2427 return NULL_TREE;
2428}
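
Editor's aside (illustrative, not GCC code): the OR-side rewrites mirror the AND case, reassociating OR and distributing OR over AND; an exhaustive standalone check:

#include <assert.h>

int
main (void)
{
  for (int i1 = 0; i1 <= 1; i1++)
    for (int i2 = 0; i2 <= 1; i2++)
      for (int c = 0; c <= 1; c++)
        {
          /* (inner1 OR inner2) OR c   =>  (inner1 OR c) OR inner2  */
          assert (((i1 || i2) || c) == ((i1 || c) || i2));
          /* (inner1 AND inner2) OR c  =>  (inner1 OR c) AND (inner2 OR c)  */
          assert (((i1 && i2) || c) == ((i1 || c) && (i2 || c)));
        }
  return 0;
}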
2429
2430/* Try to simplify the OR of two comparisons defined by
2431 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2432 If this can be done without constructing an intermediate value,
2433 return the resulting tree; otherwise NULL_TREE is returned.
2434 This function is deliberately asymmetric as it recurses on SSA_DEFs
2435 in the first comparison but not the second. */
2436
2437static tree
2438or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2439 enum tree_code code2, tree op2a, tree op2b)
2440{
2441 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
2442 if (operand_equal_p (op1a, op2a, 0)
2443 && operand_equal_p (op1b, op2b, 0))
2444 {
2445 tree t = combine_comparisons (UNKNOWN_LOCATION,
2446 TRUTH_ORIF_EXPR, code1, code2,
2447 boolean_type_node, op1a, op1b);
2448 if (t)
2449 return t;
2450 }
2451
2452 /* Likewise the swapped case of the above. */
2453 if (operand_equal_p (op1a, op2b, 0)
2454 && operand_equal_p (op1b, op2a, 0))
2455 {
2456 tree t = combine_comparisons (UNKNOWN_LOCATION,
2457 TRUTH_ORIF_EXPR, code1,
2458 swap_tree_comparison (code2),
2459 boolean_type_node, op1a, op1b);
2460 if (t)
2461 return t;
2462 }
2463
2464 /* If both comparisons are of the same value against constants, we might
2465 be able to merge them. */
2466 if (operand_equal_p (op1a, op2a, 0)
2467 && TREE_CODE (op1b) == INTEGER_CST
2468 && TREE_CODE (op2b) == INTEGER_CST)
2469 {
2470 int cmp = tree_int_cst_compare (op1b, op2b);
2471
2472 /* If we have (op1a != op1b), we should either be able to
2473 return that or TRUE, depending on whether the constant op1b
2474 also satisfies the other comparison against op2b. */
2475 if (code1 == NE_EXPR)
2476 {
2477 bool done = true;
2478 bool val;
2479 switch (code2)
2480 {
2481 case EQ_EXPR: val = (cmp == 0); break;
2482 case NE_EXPR: val = (cmp != 0); break;
2483 case LT_EXPR: val = (cmp < 0); break;
2484 case GT_EXPR: val = (cmp > 0); break;
2485 case LE_EXPR: val = (cmp <= 0); break;
2486 case GE_EXPR: val = (cmp >= 0); break;
2487 default: done = false;
2488 }
2489 if (done)
2490 {
2491 if (val)
2492 return boolean_true_node;
2493 else
2494 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2495 }
2496 }
2497 /* Likewise if the second comparison is a != comparison. */
2498 else if (code2 == NE_EXPR)
2499 {
2500 bool done = true;
2501 bool val;
2502 switch (code1)
2503 {
2504 case EQ_EXPR: val = (cmp == 0); break;
2505 case NE_EXPR: val = (cmp != 0); break;
2506 case LT_EXPR: val = (cmp > 0); break;
2507 case GT_EXPR: val = (cmp < 0); break;
2508 case LE_EXPR: val = (cmp >= 0); break;
2509 case GE_EXPR: val = (cmp <= 0); break;
2510 default: done = false;
2511 }
2512 if (done)
2513 {
2514 if (val)
2515 return boolean_true_node;
2516 else
2517 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2518 }
2519 }
2520
2521 /* See if an equality test is redundant with the other comparison. */
2522 else if (code1 == EQ_EXPR)
2523 {
2524 bool val;
2525 switch (code2)
2526 {
2527 case EQ_EXPR: val = (cmp == 0); break;
2528 case NE_EXPR: val = (cmp != 0); break;
2529 case LT_EXPR: val = (cmp < 0); break;
2530 case GT_EXPR: val = (cmp > 0); break;
2531 case LE_EXPR: val = (cmp <= 0); break;
2532 case GE_EXPR: val = (cmp >= 0); break;
2533 default:
2534 val = false;
2535 }
2536 if (val)
2537 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2538 }
2539 else if (code2 == EQ_EXPR)
2540 {
2541 bool val;
2542 switch (code1)
2543 {
2544 case EQ_EXPR: val = (cmp == 0); break;
2545 case NE_EXPR: val = (cmp != 0); break;
2546 case LT_EXPR: val = (cmp > 0); break;
2547 case GT_EXPR: val = (cmp < 0); break;
2548 case LE_EXPR: val = (cmp >= 0); break;
2549 case GE_EXPR: val = (cmp <= 0); break;
2550 default:
2551 val = false;
2552 }
2553 if (val)
2554 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2555 }
2556
2557 /* Choose the less restrictive of two < or <= comparisons. */
2558 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2559 && (code2 == LT_EXPR || code2 == LE_EXPR))
2560 {
2561 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2562 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2563 else
2564 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2565 }
2566
2567 /* Likewise choose the less restrictive of two > or >= comparisons. */
2568 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2569 && (code2 == GT_EXPR || code2 == GE_EXPR))
2570 {
2571 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2572 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2573 else
2574 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2575 }
2576
2577 /* Check for singleton ranges. */
2578 else if (cmp == 0
2579 && ((code1 == LT_EXPR && code2 == GT_EXPR)
2580 || (code1 == GT_EXPR && code2 == LT_EXPR)))
2581 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
2582
2583 /* Check for less/greater pairs that don't restrict the range at all. */
2584 else if (cmp >= 0
2585 && (code1 == LT_EXPR || code1 == LE_EXPR)
2586 && (code2 == GT_EXPR || code2 == GE_EXPR))
2587 return boolean_true_node;
2588 else if (cmp <= 0
2589 && (code1 == GT_EXPR || code1 == GE_EXPR)
2590 && (code2 == LT_EXPR || code2 == LE_EXPR))
2591 return boolean_true_node;
2592 }
2593
2594 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2595 NAME's definition is a truth value. See if there are any simplifications
2596 that can be done against NAME's definition. */
2597 if (TREE_CODE (op1a) == SSA_NAME
2598 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2599 && (integer_zerop (op1b) || integer_onep (op1b)))
2600 {
2601 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2602 || (code1 == NE_EXPR && integer_onep (op1b)));
2603 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2604 switch (gimple_code (stmt))
2605 {
2606 case GIMPLE_ASSIGN:
2607 /* Try to simplify by copy-propagating the definition. */
2608 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
2609
2610 case GIMPLE_PHI:
2611 /* If every argument to the PHI produces the same result when
2612 ORed with the second comparison, we win.
2613 Do not do this unless the type is bool since we need a bool
2614 result here anyway. */
2615 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2616 {
2617 tree result = NULL_TREE;
2618 unsigned i;
2619 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2620 {
2621 tree arg = gimple_phi_arg_def (stmt, i);
2622
2623 /* If this PHI has itself as an argument, ignore it.
2624 If all the other args produce the same result,
2625 we're still OK. */
2626 if (arg == gimple_phi_result (stmt))
2627 continue;
2628 else if (TREE_CODE (arg) == INTEGER_CST)
2629 {
2630 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
2631 {
2632 if (!result)
2633 result = boolean_true_node;
2634 else if (!integer_onep (result))
2635 return NULL_TREE;
2636 }
2637 else if (!result)
2638 result = fold_build2 (code2, boolean_type_node,
2639 op2a, op2b);
2640 else if (!same_bool_comparison_p (result,
2641 code2, op2a, op2b))
2642 return NULL_TREE;
2643 }
2644 else if (TREE_CODE (arg) == SSA_NAME)
2645 {
2646 tree temp = or_var_with_comparison (arg, invert,
2647 code2, op2a, op2b);
2648 if (!temp)
2649 return NULL_TREE;
2650 else if (!result)
2651 result = temp;
2652 else if (!same_bool_result_p (result, temp))
2653 return NULL_TREE;
2654 }
2655 else
2656 return NULL_TREE;
2657 }
2658 return result;
2659 }
2660
2661 default:
2662 break;
2663 }
2664 }
2665 return NULL_TREE;
2666}
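
Editor's aside (illustrative, not GCC code): the source-level effect of the merging above, with the fold each case would produce named in the comments.

#include <assert.h>

static int or_lt_lt (int x) { return x < 5 || x < 3; }  /* less restrictive: x < 5  */
static int or_lt_gt (int x) { return x < 4 || x > 4; }  /* singleton gap: x != 4    */
static int or_full  (int x) { return x < 5 || x > 3; }  /* unrestricted: always 1   */

int
main (void)
{
  for (int x = -10; x <= 10; x++)
    {
      assert (or_lt_lt (x) == (x < 5));
      assert (or_lt_gt (x) == (x != 4));
      assert (or_full (x) == 1);
    }
  return 0;
}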
2667
2668/* Try to simplify the OR of two comparisons, specified by
2669 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2670 If this can be simplified to a single expression (without requiring
2671 introducing more SSA variables to hold intermediate values),
2672 return the resulting tree. Otherwise return NULL_TREE.
2673 If the result expression is non-null, it has boolean type. */
2674
2675tree
2676maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2677 enum tree_code code2, tree op2a, tree op2b)
2678{
2679 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2680 if (t)
2681 return t;
2682 else
2683 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2684}
2685
2686
2687/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2688
2689 Either NULL_TREE, a simplified but non-constant expression, or a
2690 constant is returned.
2691
2692 ??? This should go into a gimple-fold-inline.h file to be eventually
2693 privatized with the single valueize function used in the various TUs
2694 to avoid the indirect function call overhead. */
2695
2696tree
2697gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
2698{
2699 location_t loc = gimple_location (stmt);
2700 switch (gimple_code (stmt))
2701 {
2702 case GIMPLE_ASSIGN:
2703 {
2704 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2705
2706 switch (get_gimple_rhs_class (subcode))
2707 {
2708 case GIMPLE_SINGLE_RHS:
2709 {
2710 tree rhs = gimple_assign_rhs1 (stmt);
2711 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
2712
2713 if (TREE_CODE (rhs) == SSA_NAME)
2714 {
2715 /* If the RHS is an SSA_NAME, return its known constant value,
2716 if any. */
2717 return (*valueize) (rhs);
2718 }
2719 /* Handle propagating invariant addresses into address
2720 operations. */
2721 else if (TREE_CODE (rhs) == ADDR_EXPR
2722 && !is_gimple_min_invariant (rhs))
2723 {
2724 HOST_WIDE_INT offset;
2725 tree base;
2726 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
2727 &offset,
2728 valueize);
2729 if (base
2730 && (CONSTANT_CLASS_P (base)
2731 || decl_address_invariant_p (base)))
2732 return build_invariant_address (TREE_TYPE (rhs),
2733 base, offset);
2734 }
2735 else if (TREE_CODE (rhs) == CONSTRUCTOR
2736 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
2737 && (CONSTRUCTOR_NELTS (rhs)
2738 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
2739 {
2740 unsigned i;
2741 tree val, list;
2742
2743 list = NULL_TREE;
2744 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
2745 {
2746 val = (*valueize) (val);
2747 if (TREE_CODE (val) == INTEGER_CST
2748 || TREE_CODE (val) == REAL_CST
2749 || TREE_CODE (val) == FIXED_CST)
2750 list = tree_cons (NULL_TREE, val, list);
2751 else
2752 return NULL_TREE;
2753 }
2754
2755 return build_vector (TREE_TYPE (rhs), nreverse (list));
2756 }
2757
2758 if (kind == tcc_reference)
2759 {
2760 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
2761 || TREE_CODE (rhs) == REALPART_EXPR
2762 || TREE_CODE (rhs) == IMAGPART_EXPR)
2763 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2764 {
2765 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2766 return fold_unary_loc (EXPR_LOCATION (rhs),
2767 TREE_CODE (rhs),
2768 TREE_TYPE (rhs), val);
2769 }
2770 else if (TREE_CODE (rhs) == BIT_FIELD_REF
2771 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2772 {
2773 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2774 return fold_ternary_loc (EXPR_LOCATION (rhs),
2775 TREE_CODE (rhs),
2776 TREE_TYPE (rhs), val,
2777 TREE_OPERAND (rhs, 1),
2778 TREE_OPERAND (rhs, 2));
2779 }
2780 else if (TREE_CODE (rhs) == MEM_REF
2781 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2782 {
2783 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2784 if (TREE_CODE (val) == ADDR_EXPR
2785 && is_gimple_min_invariant (val))
2786 {
2787 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
2788 unshare_expr (val),
2789 TREE_OPERAND (rhs, 1));
2790 if (tem)
2791 rhs = tem;
2792 }
2793 }
2794 return fold_const_aggregate_ref_1 (rhs, valueize);
2795 }
2796 else if (kind == tcc_declaration)
2797 return get_symbol_constant_value (rhs);
2798 return rhs;
2799 }
2800
2801 case GIMPLE_UNARY_RHS:
2802 {
2803 /* Handle unary operators that can appear in GIMPLE form.
2804 Note that we know the single operand must be a constant,
2805 so this should almost always return a simplified RHS. */
2806 tree lhs = gimple_assign_lhs (stmt);
2807 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2808
2809 /* Conversions are useless for CCP purposes if they are
2810 value-preserving. Thus the restrictions that
2811 useless_type_conversion_p places for pointer type conversions
2812 do not apply here. Substitution later will only substitute to
2813 allowed places. */
2814 if (CONVERT_EXPR_CODE_P (subcode)
2815 && POINTER_TYPE_P (TREE_TYPE (lhs))
2816 && POINTER_TYPE_P (TREE_TYPE (op0)))
2817 {
2818 tree tem;
2819 /* Try to re-construct array references on-the-fly. */
2820 if (!useless_type_conversion_p (TREE_TYPE (lhs),
2821 TREE_TYPE (op0))
2822 && ((tem = maybe_fold_offset_to_address
2823 (loc,
2824 op0, integer_zero_node, TREE_TYPE (lhs)))
2825 != NULL_TREE))
2826 return tem;
2827 return op0;
2828 }
2829
2830 return
2831 fold_unary_ignore_overflow_loc (loc, subcode,
2832 gimple_expr_type (stmt), op0);
2833 }
2834
2835 case GIMPLE_BINARY_RHS:
2836 {
2837 /* Handle binary operators that can appear in GIMPLE form. */
2838 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2839 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2840
2841 /* Translate &x + CST into an invariant form suitable for
2842 further propagation. */
2843 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
2844 && TREE_CODE (op0) == ADDR_EXPR
2845 && TREE_CODE (op1) == INTEGER_CST)
2846 {
2847 tree off = fold_convert (ptr_type_node, op1);
2848 return build_fold_addr_expr
2849 (fold_build2 (MEM_REF,
2850 TREE_TYPE (TREE_TYPE (op0)),
2851 unshare_expr (op0), off));
2852 }
2853
2854 return fold_binary_loc (loc, subcode,
2855 gimple_expr_type (stmt), op0, op1);
2856 }
2857
2858 case GIMPLE_TERNARY_RHS:
2859 {
2860 /* Handle ternary operators that can appear in GIMPLE form. */
2861 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2862 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2863 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
2864
2865 return fold_ternary_loc (loc, subcode,
2866 gimple_expr_type (stmt), op0, op1, op2);
2867 }
2868
2869 default:
2870 gcc_unreachable ();
2871 }
2872 }
2873
2874 case GIMPLE_CALL:
2875 {
2876 tree fn = (*valueize) (gimple_call_fn (stmt));
2877 if (TREE_CODE (fn) == ADDR_EXPR
2878 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
2879 && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
2880 {
2881 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
2882 tree call, retval;
2883 unsigned i;
2884 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2885 args[i] = (*valueize) (gimple_call_arg (stmt, i));
2886 call = build_call_array_loc (loc,
2887 gimple_call_return_type (stmt),
2888 fn, gimple_call_num_args (stmt), args);
2889 retval = fold_call_expr (EXPR_LOCATION (call), call, false);
2890 if (retval)
2891 /* fold_call_expr wraps the result inside a NOP_EXPR. */
2892 STRIP_NOPS (retval);
2893 return retval;
2894 }
2895 return NULL_TREE;
2896 }
2897
2898 default:
2899 return NULL_TREE;
2900 }
2901}
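
Editor's aside, a sketch of the VALUEIZE hook (hypothetical fragment; assumes the GCC tree types in scope): the callback maps an SSA name to a known value, and returning the name itself means "no better value known", so only constants literally present in the statement participate in folding.

  /* Trivial valueizer: follow no SSA edges.  */
  static tree
  no_edges_valueize (tree name)
  {
    return name;
  }

  /* ... in some pass:
     tree val = gimple_fold_stmt_to_constant_1 (stmt, no_edges_valueize);  */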
2902
2903/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2904 Returns NULL_TREE if folding to a constant is not possible, otherwise
2905 returns a constant according to is_gimple_min_invariant. */
2906
2907tree
2908gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
2909{
2910 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
2911 if (res && is_gimple_min_invariant (res))
2912 return res;
2913 return NULL_TREE;
2914}
2915
2916
2917/* The following set of functions are supposed to fold references using
2918 their constant initializers. */
2919
2920static tree fold_ctor_reference (tree type, tree ctor,
2921 unsigned HOST_WIDE_INT offset,
2922 unsigned HOST_WIDE_INT size);
2923
2924 /* See if we can find the constructor defining the value of BASE.
2925 When we know the constructor at a constant offset (such as when
2926 BASE is array[40] and we know the constructor of array), then
2927 BIT_OFFSET is adjusted accordingly.
2928
2929 As a special case, return error_mark_node when the constructor
2930 is not explicitly available but is known to be zero,
2931 such as 'static const int a;'. */
2932static tree
2933get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
2934 tree (*valueize)(tree))
2935{
2936 HOST_WIDE_INT bit_offset2, size, max_size;
2937 if (TREE_CODE (base) == MEM_REF)
2938 {
2939 if (!integer_zerop (TREE_OPERAND (base, 1)))
2940 {
2941 if (!host_integerp (TREE_OPERAND (base, 1), 0))
2942 return NULL_TREE;
2943 *bit_offset += (mem_ref_offset (base).low
2944 * BITS_PER_UNIT);
2945 }
2946
2947 if (valueize
2948 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2949 base = valueize (TREE_OPERAND (base, 0));
2950 if (!base || TREE_CODE (base) != ADDR_EXPR)
2951 return NULL_TREE;
2952 base = TREE_OPERAND (base, 0);
2953 }
2954
2955 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
2956 DECL_INITIAL. If BASE is a nested reference into another
2957 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
2958 the inner reference. */
2959 switch (TREE_CODE (base))
2960 {
2961 case VAR_DECL:
2962 if (!const_value_known_p (base))
2963 return NULL_TREE;
2964
2965 /* Fallthru. */
2966 case CONST_DECL:
2967 if (!DECL_INITIAL (base)
2968 && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
2969 return error_mark_node;
2970 return DECL_INITIAL (base);
2971
2972 case ARRAY_REF:
2973 case COMPONENT_REF:
2974 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
2975 if (max_size == -1 || size != max_size)
2976 return NULL_TREE;
2977 *bit_offset += bit_offset2;
2978 return get_base_constructor (base, bit_offset, valueize);
2979
2980 case STRING_CST:
2981 case CONSTRUCTOR:
2982 return base;
2983
2984 default:
2985 return NULL_TREE;
2986 }
2987}
2988
2989 /* CTOR is a STRING_CST. Fold a reference of type TYPE and size SIZE
2990 to the memory at bit OFFSET.
2991
2992 We only do the simple job of folding byte accesses. */
2993
2994static tree
2995fold_string_cst_ctor_reference (tree type, tree ctor,
2996 unsigned HOST_WIDE_INT offset,
2997 unsigned HOST_WIDE_INT size)
2998{
2999 if (INTEGRAL_TYPE_P (type)
3000 && (TYPE_MODE (type)
3001 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
3002 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
3003 == MODE_INT)
3004 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
3005 && size == BITS_PER_UNIT
3006 && !(offset % BITS_PER_UNIT))
3007 {
3008 offset /= BITS_PER_UNIT;
3009 if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
3010 return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
3011 [offset]));
3012 /* Folding
3013 const char a[20]="hello";
3014 return a[10];
3015
3016 might lead to an offset greater than the string length. In this case
3017 we know the value is either initialized to 0 or out of bounds. Return
3018 0 in both cases. */
3019 return build_zero_cst (type);
3020 }
3021 return NULL_TREE;
3022}
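
Editor's aside (illustrative, not GCC code): in source terms, the byte accesses handled here look as follows; a read inside the initializer folds to the character value, and a read past it folds to zero, exactly as the comment above describes.

static const char a[20] = "hello";

int in_bounds (void) { return a[1]; }   /* folds to 'e'                        */
int past_init (void) { return a[10]; }  /* folds to 0: past "hello" but still
                                           inside a[20], so zero-initialized.  */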
3023
3024 /* CTOR is a CONSTRUCTOR of an array type. Fold a reference of type TYPE
3025 and size SIZE to the memory at bit OFFSET. */
3026
3027static tree
3028fold_array_ctor_reference (tree type, tree ctor,
3029 unsigned HOST_WIDE_INT offset,
3030 unsigned HOST_WIDE_INT size)
3031{
3032 unsigned HOST_WIDE_INT cnt;
3033 tree cfield, cval;
3034 double_int low_bound, elt_size;
3035 double_int index, max_index;
3036 double_int access_index;
3037 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
3038 HOST_WIDE_INT inner_offset;
3039
3040 /* Compute low bound and elt size. */
3041 if (domain_type && TYPE_MIN_VALUE (domain_type))
3042 {
3043 /* Static constructors for variably sized objects make no sense. */
3044 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
3045 low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
3046 }
3047 else
3048 low_bound = double_int_zero;
3049 /* Static constructors for variably sized objects make no sense. */
3050 gcc_assert (TREE_CODE(TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
3051 == INTEGER_CST);
3052 elt_size =
3053 tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
3054
3055
3056 /* We can only handle constant-sized accesses that are known not to
3057 be larger than the size of an array element. */
3058 if (!TYPE_SIZE_UNIT (type)
3059 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
3060 || double_int_cmp (elt_size,
3061 tree_to_double_int (TYPE_SIZE_UNIT (type)), 0) < 0)
3062 return NULL_TREE;
3063
3064 /* Compute the array index we look for. */
3065 access_index = double_int_udiv (uhwi_to_double_int (offset / BITS_PER_UNIT),
3066 elt_size, TRUNC_DIV_EXPR);
3067 access_index = double_int_add (access_index, low_bound);
3068
3069 /* And offset within the access. */
3070 inner_offset = offset % (double_int_to_uhwi (elt_size) * BITS_PER_UNIT);
3071
3072 /* See if the array element is large enough to span the whole access. We
3073 do not care to fold accesses spanning multiple array indexes. */
3074 if (inner_offset + size > double_int_to_uhwi (elt_size) * BITS_PER_UNIT)
3075 return NULL_TREE;
3076
3077 index = double_int_sub (low_bound, double_int_one);
3078 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
3079 {
3080 /* An array constructor might explicitly set the index, specify a range,
3081 or leave the index NULL, meaning that it is the next index after the
3082 previous one. */
3083 if (cfield)
3084 {
3085 if (TREE_CODE (cfield) == INTEGER_CST)
3086 max_index = index = tree_to_double_int (cfield);
3087 else
3088 {
3089 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
3090 index = tree_to_double_int (TREE_OPERAND (cfield, 0));
3091 max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
3092 }
3093 }
3094 else
3095 max_index = index = double_int_add (index, double_int_one);
3096
3097 /* Do we have a match? */
3098 if (double_int_cmp (access_index, index, 1) >= 0
3099 && double_int_cmp (access_index, max_index, 1) <= 0)
3100 return fold_ctor_reference (type, cval, inner_offset, size);
3101 }
3102 /* When memory is not explicitly mentioned in the constructor,
3103 it is 0 (or out of range). */
3104 return build_zero_cst (type);
3105}
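
Editor's aside (illustrative, standalone): a worked instance of the index arithmetic above; with 4-byte elements and a zero low bound, bit offset 100 selects element 3, 4 bits into it.

#include <assert.h>

#define BITS_PER_UNIT 8

int
main (void)
{
  unsigned long offset = 100;    /* access position in bits  */
  unsigned long elt_size = 4;    /* element size in bytes    */
  unsigned long low_bound = 0;

  unsigned long access_index = offset / BITS_PER_UNIT / elt_size + low_bound;
  unsigned long inner_offset = offset % (elt_size * BITS_PER_UNIT);

  assert (access_index == 3);    /* 100 / 8 = 12 bytes; 12 / 4 = element 3  */
  assert (inner_offset == 4);    /* 100 - 3 * 32 = 4 bits into element 3    */
  return 0;
}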
3106
3107 /* CTOR is a CONSTRUCTOR of an aggregate or vector.
3108 Fold a reference of type TYPE and size SIZE to the memory at bit OFFSET. */
3109
3110static tree
3111fold_nonarray_ctor_reference (tree type, tree ctor,
3112 unsigned HOST_WIDE_INT offset,
3113 unsigned HOST_WIDE_INT size)
3114{
3115 unsigned HOST_WIDE_INT cnt;
3116 tree cfield, cval;
3117
3118 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
3119 cval)
3120 {
3121 tree byte_offset = DECL_FIELD_OFFSET (cfield);
3122 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
3123 tree field_size = DECL_SIZE (cfield);
3124 double_int bitoffset;
3125 double_int byte_offset_cst = tree_to_double_int (byte_offset);
3126 double_int bits_per_unit_cst = uhwi_to_double_int (BITS_PER_UNIT);
3127 double_int bitoffset_end;
3128
3129 /* Variable-sized objects in static constructors make no sense,
3130 but field_size can be NULL for flexible array members. */
3131 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
3132 && TREE_CODE (byte_offset) == INTEGER_CST
3133 && (field_size != NULL_TREE
3134 ? TREE_CODE (field_size) == INTEGER_CST
3135 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
3136
3137 /* Compute bit offset of the field. */
3138 bitoffset = double_int_add (tree_to_double_int (field_offset),
3139 double_int_mul (byte_offset_cst,
3140 bits_per_unit_cst));
3141 /* Compute bit offset where the field ends. */
3142 if (field_size != NULL_TREE)
3143 bitoffset_end = double_int_add (bitoffset,
3144 tree_to_double_int (field_size));
3145 else
3146 bitoffset_end = double_int_zero;
3147
3148 /* Is OFFSET in the range [BITOFFSET, BITOFFSET_END)? */
3149 if (double_int_cmp (uhwi_to_double_int (offset), bitoffset, 0) >= 0
3150 && (field_size == NULL_TREE
3151 || double_int_cmp (uhwi_to_double_int (offset),
3152 bitoffset_end, 0) < 0))
3153 {
3154 double_int access_end = double_int_add (uhwi_to_double_int (offset),
3155 uhwi_to_double_int (size));
3156 double_int inner_offset = double_int_sub (uhwi_to_double_int (offset),
3157 bitoffset);
3158 /* We do have overlap. Now see if the field is large enough to
3159 cover the access. Give up for accesses spanning multiple
3160 fields. */
3161 if (double_int_cmp (access_end, bitoffset_end, 0) > 0)
3162 return NULL_TREE;
3163 return fold_ctor_reference (type, cval,
3164 double_int_to_uhwi (inner_offset), size);
3165 }
3166 }
3167 /* When memory is not explicitly mentioned in the constructor, it is 0. */
3168 return build_zero_cst (type);
3169}
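
Editor's aside (illustrative, not GCC code): in source terms, a load from a constant aggregate folds to the matching field's initializer, and a field absent from the constructor folds to zero, which is what the final return above implements.

struct s { int a; int b; };
static const struct s cs = { .a = 7 };  /* .b is not mentioned in the ctor  */

int field_a (void) { return cs.a; }     /* folds to 7  */
int field_b (void) { return cs.b; }     /* folds to 0  */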
3170
3171 /* CTOR is the value initializing memory; fold a reference of type TYPE
3172 and size SIZE to the memory at bit OFFSET. */
3173
3174static tree
3175fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
3176 unsigned HOST_WIDE_INT size)
3177{
3178 tree ret;
3179
3180 /* We found the field with an exact match. */
3181 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
3182 && !offset)
3183 return canonicalize_constructor_val (ctor);
3184
3185 /* We are at the end of the walk; see if we can view-convert the
3186 result. */
3187 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
3188 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
3189 && operand_equal_p (TYPE_SIZE (type),
3190 TYPE_SIZE (TREE_TYPE (ctor)), 0))
3191 {
3192 ret = canonicalize_constructor_val (ctor);
3193 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
3194 if (ret)
3195 STRIP_NOPS (ret);
3196 return ret;
3197 }
3198 if (TREE_CODE (ctor) == STRING_CST)
3199 return fold_string_cst_ctor_reference (type, ctor, offset, size);
3200 if (TREE_CODE (ctor) == CONSTRUCTOR)
3201 {
3202
3203 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
3204 return fold_array_ctor_reference (type, ctor, offset, size);
3205 else
3206 return fold_nonarray_ctor_reference (type, ctor, offset, size);
3207 }
3208
3209 return NULL_TREE;
3210}
3211
3212/* Return the tree representing the element referenced by T if T is an
3213 ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
3214 names using VALUEIZE. Return NULL_TREE otherwise. */
3215
3216tree
3217fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
3218{
3219 tree ctor, idx, base;
3220 HOST_WIDE_INT offset, size, max_size;
3221 tree tem;
3222
3223 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
3224 return get_symbol_constant_value (t);
3225
3226 tem = fold_read_from_constant_string (t);
3227 if (tem)
3228 return tem;
3229
3230 switch (TREE_CODE (t))
3231 {
3232 case ARRAY_REF:
3233 case ARRAY_RANGE_REF:
3234 /* Constant indexes are handled well by get_base_constructor.
3235 Only special-case variable offsets here.
3236 FIXME: This code can't handle nested references with variable indexes
3237 (they will be handled only by iteration of ccp). Perhaps we can bring
3238 get_ref_base_and_extent here and make it use a valueize callback. */
3239 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
3240 && valueize
3241 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
3242 && host_integerp (idx, 0))
3243 {
3244 tree low_bound, unit_size;
3245
3246 /* If the resulting bit-offset is constant, track it. */
3247 if ((low_bound = array_ref_low_bound (t),
3248 host_integerp (low_bound, 0))
3249 && (unit_size = array_ref_element_size (t),
3250 host_integerp (unit_size, 1)))
3251 {
3252 offset = TREE_INT_CST_LOW (idx);
3253 offset -= TREE_INT_CST_LOW (low_bound);
3254 offset *= TREE_INT_CST_LOW (unit_size);
3255 offset *= BITS_PER_UNIT;
3256
3257 base = TREE_OPERAND (t, 0);
3258 ctor = get_base_constructor (base, &offset, valueize);
3259 /* Empty constructor. Always fold to 0. */
3260 if (ctor == error_mark_node)
3261 return build_zero_cst (TREE_TYPE (t));
3262 /* Out-of-bounds array access. Value is undefined,
3263 but don't fold. */
3264 if (offset < 0)
3265 return NULL_TREE;
3266 /* We cannot determine the ctor. */
3267 if (!ctor)
3268 return NULL_TREE;
3269 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
3270 TREE_INT_CST_LOW (unit_size)
3271 * BITS_PER_UNIT);
3272 }
3273 }
3274 /* Fallthru. */
3275
3276 case COMPONENT_REF:
3277 case BIT_FIELD_REF:
3278 case TARGET_MEM_REF:
3279 case MEM_REF:
3280 base = get_ref_base_and_extent (t, &offset, &size, &max_size);
3281 ctor = get_base_constructor (base, &offset, valueize);
3282
3283 /* Empty constructor. Always fold to 0. */
3284 if (ctor == error_mark_node)
3285 return build_zero_cst (TREE_TYPE (t));
3286 /* We do not know the precise address. */
3287 if (max_size == -1 || max_size != size)
3288 return NULL_TREE;
3289 /* We cannot determine the ctor. */
3290 if (!ctor)
3291 return NULL_TREE;
3292
3293 /* Out-of-bounds array access. Value is undefined, but don't fold. */
3294 if (offset < 0)
3295 return NULL_TREE;
3296
3297 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size);
3298
3299 case REALPART_EXPR:
3300 case IMAGPART_EXPR:
3301 {
3302 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
3303 if (c && TREE_CODE (c) == COMPLEX_CST)
3304 return fold_build1_loc (EXPR_LOCATION (t),
3305 TREE_CODE (t), TREE_TYPE (t), c);
3306 break;
3307 }
3308
3309 default:
3310 break;
3311 }
3312
3313 return NULL_TREE;
3314}
3315
3316tree
3317fold_const_aggregate_ref (tree t)
3318{
3319 return fold_const_aggregate_ref_1 (t, NULL);
3320}