]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/gimple-fold.c
re PR fortran/48706 (Type extension inside subroutine)
[thirdparty/gcc.git] / gcc / gimple-fold.c
CommitLineData
cbdd87d4 1/* Statement simplification on GIMPLE.
6c66f733 2 Copyright (C) 2010, 2011 Free Software Foundation, Inc.
cbdd87d4
RG
3 Split out from tree-ssa-ccp.c.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it
8under the terms of the GNU General Public License as published by the
9Free Software Foundation; either version 3, or (at your option) any
10later version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT
13ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "tm.h"
25#include "tree.h"
26#include "flags.h"
cbdd87d4 27#include "function.h"
cbdd87d4
RG
28#include "tree-dump.h"
29#include "tree-flow.h"
30#include "tree-pass.h"
31#include "tree-ssa-propagate.h"
cbdd87d4 32#include "target.h"
cfef45c8 33#include "gimple-fold.h"
cbdd87d4 34
b3b9f3d0
JH
/* Return true when DECL can be referenced from current unit.
   We can get declarations that are not possible to reference for
   various reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
	In this case we have static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred by external vtables that
	we devirtualize only during final compilation stage.
	At this time we already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl)
{
  struct varpool_node *vnode;
  struct cgraph_node *node;

  /* Automatic (neither static nor external) declarations are always
     local to the current unit.  */
  if (!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
    return true;
  /* External flag is set, so we deal with C++ reference
     to static object from other file.  */
  if (DECL_EXTERNAL (decl) && TREE_STATIC (decl)
      && TREE_CODE (decl) == VAR_DECL)
    {
      /* Just be sure it is not big in frontend setting
	 flags incorrectly.  Those variables should never
	 be finalized.  */
      gcc_checking_assert (!(vnode = varpool_get_node (decl))
			   || !vnode->finalized);
      return false;
    }
  /* When function is public, we always can introduce new reference.
     Exception are the COMDAT functions where introducing a direct
     reference imply need to include function body in the current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We are not at ltrans stage; so don't worry about WHOPR.
     Also when still gimplifying all referred comdat functions will be
     produced.
     ??? as observed in PR20991 for already optimized out comdat virtual
     functions we may not necessarily give up because the copy will be
     output elsewhere when corresponding vtable is output.  */
  if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
    return true;
  /* If we already output the function body, we are safe.  */
  if (TREE_ASM_WRITTEN (decl))
    return true;
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      node = cgraph_get_node (decl);
      /* Check that we still have function body and that we didn't take
	 the decision to eliminate offline copy of the function yet.
	 The second is important when devirtualization happens during final
	 compilation stage when making a new reference no longer makes callee
	 to be compiled.  */
      if (!node || !node->analyzed || node->global.inlined_to)
	return false;
    }
  else if (TREE_CODE (decl) == VAR_DECL)
    {
      /* A variable needs a finalized varpool node, i.e. a body that will
	 actually be emitted, before we may reference it.  */
      vnode = varpool_get_node (decl);
      if (!vnode || !vnode->finalized)
	return false;
    }
  return true;
}
111
/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it into
   acceptable form for is_gimple_min_invariant.
   Returns the (possibly rewritten) value, or NULL_TREE when CVAL takes the
   address of a decl that cannot be referenced from the current unit.  */

tree
canonicalize_constructor_val (tree cval)
{
  STRIP_NOPS (cval);
  /* Try to turn &base + offset into &base[index] so the result becomes a
     plain ADDR_EXPR handled below.  */
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR)
    {
      tree t = maybe_fold_offset_to_address (EXPR_LOCATION (cval),
					     TREE_OPERAND (cval, 0),
					     TREE_OPERAND (cval, 1),
					     TREE_TYPE (cval));
      if (t)
	cval = t;
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (cval, 0));

      /* Refuse addresses of decls we may not legally reference here
	 (see can_refer_decl_in_current_unit_p for the cases).  */
      if (base
	  && (TREE_CODE (base) == VAR_DECL
	      || TREE_CODE (base) == FUNCTION_DECL)
	  && !can_refer_decl_in_current_unit_p (base))
	return NULL_TREE;
      /* Only register the var when we are inside a function body.  */
      if (cfun && base && TREE_CODE (base) == VAR_DECL)
	add_referenced_var (base);
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
    }
  return cval;
}
cbdd87d4
RG
145
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  if (const_value_known_p (sym))
    {
      tree val = DECL_INITIAL (sym);
      if (val)
	{
	  /* The initializer must canonicalize to something acceptable
	     to is_gimple_min_invariant, otherwise give up.  */
	  val = canonicalize_constructor_val (val);
	  if (val && is_gimple_min_invariant (val))
	    return val;
	  else
	    return NULL_TREE;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
	      || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
	return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}
174
175
176/* Return true if we may propagate the address expression ADDR into the
177 dereference DEREF and cancel them. */
178
179bool
180may_propagate_address_into_dereference (tree addr, tree deref)
181{
70f34814 182 gcc_assert (TREE_CODE (deref) == MEM_REF
cbdd87d4
RG
183 && TREE_CODE (addr) == ADDR_EXPR);
184
185 /* Don't propagate if ADDR's operand has incomplete type. */
186 if (!COMPLETE_TYPE_P (TREE_TYPE (TREE_OPERAND (addr, 0))))
187 return false;
188
189 /* If the address is invariant then we do not need to preserve restrict
190 qualifications. But we do need to preserve volatile qualifiers until
191 we can annotate the folded dereference itself properly. */
192 if (is_gimple_min_invariant (addr)
193 && (!TREE_THIS_VOLATILE (deref)
194 || TYPE_VOLATILE (TREE_TYPE (addr))))
195 return useless_type_conversion_p (TREE_TYPE (deref),
196 TREE_TYPE (TREE_OPERAND (addr, 0)));
197
198 /* Else both the address substitution and the folding must result in
199 a valid useless type conversion sequence. */
200 return (useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (deref, 0)),
201 TREE_TYPE (addr))
202 && useless_type_conversion_p (TREE_TYPE (deref),
203 TREE_TYPE (TREE_OPERAND (addr, 0))));
204}
205
206
207/* A subroutine of fold_stmt. Attempts to fold *(A+O) to A[X].
70f34814 208 BASE is an array type. OFFSET is a byte displacement.
cbdd87d4
RG
209
210 LOC is the location of the original expression. */
211
212static tree
70f34814 213maybe_fold_offset_to_array_ref (location_t loc, tree base, tree offset)
cbdd87d4
RG
214{
215 tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
216 tree array_type, elt_type, elt_size;
217 tree domain_type;
218
219 /* If BASE is an ARRAY_REF, we can pick up another offset (this time
220 measured in units of the size of elements type) from that ARRAY_REF).
221 We can't do anything if either is variable.
222
223 The case we handle here is *(&A[N]+O). */
224 if (TREE_CODE (base) == ARRAY_REF)
225 {
226 tree low_bound = array_ref_low_bound (base);
227
228 elt_offset = TREE_OPERAND (base, 1);
229 if (TREE_CODE (low_bound) != INTEGER_CST
230 || TREE_CODE (elt_offset) != INTEGER_CST)
231 return NULL_TREE;
232
d35936ab 233 elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound);
cbdd87d4
RG
234 base = TREE_OPERAND (base, 0);
235 }
236
237 /* Ignore stupid user tricks of indexing non-array variables. */
238 array_type = TREE_TYPE (base);
239 if (TREE_CODE (array_type) != ARRAY_TYPE)
240 return NULL_TREE;
241 elt_type = TREE_TYPE (array_type);
cbdd87d4
RG
242
243 /* Use signed size type for intermediate computation on the index. */
3b9e5d95 244 idx_type = ssizetype;
cbdd87d4
RG
245
246 /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
247 element type (so we can use the alignment if it's not constant).
248 Otherwise, compute the offset as an index by using a division. If the
249 division isn't exact, then don't do anything. */
250 elt_size = TYPE_SIZE_UNIT (elt_type);
251 if (!elt_size)
252 return NULL;
253 if (integer_zerop (offset))
254 {
255 if (TREE_CODE (elt_size) != INTEGER_CST)
256 elt_size = size_int (TYPE_ALIGN (elt_type));
257
258 idx = build_int_cst (idx_type, 0);
259 }
260 else
261 {
262 unsigned HOST_WIDE_INT lquo, lrem;
263 HOST_WIDE_INT hquo, hrem;
264 double_int soffset;
265
266 /* The final array offset should be signed, so we need
267 to sign-extend the (possibly pointer) offset here
268 and use signed division. */
269 soffset = double_int_sext (tree_to_double_int (offset),
270 TYPE_PRECISION (TREE_TYPE (offset)));
271 if (TREE_CODE (elt_size) != INTEGER_CST
272 || div_and_round_double (TRUNC_DIV_EXPR, 0,
273 soffset.low, soffset.high,
274 TREE_INT_CST_LOW (elt_size),
275 TREE_INT_CST_HIGH (elt_size),
276 &lquo, &hquo, &lrem, &hrem)
277 || lrem || hrem)
278 return NULL_TREE;
279
280 idx = build_int_cst_wide (idx_type, lquo, hquo);
281 }
282
283 /* Assume the low bound is zero. If there is a domain type, get the
284 low bound, if any, convert the index into that type, and add the
285 low bound. */
286 min_idx = build_int_cst (idx_type, 0);
287 domain_type = TYPE_DOMAIN (array_type);
288 if (domain_type)
289 {
290 idx_type = domain_type;
291 if (TYPE_MIN_VALUE (idx_type))
292 min_idx = TYPE_MIN_VALUE (idx_type);
293 else
294 min_idx = fold_convert (idx_type, min_idx);
295
296 if (TREE_CODE (min_idx) != INTEGER_CST)
297 return NULL_TREE;
298
299 elt_offset = fold_convert (idx_type, elt_offset);
300 }
301
302 if (!integer_zerop (min_idx))
d35936ab 303 idx = int_const_binop (PLUS_EXPR, idx, min_idx);
cbdd87d4 304 if (!integer_zerop (elt_offset))
d35936ab 305 idx = int_const_binop (PLUS_EXPR, idx, elt_offset);
cbdd87d4
RG
306
307 /* Make sure to possibly truncate late after offsetting. */
308 idx = fold_convert (idx_type, idx);
309
310 /* We don't want to construct access past array bounds. For example
311 char *(c[4]);
312 c[3][2];
313 should not be simplified into (*c)[14] or tree-vrp will
70f34814
RG
314 give false warnings.
315 This is only an issue for multi-dimensional arrays. */
316 if (TREE_CODE (elt_type) == ARRAY_TYPE
317 && domain_type)
cbdd87d4 318 {
70f34814
RG
319 if (TYPE_MAX_VALUE (domain_type)
320 && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST
321 && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type), idx))
cbdd87d4 322 return NULL_TREE;
70f34814
RG
323 else if (TYPE_MIN_VALUE (domain_type)
324 && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
325 && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
326 return NULL_TREE;
327 else if (compare_tree_int (idx, 0) < 0)
cbdd87d4
RG
328 return NULL_TREE;
329 }
cbdd87d4
RG
330
331 {
332 tree t = build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
333 SET_EXPR_LOCATION (t, loc);
334 return t;
335 }
336}
337
338
70f34814 339/* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE[index].
cbdd87d4
RG
340 LOC is the location of original expression.
341
70f34814 342 Before attempting the conversion strip off existing ADDR_EXPRs. */
cbdd87d4
RG
343
344tree
345maybe_fold_offset_to_reference (location_t loc, tree base, tree offset,
346 tree orig_type)
347{
348 tree ret;
cbdd87d4
RG
349
350 STRIP_NOPS (base);
351 if (TREE_CODE (base) != ADDR_EXPR)
352 return NULL_TREE;
353
354 base = TREE_OPERAND (base, 0);
70f34814 355 if (types_compatible_p (orig_type, TREE_TYPE (base))
cbdd87d4
RG
356 && integer_zerop (offset))
357 return base;
cbdd87d4 358
70f34814
RG
359 ret = maybe_fold_offset_to_array_ref (loc, base, offset);
360 if (ret && types_compatible_p (orig_type, TREE_TYPE (ret)))
361 return ret;
362 return NULL_TREE;
cbdd87d4
RG
363}
364
70f34814
RG
365/* Attempt to express (ORIG_TYPE)ADDR+OFFSET as (*ADDR)[index].
366 LOC is the location of the original expression. */
cbdd87d4
RG
367
368tree
369maybe_fold_offset_to_address (location_t loc, tree addr, tree offset,
370 tree orig_type)
371{
70f34814 372 tree base, ret;
cbdd87d4 373
70f34814
RG
374 STRIP_NOPS (addr);
375 if (TREE_CODE (addr) != ADDR_EXPR)
376 return NULL_TREE;
377 base = TREE_OPERAND (addr, 0);
378 ret = maybe_fold_offset_to_array_ref (loc, base, offset);
379 if (ret)
cbdd87d4 380 {
70f34814
RG
381 ret = build_fold_addr_expr (ret);
382 if (!useless_type_conversion_p (orig_type, TREE_TYPE (ret)))
cbdd87d4 383 return NULL_TREE;
70f34814 384 SET_EXPR_LOCATION (ret, loc);
cbdd87d4
RG
385 }
386
70f34814 387 return ret;
cbdd87d4
RG
388}
389
390
/* A quaint feature extant in our address arithmetic is that there
   can be hidden type changes here.  The type of the result need
   not be the same as the type of the input pointer.

   What we're after here is an expression of the form
	(T *)(&array + const)
   where array is OP0, const is OP1, RES_TYPE is T and
   the cast doesn't actually exist, but is implicit in the
   type of the POINTER_PLUS_EXPR.  We'd like to turn this into
	&array[x]
   which may be able to propagate further.  */

tree
maybe_fold_stmt_addition (location_t loc, tree res_type, tree op0, tree op1)
{
  tree ptd_type;
  tree t;

  /* The first operand should be an ADDR_EXPR.  */
  if (TREE_CODE (op0) != ADDR_EXPR)
    return NULL_TREE;
  op0 = TREE_OPERAND (op0, 0);

  /* It had better be a constant.  */
  if (TREE_CODE (op1) != INTEGER_CST)
    {
      /* Or op0 should now be A[0] and the non-constant offset defined
	 via a multiplication by the array element size.  */
      if (TREE_CODE (op0) == ARRAY_REF
	  /* As we will end up creating a variable index array access
	     in the outermost array dimension make sure there isn't
	     a more inner array that the index could overflow to.  */
	  && TREE_CODE (TREE_OPERAND (op0, 0)) != ARRAY_REF
	  && integer_zerop (TREE_OPERAND (op0, 1))
	  && TREE_CODE (op1) == SSA_NAME)
	{
	  gimple offset_def = SSA_NAME_DEF_STMT (op1);
	  tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (op0));
	  if (!host_integerp (elsz, 1)
	      || !is_gimple_assign (offset_def))
	    return NULL_TREE;

	  /* Do not build array references of something that we can't
	     see the true number of array dimensions for.  */
	  if (!DECL_P (TREE_OPERAND (op0, 0))
	      && !handled_component_p (TREE_OPERAND (op0, 0)))
	    return NULL_TREE;

	  /* If the offset is defined as index * element-size, strip the
	     multiplication and use the index directly.  */
	  if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
	      && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
	      && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz))
	    return build_fold_addr_expr
			  (build4 (ARRAY_REF, TREE_TYPE (op0),
				   TREE_OPERAND (op0, 0),
				   gimple_assign_rhs1 (offset_def),
				   TREE_OPERAND (op0, 2),
				   TREE_OPERAND (op0, 3)));
	  /* For byte-sized elements the byte offset is the index itself.  */
	  else if (integer_onep (elsz)
		   && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
	    return build_fold_addr_expr
			  (build4 (ARRAY_REF, TREE_TYPE (op0),
				   TREE_OPERAND (op0, 0),
				   op1,
				   TREE_OPERAND (op0, 2),
				   TREE_OPERAND (op0, 3)));
	}
      else if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE
	       /* Dto.  */
	       && TREE_CODE (TREE_TYPE (TREE_TYPE (op0))) != ARRAY_TYPE
	       && TREE_CODE (op1) == SSA_NAME)
	{
	  /* Same as above, but OP0 is the array itself rather than an
	     A[0] reference into it.  */
	  gimple offset_def = SSA_NAME_DEF_STMT (op1);
	  tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (op0)));
	  if (!host_integerp (elsz, 1)
	      || !is_gimple_assign (offset_def))
	    return NULL_TREE;

	  /* Do not build array references of something that we can't
	     see the true number of array dimensions for.  */
	  if (!DECL_P (op0)
	      && !handled_component_p (op0))
	    return NULL_TREE;

	  if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
	      && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
	      && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz))
	    return build_fold_addr_expr
			  (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)),
				   op0, gimple_assign_rhs1 (offset_def),
				   integer_zero_node, NULL_TREE));
	  else if (integer_onep (elsz)
		   && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
	    return build_fold_addr_expr
			  (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)),
				   op0, op1,
				   integer_zero_node, NULL_TREE));
	}

      return NULL_TREE;
    }

  /* If the first operand is an ARRAY_REF, expand it so that we can fold
     the offset into it.  */
  while (TREE_CODE (op0) == ARRAY_REF)
    {
      tree array_obj = TREE_OPERAND (op0, 0);
      tree array_idx = TREE_OPERAND (op0, 1);
      tree elt_type = TREE_TYPE (op0);
      tree elt_size = TYPE_SIZE_UNIT (elt_type);
      tree min_idx;

      if (TREE_CODE (array_idx) != INTEGER_CST)
	break;
      if (TREE_CODE (elt_size) != INTEGER_CST)
	break;

      /* Un-bias the index by the min index of the array type.  */
      min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
      if (min_idx)
	{
	  min_idx = TYPE_MIN_VALUE (min_idx);
	  if (min_idx)
	    {
	      if (TREE_CODE (min_idx) != INTEGER_CST)
		break;

	      array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
	      if (!integer_zerop (min_idx))
		array_idx = int_const_binop (MINUS_EXPR, array_idx,
					     min_idx);
	    }
	}

      /* Convert the index to a byte offset.  */
      array_idx = fold_convert (sizetype, array_idx);
      array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size);

      /* Update the operands for the next round, or for folding.  */
      op1 = int_const_binop (PLUS_EXPR,
			     array_idx, op1);
      op0 = array_obj;
    }

  ptd_type = TREE_TYPE (res_type);
  /* If we want a pointer to void, reconstruct the reference from the
     array element type.  A pointer to that can be trivially converted
     to void *.  This happens as we fold (void *)(ptr p+ off).  */
  if (VOID_TYPE_P (ptd_type)
      && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
    ptd_type = TREE_TYPE (TREE_TYPE (op0));

  /* At which point we can try some of the same things as for indirects.  */
  t = maybe_fold_offset_to_array_ref (loc, op0, op1);
  if (t)
    {
      t = build_fold_addr_expr (t);
      if (!useless_type_conversion_p (res_type, TREE_TYPE (t)))
	return NULL_TREE;
      SET_EXPR_LOCATION (t, loc);
    }

  return t;
}
554
/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.
   Returns the simplified reference, or NULL_TREE if nothing changed.  */

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  /* T walks down to the innermost base of the reference; rewriting *T
     rewrites EXPR in place.  */
  tree *t = &expr;
  tree result;

  /* Fold unary/ternary views of constants directly.  */
  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
			   TREE_CODE (expr),
			   TREE_TYPE (expr),
			   TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
			     TREE_CODE (expr),
			     TREE_TYPE (expr),
			     TREE_OPERAND (expr, 0),
			     TREE_OPERAND (expr, 1),
			     TREE_OPERAND (expr, 2));

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM_REFs invariant address operand.  Do this first
     to avoid feeding non-canonical MEM_REFs elsewhere.  */
  if (TREE_CODE (*t) == MEM_REF
      && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
    {
      bool volatile_p = TREE_THIS_VOLATILE (*t);
      tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
			      TREE_OPERAND (*t, 0),
			      TREE_OPERAND (*t, 1));
      if (tem)
	{
	  /* fold_binary does not preserve the volatile flag; restore it
	     before substituting and retrying recursively.  */
	  TREE_THIS_VOLATILE (tem) = volatile_p;
	  *t = tem;
	  tem = maybe_fold_reference (expr, is_lhs);
	  if (tem)
	    return tem;
	  return expr;
	}
    }

  /* A read from a constant aggregate may fold to the constant value.  */
  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  /* Fold back MEM_REFs to reference trees.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && (TREE_THIS_VOLATILE (*t)
	  == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
      && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
      && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
	  == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
      /* We have to look out here to not drop a required conversion
	 from the rhs to the lhs if is_lhs, but we don't have the
	 rhs here to verify that.  Thus require strict type
	 compatibility.  */
      && types_compatible_p (TREE_TYPE (*t),
			     TREE_TYPE (TREE_OPERAND
					  (TREE_OPERAND (*t, 0), 0))))
    {
      tree tem;
      *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tem = maybe_fold_reference (expr, is_lhs);
      if (tem)
	return tem;
      return expr;
    }
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  tem = maybe_fold_reference (expr, is_lhs);
	  if (tem)
	    return tem;
	  return expr;
	}
    }

  return NULL_TREE;
}
650
651
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	/* Try to fold a conditional expression.  */
	if (TREE_CODE (rhs) == COND_EXPR)
	  {
	    tree op0 = COND_EXPR_COND (rhs);
	    tree tem;
	    bool set = false;
	    location_t cond_loc = EXPR_LOCATION (rhs);

	    if (COMPARISON_CLASS_P (op0))
	      {
		fold_defer_overflow_warnings ();
		tem = fold_binary_loc (cond_loc,
				       TREE_CODE (op0), TREE_TYPE (op0),
				       TREE_OPERAND (op0, 0),
				       TREE_OPERAND (op0, 1));
		/* This is actually a conditional expression, not a GIMPLE
		   conditional statement, however, the valid_gimple_rhs_p
		   test still applies.  */
		set = (tem && is_gimple_condexpr (tem)
		       && valid_gimple_rhs_p (tem));
		fold_undefer_overflow_warnings (set, stmt, 0);
	      }
	    else if (is_gimple_min_invariant (op0))
	      {
		tem = op0;
		set = true;
	      }
	    else
	      return NULL_TREE;

	    if (set)
	      result = fold_build3_loc (cond_loc, COND_EXPR, TREE_TYPE (rhs), tem,
					COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
	  }

	else if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    /* Fold the referenced object, then rebuild the address.
	       A folded MEM_REF with zero offset collapses to its
	       address operand directly.  */
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		 && (CONSTRUCTOR_NELTS (rhs)
		     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (TREE_CODE (val) != INTEGER_CST
		  && TREE_CODE (val) != REAL_CST
		  && TREE_CODE (val) != FIXED_CST)
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return unshare_expr (get_symbol_constant_value (rhs));

	/* If we couldn't fold the RHS, hand over to the generic
	   fold routines.  */
	if (result == NULL_TREE)
	  result = fold (rhs);

	/* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
	   that may have been added by fold, and "useless" type
	   conversions that might now be apparent due to propagation.  */
	STRIP_USELESS_TYPE_CONVERSION (result);

	if (result != rhs && valid_gimple_rhs_p (result))
	  return result;

	return NULL_TREE;
      }
      break;

    case GIMPLE_UNARY_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
	if (result)
	  {
	    /* If the operation was a conversion do _not_ mark a
	       resulting constant with TREE_OVERFLOW if the original
	       constant was not.  These conversions have implementation
	       defined behavior and retaining the TREE_OVERFLOW flag
	       here would confuse later passes such as VRP.  */
	    if (CONVERT_EXPR_CODE_P (subcode)
		&& TREE_CODE (result) == INTEGER_CST
		&& TREE_CODE (rhs) == INTEGER_CST)
	      TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);

	    STRIP_USELESS_TYPE_CONVERSION (result);
	    if (valid_gimple_rhs_p (result))
	      return result;
	  }
	else if (CONVERT_EXPR_CODE_P (subcode)
		 && POINTER_TYPE_P (gimple_expr_type (stmt))
		 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
	  {
	    /* Pointer-to-pointer conversion: try folding the (zero-offset)
	       address into an array reference of the target type.  */
	    tree type = gimple_expr_type (stmt);
	    tree t = maybe_fold_offset_to_address (loc,
						   gimple_assign_rhs1 (stmt),
						   integer_zero_node, type);
	    if (t)
	      return t;
	  }
      }
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to fold pointer addition.  */
      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
	{
	  tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
	  if (TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
	    {
	      type = build_pointer_type (TREE_TYPE (TREE_TYPE (type)));
	      if (!useless_type_conversion_p
		    (TREE_TYPE (gimple_assign_lhs (stmt)), type))
		type = TREE_TYPE (gimple_assign_rhs1 (stmt));
	    }
	  result = maybe_fold_stmt_addition (gimple_location (stmt),
					     type,
					     gimple_assign_rhs1 (stmt),
					     gimple_assign_rhs2 (stmt));
	}

      if (!result)
	result = fold_binary_loc (loc, subcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)),
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;

	  /* Fold might have produced non-GIMPLE, so if we trust it blindly
	     we lose canonicalization opportunities.  Do not go again
	     through fold here though, or the same non-GIMPLE will be
	     produced.  */
	  if (commutative_tree_code (subcode)
	      && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
				       gimple_assign_rhs2 (stmt), false))
	    return build2 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
			   gimple_assign_rhs2 (stmt),
			   gimple_assign_rhs1 (stmt));
	}
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
				 TREE_TYPE (gimple_assign_lhs (stmt)),
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;

	  /* Fold might have produced non-GIMPLE, so if we trust it blindly
	     we lose canonicalization opportunities.  Do not go again
	     through fold here though, or the same non-GIMPLE will be
	     produced.  */
	  if (commutative_ternary_tree_code (subcode)
	      && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
				       gimple_assign_rhs2 (stmt), false))
	    return build3 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
			   gimple_assign_rhs2 (stmt),
			   gimple_assign_rhs1 (stmt),
			   gimple_assign_rhs3 (stmt));
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
876
877/* Attempt to fold a conditional statement. Return true if any changes were
878 made. We only attempt to fold the condition expression, and do not perform
879 any transformation that would require alteration of the cfg. It is
880 assumed that the operands have been previously folded. */
881
882static bool
883fold_gimple_cond (gimple stmt)
884{
885 tree result = fold_binary_loc (gimple_location (stmt),
886 gimple_cond_code (stmt),
887 boolean_type_node,
888 gimple_cond_lhs (stmt),
889 gimple_cond_rhs (stmt));
890
891 if (result)
892 {
893 STRIP_USELESS_TYPE_CONVERSION (result);
894 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
895 {
896 gimple_cond_set_condition_from_tree (stmt, result);
897 return true;
898 }
899 }
900
901 return false;
902}
903
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL
   is replaced.  If the call is expected to produces a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  tree tmp = NULL_TREE; /* Silence warning.  */
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = gimple_seq_alloc();
  struct gimplify_ctx gctx;
  gimple last = NULL;
  /* Last store seen in the gimplified sequence; it needs a fresh VDEF
     once a later statement with a VUSE shows up.  */
  gimple laststore = NULL;
  /* The virtual SSA name known to reach the statement currently being
     processed; starts as the VUSE of the replaced call.  */
  tree reaching_vuse;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  lhs = gimple_call_lhs (stmt);
  reaching_vuse = gimple_vuse (stmt);

  push_gimplify_context (&gctx);

  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_remove (si_p, true);
	  return;
	}
    }
  else
    tmp = get_initialized_tmp_var (expr, &stmts, NULL);

  pop_gimplify_context (NULL);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* The replacement can expose previously unreferenced variables.  */
  for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
    {
      /* Insert statements one behind the iteration so the final one can
	 later replace the original call in place.  */
      if (last)
	{
	  gsi_insert_before (si_p, last, GSI_NEW_STMT);
	  gsi_next (si_p);
	}
      new_stmt = gsi_stmt (i);
      if (gimple_in_ssa_p (cfun))
	{
	  find_new_referenced_vars (new_stmt);
	  mark_symbols_for_renaming (new_stmt);
	}
      /* If the new statement has a VUSE, update it with exact SSA name we
         know will reach this one.  */
      if (gimple_vuse (new_stmt))
	{
	  /* If we've also seen a previous store create a new VDEF for
	     the latter one, and make that the new reaching VUSE.  */
	  if (laststore)
	    {
	      reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
	      gimple_set_vdef (laststore, reaching_vuse);
	      update_stmt (laststore);
	      laststore = NULL;
	    }
	  gimple_set_vuse (new_stmt, reaching_vuse);
	  gimple_set_modified (new_stmt, true);
	}
      /* A single assignment to a non-register is a store; remember it so
	 it can be given a VDEF when needed.  */
      if (gimple_assign_single_p (new_stmt)
	  && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	{
	  laststore = new_stmt;
	}
      last = new_stmt;
    }

  if (lhs == NULL_TREE)
    {
      /* If we replace a call without LHS that has a VDEF and our new
         sequence ends with a store we must make that store have the same
	 vdef in order not to break the sequencing.  This can happen
	 for instance when folding memcpy calls into assignments.  */
      if (gimple_vdef (stmt) && laststore)
	{
	  gimple_set_vdef (laststore, gimple_vdef (stmt));
	  if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	    SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
	  update_stmt (laststore);
	}
      else if (gimple_in_ssa_p (cfun))
	{
	  unlink_stmt_vdef (stmt);
	  release_defs (stmt);
	}
      new_stmt = last;
    }
  else
    {
      if (last)
	{
	  gsi_insert_before (si_p, last, GSI_NEW_STMT);
	  gsi_next (si_p);
	}
      /* If the last store does not clobber the result register, reuse the
	 replaced call's VDEF for it; otherwise chain a fresh VDEF so the
	 final assignment below can carry the call's VDEF.  */
      if (laststore && is_gimple_reg (lhs))
	{
	  gimple_set_vdef (laststore, gimple_vdef (stmt));
	  update_stmt (laststore);
	  if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	    SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
	  laststore = NULL;
	}
      else if (laststore)
	{
	  reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
	  gimple_set_vdef (laststore, reaching_vuse);
	  update_stmt (laststore);
	  laststore = NULL;
	}
      new_stmt = gimple_build_assign (lhs, tmp);
      if (!is_gimple_reg (tmp))
	gimple_set_vuse (new_stmt, reaching_vuse);
      if (!is_gimple_reg (lhs))
	{
	  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
	  if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	    SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = new_stmt;
	}
      else if (reaching_vuse == gimple_vuse (stmt))
	unlink_stmt_vdef (stmt);
    }

  gimple_set_location (new_stmt, gimple_location (stmt));
  gsi_replace (si_p, new_stmt, false);
}
1057
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */

static bool
get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* For a COND_EXPR both arms must yield a compatible length.  */
      if (TREE_CODE (arg) == COND_EXPR)
        return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
               && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      else if (TREE_CODE (arg) == ADDR_EXPR
	       && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	       && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_maxval_strlen (TREE_OPERAND (aop0, 0),
				      length, visited, type);
	}

      if (type == 2)
	{
	  /* Maximum-value mode: only non-negative integer constants
	     qualify.  */
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length)
	{
	  if (type > 0)
	    {
	      /* For maximum modes keep the larger of the two constants.  */
	      if (TREE_CODE (*length) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*length, val))
		*length = val;
	      return true;
	    }
	  /* For exact length (TYPE == 0) all sources must agree.  */
	  else if (simple_cst_equal (val, *length) != 1)
	    return false;
	}

      *length = val;
      return true;
    }

  /* If we were already here, break the infinite cycle.  */
  if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
      case GIMPLE_ASSIGN:
        /* The RHS of the statement defining VAR must either have a
           constant length or come from another SSA_NAME with a constant
           length.  */
        if (gimple_assign_single_p (def_stmt)
            || gimple_assign_unary_nop_p (def_stmt))
          {
            tree rhs = gimple_assign_rhs1 (def_stmt);
            return get_maxval_strlen (rhs, length, visited, type);
          }
        return false;

      case GIMPLE_PHI:
	{
	  /* All the arguments of the PHI node must have the same constant
	     length.  */
	  unsigned i;

	  for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
          {
            tree arg = gimple_phi_arg (def_stmt, i)->def;

            /* If this PHI has itself as an argument, we cannot
               determine the string length of this argument.  However,
               if we can find a constant string length for the other
               PHI args then we can still be sure that this is a
               constant string length.  So be optimistic and just
               continue with the next argument.  */
            if (arg == gimple_phi_result (def_stmt))
              continue;

            if (!get_maxval_strlen (arg, length, visited, type))
              return false;
          }
        }
	return true;

      default:
        return false;
    }
}
1172
1173
/* Fold builtin call in statement STMT.  Returns a simplified tree.
   We may return a non-constant expression, including another call
   to a different function and with different arguments, e.g.,
   substituting memcpy for strcpy when the string length is known.
   Note that some builtins expand into inline code that may not
   be valid in GIMPLE.  Callers must take care.  */

tree
gimple_fold_builtin (gimple stmt)
{
  /* VAL is indexed by argument position; only VAL[ARG_IDX] is ever
     filled in by get_maxval_strlen below.  */
  tree result, val[3];
  tree callee, a;
  int arg_idx, type;
  bitmap visited;
  bool ignore;
  int nargs;
  location_t loc = gimple_location (stmt);

  gcc_assert (is_gimple_call (stmt));

  ignore = (gimple_call_lhs (stmt) == NULL);

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = gimple_call_fndecl (stmt);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  nargs = gimple_call_num_args (stmt);
  if (nargs == 0)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.
     ARG_IDX selects the argument whose length/value matters and TYPE
     selects the get_maxval_strlen mode (0 = exact string length,
     1 = maximum string length, 2 = maximum value).  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      arg_idx = 0;
      type = 0;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
      arg_idx = 1;
      type = 0;
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
      arg_idx = 2;
      type = 2;
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      arg_idx = 1;
      type = 1;
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      arg_idx = 1;
      type = 2;
      break;
    default:
      return NULL_TREE;
    }

  if (arg_idx >= nargs)
    return NULL_TREE;

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);
  bitmap_clear (visited);

  memset (val, 0, sizeof (val));
  a = gimple_call_arg (stmt, arg_idx);
  if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
    val[arg_idx] = NULL_TREE;

  BITMAP_FREE (visited);

  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (val[0] && nargs == 1)
	{
	  tree new_val =
              fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);

	  /* If the result is not a valid gimple value, or not a cast
	     of a valid gimple value, then we cannot use the result.  */
	  if (is_gimple_val (new_val)
	      || (CONVERT_EXPR_P (new_val)
		  && is_gimple_val (TREE_OPERAND (new_val, 0))))
	    return new_val;
	}
      break;

    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
	result = fold_builtin_strcpy (loc, callee,
				      gimple_call_arg (stmt, 0),
				      gimple_call_arg (stmt, 1),
				      val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_strncpy (loc, callee,
				       gimple_call_arg (stmt, 0),
				       gimple_call_arg (stmt, 1),
				       gimple_call_arg (stmt, 2),
				       val[1]);
      break;

    case BUILT_IN_FPUTS:
      if (nargs == 2)
	result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, false, val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      if (nargs == 2)
	result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, true, val[0]);
      break;

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_memory_chk (loc, callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  gimple_call_arg (stmt, 3),
					  val[2], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_stxcpy_chk (loc, callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  val[1], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_strncpy_chk (loc, gimple_call_arg (stmt, 0),
                                           gimple_call_arg (stmt, 1),
                                           gimple_call_arg (stmt, 2),
                                           gimple_call_arg (stmt, 3),
					   val[2]);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
	result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
                                                   DECL_FUNCTION_CODE (callee));
      break;

    default:
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);
  return result;
}
1365
ceeffab0
MJ
1366/* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
1367 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
1368 KNOWN_BINFO carries the binfo describing the true type of
1369 OBJ_TYPE_REF_OBJECT(REF). If a call to the function must be accompanied
1370 with a this adjustment, the constant which should be added to this pointer
1371 is stored to *DELTA. If REFUSE_THUNKS is true, return NULL if the function
1372 is a thunk (other than a this adjustment which is dealt with by DELTA). */
1ae6fe9b
MJ
1373
1374tree
84825707 1375gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
ceeffab0 1376 tree *delta, bool refuse_thunks)
1ae6fe9b
MJ
1377{
1378 HOST_WIDE_INT i;
ceeffab0
MJ
1379 tree v, fndecl;
1380 struct cgraph_node *node;
1ae6fe9b
MJ
1381
1382 v = BINFO_VIRTUALS (known_binfo);
ceeffab0
MJ
1383 /* If there is no virtual methods leave the OBJ_TYPE_REF alone. */
1384 if (!v)
1385 return NULL_TREE;
1ae6fe9b
MJ
1386 i = 0;
1387 while (i != token)
1388 {
1389 i += (TARGET_VTABLE_USES_DESCRIPTORS
1390 ? TARGET_VTABLE_USES_DESCRIPTORS : 1);
1391 v = TREE_CHAIN (v);
1392 }
1393
84825707
JJ
1394 /* If BV_VCALL_INDEX is non-NULL, give up. */
1395 if (TREE_TYPE (v))
1396 return NULL_TREE;
1397
1ae6fe9b 1398 fndecl = TREE_VALUE (v);
ceeffab0
MJ
1399 node = cgraph_get_node_or_alias (fndecl);
1400 if (refuse_thunks
1401 && (!node
1402 /* Bail out if it is a thunk declaration. Since simple this_adjusting
1403 thunks are represented by a constant in TREE_PURPOSE of items in
1404 BINFO_VIRTUALS, this is a more complicate type which we cannot handle as
1405 yet.
1406
1407 FIXME: Remove the following condition once we are able to represent
1408 thunk information on call graph edges. */
1409 || (node->same_body_alias && node->thunk.thunk_p)))
1410 return NULL_TREE;
3f1f0ae3 1411
6e4da084
JH
1412 /* When cgraph node is missing and function is not public, we cannot
1413 devirtualize. This can happen in WHOPR when the actual method
1414 ends up in other partition, because we found devirtualization
1415 possibility too late. */
ceeffab0
MJ
1416 if (!can_refer_decl_in_current_unit_p (TREE_VALUE (v)))
1417 return NULL_TREE;
1418
1419 *delta = TREE_PURPOSE (v);
1420 gcc_checking_assert (host_integerp (*delta, 0));
1421 return fndecl;
1ae6fe9b
MJ
1422}
1423
ceeffab0
MJ
1424/* Generate code adjusting the first parameter of a call statement determined
1425 by GSI by DELTA. */
1426
1427void
1428gimple_adjust_this_by_delta (gimple_stmt_iterator *gsi, tree delta)
1429{
1430 gimple call_stmt = gsi_stmt (*gsi);
1431 tree parm, tmp;
1432 gimple new_stmt;
1433
1434 delta = fold_convert (sizetype, delta);
1435 gcc_assert (gimple_call_num_args (call_stmt) >= 1);
1436 parm = gimple_call_arg (call_stmt, 0);
1437 gcc_assert (POINTER_TYPE_P (TREE_TYPE (parm)));
1438 tmp = create_tmp_var (TREE_TYPE (parm), NULL);
1439 add_referenced_var (tmp);
1440
1441 tmp = make_ssa_name (tmp, NULL);
1442 new_stmt = gimple_build_assign_with_ops (POINTER_PLUS_EXPR, tmp, parm, delta);
1443 SSA_NAME_DEF_STMT (tmp) = new_stmt;
1444 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1445 gimple_call_set_arg (call_stmt, 0, tmp);
1446}
1ae6fe9b 1447
49c471e3
MJ
/* Return a binfo to be used for devirtualization of calls based on an object
   represented by a declaration (i.e. a global or automatically allocated one)
   or NULL if it cannot be found or is not safe.  CST is expected to be an
   ADDR_EXPR of such object or the function will return NULL.  Currently it is
   safe to use such binfo only if it has no base binfo (i.e. no ancestors).  */

tree
gimple_extract_devirt_binfo_from_cst (tree cst)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base, type, expected_type, binfo;
  bool last_artificial = false;

  if (!flag_devirtualize
      || TREE_CODE (cst) != ADDR_EXPR
      || TREE_CODE (TREE_TYPE (TREE_TYPE (cst))) != RECORD_TYPE)
    return NULL_TREE;

  cst = TREE_OPERAND (cst, 0);
  expected_type = TREE_TYPE (cst);
  base = get_ref_base_and_extent (cst, &offset, &size, &max_size);
  type = TREE_TYPE (base);
  /* Require a declaration with a known, exact extent as the base.  */
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || TREE_CODE (type) != RECORD_TYPE)
    return NULL_TREE;

  /* Find the sub-object the constant actually refers to and mark whether it is
     an artificial one (as opposed to a user-defined one).  */
  while (true)
    {
      HOST_WIDE_INT pos, size;
      tree fld;

      if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (expected_type))
	break;
      if (offset < 0)
	return NULL_TREE;

      /* Locate the field whose bit range contains OFFSET.  */
      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	{
	  if (TREE_CODE (fld) != FIELD_DECL)
	    continue;

	  pos = int_bit_position (fld);
	  size = tree_low_cst (DECL_SIZE (fld), 1);
	  if (pos <= offset && (pos + size) > offset)
	    break;
	}
      if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
	return NULL_TREE;

      /* Descend into the field and continue with the offset rebased.  */
      last_artificial = DECL_ARTIFICIAL (fld);
      type = TREE_TYPE (fld);
      offset -= pos;
    }
  /* Artifical sub-objects are ancestors, we do not want to use them for
     devirtualization, at least not here.  */
  if (last_artificial)
    return NULL_TREE;
  binfo = TYPE_BINFO (type);
  /* Only binfos without ancestors are safe to use here.  */
  if (!binfo || BINFO_N_BASE_BINFOS (binfo) > 0)
    return NULL_TREE;
  else
    return binfo;
}
1515
cbdd87d4
RG
1516/* Attempt to fold a call statement referenced by the statement iterator GSI.
1517 The statement may be replaced by another statement, e.g., if the call
1518 simplifies to a constant value. Return true if any changes were made.
1519 It is assumed that the operands have been previously folded. */
1520
ceeffab0
MJ
bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gimple stmt = gsi_stmt (*gsi);
  tree callee;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  callee = gimple_call_fndecl (stmt);
  /* Builtin folding may replace the whole statement, so it is skipped
     when folding in place.  */
  if (!inplace && callee && DECL_BUILT_IN (callee))
    {
      tree result = gimple_fold_builtin (stmt);

      if (result)
	{
          if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  return true;
	}
    }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      tree binfo, fndecl, delta, obj;
      HOST_WIDE_INT token;

      /* The OBJ_TYPE_REF expression may already name a function
	 directly; then just strip the OBJ_TYPE_REF wrapper.  */
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  return true;
	}

      /* Otherwise try to devirtualize based on the static type of the
	 object the reference is based on.  */
      obj = OBJ_TYPE_REF_OBJECT (callee);
      binfo = gimple_extract_devirt_binfo_from_cst (obj);
      if (!binfo)
	return false;
      token = TREE_INT_CST_LOW (OBJ_TYPE_REF_TOKEN (callee));
      fndecl = gimple_get_virt_method_for_binfo (token, binfo, &delta, false);
      if (!fndecl)
	return false;
      gcc_assert (integer_zerop (delta));
      gimple_call_set_fndecl (stmt, fndecl);
      return true;
    }

  return false;
}
1570
1571/* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
1572 distinguishes both cases. */
1573
static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  unsigned i;

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	unsigned old_num_ops = gimple_num_ops (stmt);
	tree new_rhs = fold_gimple_assign (gsi);
	tree lhs = gimple_assign_lhs (stmt);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	/* In-place folding must not grow the statement's operand
	   vector; only accept the new RHS if it needs fewer ops.  */
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    case GIMPLE_CALL:
      /* Fold *& in call arguments.  */
      for (i = 0; i < gimple_call_num_args (stmt); ++i)
	if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
	  {
	    tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	    if (tmp)
	      {
		gimple_call_set_arg (stmt, i, tmp);
		changed = true;
	      }
	  }
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
	{
	  tree link = gimple_asm_output_op (stmt, i);
	  tree op = TREE_VALUE (link);
	  if (REFERENCE_CLASS_P (op)
	      && (op = maybe_fold_reference (op, true)) != NULL_TREE)
	    {
	      TREE_VALUE (link) = op;
	      changed = true;
	    }
	}
      for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
	{
	  tree link = gimple_asm_input_op (stmt, i);
	  tree op = TREE_VALUE (link);
	  if (REFERENCE_CLASS_P (op)
	      && (op = maybe_fold_reference (op, false)) != NULL_TREE)
	    {
	      TREE_VALUE (link) = op;
	      changed = true;
	    }
	}
      break;

    case GIMPLE_DEBUG:
      /* Fold *& in the value of a debug bind.  */
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    default:;
    }

  /* The statement may have been replaced above; re-fetch it.  */
  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  return changed;
}
1687
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false);
}
1700
/* Perform the minimal folding on statement STMT.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement STMT should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple stmt)
{
  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
  bool changed = fold_stmt_1 (&gsi, true);
  /* In-place folding must not have replaced STMT with a new statement.  */
  gcc_assert (gsi_stmt (gsi) == stmt);
  return changed;
}
1717
e89065a1
SL
1718/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
1719 if EXPR is null or we don't know how.
1720 If non-null, the result always has boolean type. */
1721
1722static tree
1723canonicalize_bool (tree expr, bool invert)
1724{
1725 if (!expr)
1726 return NULL_TREE;
1727 else if (invert)
1728 {
1729 if (integer_nonzerop (expr))
1730 return boolean_false_node;
1731 else if (integer_zerop (expr))
1732 return boolean_true_node;
1733 else if (TREE_CODE (expr) == SSA_NAME)
1734 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
1735 build_int_cst (TREE_TYPE (expr), 0));
1736 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1737 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
1738 boolean_type_node,
1739 TREE_OPERAND (expr, 0),
1740 TREE_OPERAND (expr, 1));
1741 else
1742 return NULL_TREE;
1743 }
1744 else
1745 {
1746 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1747 return expr;
1748 if (integer_nonzerop (expr))
1749 return boolean_true_node;
1750 else if (integer_zerop (expr))
1751 return boolean_false_node;
1752 else if (TREE_CODE (expr) == SSA_NAME)
1753 return fold_build2 (NE_EXPR, boolean_type_node, expr,
1754 build_int_cst (TREE_TYPE (expr), 0));
1755 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1756 return fold_build2 (TREE_CODE (expr),
1757 boolean_type_node,
1758 TREE_OPERAND (expr, 0),
1759 TREE_OPERAND (expr, 1));
1760 else
1761 return NULL_TREE;
1762 }
1763}
1764
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      /* (name) is equivalent to (name != 0) and (name == 1).  */
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  /* (name != 0) and (name == 1) preserve the comparison.  */
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  /* (name == 0) and (name != 1) invert the comparison.  */
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}
1822
1823/* Check to see if two boolean expressions OP1 and OP2 are logically
1824 equivalent. */
1825
1826static bool
1827same_bool_result_p (const_tree op1, const_tree op2)
1828{
1829 /* Simple cases first. */
1830 if (operand_equal_p (op1, op2, 0))
1831 return true;
1832
1833 /* Check the cases where at least one of the operands is a comparison.
1834 These are a bit smarter than operand_equal_p in that they apply some
1835 identifies on SSA_NAMEs. */
1836 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
1837 && same_bool_comparison_p (op1, TREE_CODE (op2),
1838 TREE_OPERAND (op2, 0),
1839 TREE_OPERAND (op2, 1)))
1840 return true;
1841 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
1842 && same_bool_comparison_p (op2, TREE_CODE (op1),
1843 TREE_OPERAND (op1, 0),
1844 TREE_OPERAND (op1, 1)))
1845 return true;
1846
1847 /* Default case. */
1848 return false;
1849}
1850
1851/* Forward declarations for some mutually recursive functions. */
1852
1853static tree
1854and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1855 enum tree_code code2, tree op2a, tree op2b);
1856static tree
1857and_var_with_comparison (tree var, bool invert,
1858 enum tree_code code2, tree op2a, tree op2b);
1859static tree
1860and_var_with_comparison_1 (gimple stmt,
1861 enum tree_code code2, tree op2a, tree op2b);
1862static tree
1863or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1864 enum tree_code code2, tree op2a, tree op2b);
1865static tree
1866or_var_with_comparison (tree var, bool invert,
1867 enum tree_code code2, tree op2a, tree op2b);
1868static tree
1869or_var_with_comparison_1 (gimple stmt,
1870 enum tree_code code2, tree op2a, tree op2b);
1871
/* Helper function for and_comparisons_1:  try to simplify the AND of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of the VAR before doing the AND.
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = or_var_with_comparison_1 (stmt,
				  invert_tree_comparison (code2, false),
				  op2a, op2b);
  else
    t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
  /* canonicalize_bool re-applies the inversion to the simplified form.  */
  return canonicalize_bool (t, invert);
}
1899
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      /* (OP2A != 0) and (OP2A == 1) both test OP2A being true.  */
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    /* (var AND var) => var.  */
	    return var;
	}
      /* (OP2A == 0) and (OP2A != 1) both test OP2A being false.  */
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    /* (var AND !var) => false.  */
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2,
				  op2a,
				  op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (innercode == TRUTH_AND_EXPR
      || innercode == TRUTH_OR_EXPR
      || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
	  && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == TRUTH_AND_EXPR || innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b))  */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
2064
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    boolean_type_node, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    boolean_type_node, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      /* CMP orders OP1B relative to OP2B (negative, zero, positive).  */
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
	 return that or FALSE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	      else
		return boolean_false_node;
	    }
	}
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	      else
		return boolean_false_node;
	    }
	}

      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp != 0); break;
	    case NE_EXPR: val = (cmp == 0); break;
	    case LT_EXPR: val = (cmp >= 0); break;
	    case GT_EXPR: val = (cmp <= 0); break;
	    case LE_EXPR: val = (cmp > 0); break;
	    case GE_EXPR: val = (cmp < 0); break;
	    default:
	      val = false;
	    }
	  /* The != test is redundant when the second comparison already
	     excludes the constant op1b; then the second test suffices.  */
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == NE_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp <= 0); break;
	    case GT_EXPR: val = (cmp >= 0); break;
	    case LE_EXPR: val = (cmp < 0); break;
	    case GE_EXPR: val = (cmp > 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Likewise choose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Check for singleton ranges: (x <= c AND x >= c) => (x == c).  */
      else if (cmp == 0
	       && ((code1 == LE_EXPR && code2 == GE_EXPR)
		   || (code1 == GE_EXPR && code2 == LE_EXPR)))
	return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges. */
      else if (cmp <= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_false_node;
      else if (cmp >= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* INVERT is true for (NAME == 0) and (NAME != 1), i.e. when the
	 first comparison tests NAME being false.  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME)
		    {
		      tree temp;
		      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (arg, invert, code2,
						      op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }
	  /* Fall through for non-boolean PHIs.  */

	default:
	  break;
	}
    }
  return NULL_TREE;
}
2313
2314/* Try to simplify the AND of two comparisons, specified by
2315 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2316 If this can be simplified to a single expression (without requiring
2317 introducing more SSA variables to hold intermediate values),
2318 return the resulting tree. Otherwise return NULL_TREE.
2319 If the result expression is non-null, it has boolean type. */
2320
2321tree
2322maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
2323 enum tree_code code2, tree op2a, tree op2b)
2324{
2325 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2326 if (t)
2327 return t;
2328 else
2329 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2330}
2331
2332/* Helper function for or_comparisons_1: try to simplify the OR of the
2333 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
2334 If INVERT is true, invert the value of VAR before doing the OR.
2335 Return NULL_EXPR if we can't simplify this to a single expression. */
2336
2337static tree
2338or_var_with_comparison (tree var, bool invert,
2339 enum tree_code code2, tree op2a, tree op2b)
2340{
2341 tree t;
2342 gimple stmt = SSA_NAME_DEF_STMT (var);
2343
2344 /* We can only deal with variables whose definitions are assignments. */
2345 if (!is_gimple_assign (stmt))
2346 return NULL_TREE;
2347
2348 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
2349 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
2350 Then we only have to consider the simpler non-inverted cases. */
2351 if (invert)
2352 t = and_var_with_comparison_1 (stmt,
2353 invert_tree_comparison (code2, false),
2354 op2a, op2b);
2355 else
2356 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
2357 return canonicalize_bool (t, invert);
2358}
2359
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (gimple stmt,
			  enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      /* (OP2A != 0) and (OP2A == 1) both test OP2A being true.  */
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    /* (var OR var) => var.  */
	    return var;
	}
      /* (OP2A == 0) and (OP2A != 1) both test OP2A being false.  */
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    /* (var OR !var) => true.  */
	    return boolean_true_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (innercode,
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 code2,
				 op2a,
				 op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (innercode == TRUTH_AND_EXPR
      || innercode == TRUTH_OR_EXPR
      || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
	  && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_or = (innercode == TRUTH_OR_EXPR || innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
	 inner1 OR (inner1 AND inner2) => inner1
	 !inner1 OR (inner1 OR inner2) => true
	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
      */
      if (inner1 == true_test_var)
	return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the OR across the inner tests.
	 Compute the first partial result, (inner1 OR (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (t OR inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_or)
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (integer_zerop (t))
		return inner2;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner2 OR (op2a code op2b)))  */
	  else if (integer_zerop (t))
	    return boolean_false_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 OR (op2a code op2b))  */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (inner1 OR t)
	     => (t OR partial)  */
	  if (is_or)
	    {
	      if (integer_zerop (t))
		return inner1;
	      else if (integer_onep (t))
		return boolean_true_node;
	      /* If both are the same, we can apply the identity
		 (x OR x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner1 OR (op2a code2 op2b)))
	     => (t AND partial)  */
	  else
	    {
	      if (integer_zerop (t))
		return boolean_false_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed AND expression.  The
		     interesting case is when at least one is true.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_onep (partial))
		    return t;
		  else if (integer_onep (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
2525
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b)
{
  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1, code2,
				    boolean_type_node, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    boolean_type_node, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      /* CMP orders OP1B relative to OP2B (negative, zero, positive).  */
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a != op1b), we should either be able to
	 return that or TRUE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	    }
	}
      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	    }
	}

      /* See if an equality test is redundant with the other comparison.  */
      else if (code1 == EQ_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default:
	      val = false;
	    }
	  /* The == test is subsumed when the second comparison already
	     accepts the constant op1b; then the second test suffices.  */
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == EQ_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Likewise choose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Check for singleton ranges: (x < c OR x > c) => (x != c).  */
      else if (cmp == 0
	       && ((code1 == LT_EXPR && code2 == GT_EXPR)
		   || (code1 == GT_EXPR && code2 == LT_EXPR)))
	return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all.  */
      else if (cmp >= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_true_node;
      else if (cmp <= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_true_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* INVERT is true for (NAME == 0) and (NAME != 1), i.e. when the
	 first comparison tests NAME being false.  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return or_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ORed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
			{
			  if (!result)
			    result = boolean_true_node;
			  else if (!integer_onep (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME)
		    {
		      tree temp;
		      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = or_var_with_comparison (arg, invert, code2,
						     op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }
	  /* Fall through for non-boolean PHIs.  */

	default:
	  break;
	}
    }
  return NULL_TREE;
}
2774
2775/* Try to simplify the OR of two comparisons, specified by
2776 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2777 If this can be simplified to a single expression (without requiring
2778 introducing more SSA variables to hold intermediate values),
2779 return the resulting tree. Otherwise return NULL_TREE.
2780 If the result expression is non-null, it has boolean type. */
2781
2782tree
2783maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2784 enum tree_code code2, tree op2a, tree op2b)
2785{
2786 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2787 if (t)
2788 return t;
2789 else
2790 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2791}
cfef45c8
RG
2792
2793
2794/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2795
2796 Either NULL_TREE, a simplified but non-constant or a constant
2797 is returned.
2798
2799 ??? This should go into a gimple-fold-inline.h file to be eventually
2800 privatized with the single valueize function used in the various TUs
2801 to avoid the indirect function call overhead. */
2802
2803tree
2804gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
2805{
2806 location_t loc = gimple_location (stmt);
2807 switch (gimple_code (stmt))
2808 {
2809 case GIMPLE_ASSIGN:
2810 {
2811 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2812
2813 switch (get_gimple_rhs_class (subcode))
2814 {
2815 case GIMPLE_SINGLE_RHS:
2816 {
2817 tree rhs = gimple_assign_rhs1 (stmt);
2818 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
2819
2820 if (TREE_CODE (rhs) == SSA_NAME)
2821 {
2822 /* If the RHS is an SSA_NAME, return its known constant value,
2823 if any. */
2824 return (*valueize) (rhs);
2825 }
2826 /* Handle propagating invariant addresses into address
2827 operations. */
2828 else if (TREE_CODE (rhs) == ADDR_EXPR
2829 && !is_gimple_min_invariant (rhs))
2830 {
2831 HOST_WIDE_INT offset;
2832 tree base;
2833 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
2834 &offset,
2835 valueize);
2836 if (base
2837 && (CONSTANT_CLASS_P (base)
2838 || decl_address_invariant_p (base)))
2839 return build_invariant_address (TREE_TYPE (rhs),
2840 base, offset);
2841 }
2842 else if (TREE_CODE (rhs) == CONSTRUCTOR
2843 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
2844 && (CONSTRUCTOR_NELTS (rhs)
2845 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
2846 {
2847 unsigned i;
2848 tree val, list;
2849
2850 list = NULL_TREE;
2851 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
2852 {
2853 val = (*valueize) (val);
2854 if (TREE_CODE (val) == INTEGER_CST
2855 || TREE_CODE (val) == REAL_CST
2856 || TREE_CODE (val) == FIXED_CST)
2857 list = tree_cons (NULL_TREE, val, list);
2858 else
2859 return NULL_TREE;
2860 }
2861
2862 return build_vector (TREE_TYPE (rhs), nreverse (list));
2863 }
2864
2865 if (kind == tcc_reference)
2866 {
2867 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
2868 || TREE_CODE (rhs) == REALPART_EXPR
2869 || TREE_CODE (rhs) == IMAGPART_EXPR)
2870 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2871 {
2872 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2873 return fold_unary_loc (EXPR_LOCATION (rhs),
2874 TREE_CODE (rhs),
2875 TREE_TYPE (rhs), val);
2876 }
2877 else if (TREE_CODE (rhs) == BIT_FIELD_REF
2878 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2879 {
2880 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2881 return fold_ternary_loc (EXPR_LOCATION (rhs),
2882 TREE_CODE (rhs),
2883 TREE_TYPE (rhs), val,
2884 TREE_OPERAND (rhs, 1),
2885 TREE_OPERAND (rhs, 2));
2886 }
2887 else if (TREE_CODE (rhs) == MEM_REF
2888 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2889 {
2890 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2891 if (TREE_CODE (val) == ADDR_EXPR
2892 && is_gimple_min_invariant (val))
2893 {
2894 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
2895 unshare_expr (val),
2896 TREE_OPERAND (rhs, 1));
2897 if (tem)
2898 rhs = tem;
2899 }
2900 }
2901 return fold_const_aggregate_ref_1 (rhs, valueize);
2902 }
2903 else if (kind == tcc_declaration)
2904 return get_symbol_constant_value (rhs);
2905 return rhs;
2906 }
2907
2908 case GIMPLE_UNARY_RHS:
2909 {
2910 /* Handle unary operators that can appear in GIMPLE form.
2911 Note that we know the single operand must be a constant,
2912 so this should almost always return a simplified RHS. */
2913 tree lhs = gimple_assign_lhs (stmt);
2914 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2915
2916 /* Conversions are useless for CCP purposes if they are
2917 value-preserving. Thus the restrictions that
2918 useless_type_conversion_p places for pointer type conversions
2919 do not apply here. Substitution later will only substitute to
2920 allowed places. */
2921 if (CONVERT_EXPR_CODE_P (subcode)
2922 && POINTER_TYPE_P (TREE_TYPE (lhs))
2923 && POINTER_TYPE_P (TREE_TYPE (op0)))
2924 {
2925 tree tem;
2926 /* Try to re-construct array references on-the-fly. */
2927 if (!useless_type_conversion_p (TREE_TYPE (lhs),
2928 TREE_TYPE (op0))
2929 && ((tem = maybe_fold_offset_to_address
2930 (loc,
2931 op0, integer_zero_node, TREE_TYPE (lhs)))
2932 != NULL_TREE))
2933 return tem;
2934 return op0;
2935 }
2936
2937 return
2938 fold_unary_ignore_overflow_loc (loc, subcode,
2939 gimple_expr_type (stmt), op0);
2940 }
2941
2942 case GIMPLE_BINARY_RHS:
2943 {
2944 /* Handle binary operators that can appear in GIMPLE form. */
2945 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2946 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2947
2948 /* Translate &x + CST into an invariant form suitable for
2949 further propagation. */
2950 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
2951 && TREE_CODE (op0) == ADDR_EXPR
2952 && TREE_CODE (op1) == INTEGER_CST)
2953 {
2954 tree off = fold_convert (ptr_type_node, op1);
2955 return build_fold_addr_expr
2956 (fold_build2 (MEM_REF,
2957 TREE_TYPE (TREE_TYPE (op0)),
2958 unshare_expr (op0), off));
2959 }
2960
2961 return fold_binary_loc (loc, subcode,
2962 gimple_expr_type (stmt), op0, op1);
2963 }
2964
2965 case GIMPLE_TERNARY_RHS:
2966 {
2967 /* Handle ternary operators that can appear in GIMPLE form. */
2968 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2969 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2970 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
2971
2972 return fold_ternary_loc (loc, subcode,
2973 gimple_expr_type (stmt), op0, op1, op2);
2974 }
2975
2976 default:
2977 gcc_unreachable ();
2978 }
2979 }
2980
2981 case GIMPLE_CALL:
2982 {
25583c4f
RS
2983 tree fn;
2984
2985 if (gimple_call_internal_p (stmt))
2986 /* No folding yet for these functions. */
2987 return NULL_TREE;
2988
2989 fn = (*valueize) (gimple_call_fn (stmt));
cfef45c8
RG
2990 if (TREE_CODE (fn) == ADDR_EXPR
2991 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
2992 && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
2993 {
2994 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
2995 tree call, retval;
2996 unsigned i;
2997 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2998 args[i] = (*valueize) (gimple_call_arg (stmt, i));
2999 call = build_call_array_loc (loc,
3000 gimple_call_return_type (stmt),
3001 fn, gimple_call_num_args (stmt), args);
3002 retval = fold_call_expr (EXPR_LOCATION (call), call, false);
3003 if (retval)
3004 /* fold_call_expr wraps the result inside a NOP_EXPR. */
3005 STRIP_NOPS (retval);
3006 return retval;
3007 }
3008 return NULL_TREE;
3009 }
3010
3011 default:
3012 return NULL_TREE;
3013 }
3014}
3015
3016/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
3017 Returns NULL_TREE if folding to a constant is not possible, otherwise
3018 returns a constant according to is_gimple_min_invariant. */
3019
3020tree
3021gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
3022{
3023 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
3024 if (res && is_gimple_min_invariant (res))
3025 return res;
3026 return NULL_TREE;
3027}
3028
3029
3030/* The following set of functions are supposed to fold references using
3031 their constant initializers. */
3032
3033static tree fold_ctor_reference (tree type, tree ctor,
3034 unsigned HOST_WIDE_INT offset,
3035 unsigned HOST_WIDE_INT size);
3036
/* See if we can find the constructor defining the value of BASE.
   When we know the constructor with constant offset (such as
   BASE is array[40] and we do know the constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   VALUEIZE, when non-NULL, is used to map an SSA-name address inside a
   MEM_REF to its known value.

   As a special case, return error_mark_node when the constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  Returns NULL_TREE when no constructor
   can be determined.  */
static tree
get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
		      tree (*valueize)(tree))
{
  HOST_WIDE_INT bit_offset2, size, max_size;
  if (TREE_CODE (base) == MEM_REF)
    {
      /* Fold a constant byte offset of the MEM_REF into *BIT_OFFSET;
	 give up if the offset does not fit a HOST_WIDE_INT.  */
      if (!integer_zerop (TREE_OPERAND (base, 1)))
	{
	  if (!host_integerp (TREE_OPERAND (base, 1), 0))
	    return NULL_TREE;
	  *bit_offset += (mem_ref_offset (base).low
			  * BITS_PER_UNIT);
	}

      /* Look through an SSA-name address via the valueization callback.  */
      if (valueize
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	base = valueize (TREE_OPERAND (base, 0));
      /* Only a known &decl address lets us find an initializer.  */
      if (!base || TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
      if (!const_value_known_p (base))
	return NULL_TREE;

      /* Fallthru. */
    case CONST_DECL:
      /* A static/external decl without an initializer is known to be
	 zero-initialized; signal that with error_mark_node.  */
      if (!DECL_INITIAL (base)
	  && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
	return error_mark_node;
      return DECL_INITIAL (base);

    case ARRAY_REF:
    case COMPONENT_REF:
      /* Peel the outer reference, accumulate its constant bit offset,
	 and recurse on the inner base.  Variable-sized or variable-offset
	 accesses cannot be tracked.  */
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
      if (max_size == -1 || size != max_size)
	return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case STRING_CST:
    case CONSTRUCTOR:
      return base;

    default:
      return NULL_TREE;
    }
}
3101
3102/* CTOR is STRING_CST. Fold reference of type TYPE and size SIZE
3103 to the memory at bit OFFSET.
3104
3105 We do only simple job of folding byte accesses. */
3106
3107static tree
3108fold_string_cst_ctor_reference (tree type, tree ctor,
3109 unsigned HOST_WIDE_INT offset,
3110 unsigned HOST_WIDE_INT size)
3111{
3112 if (INTEGRAL_TYPE_P (type)
3113 && (TYPE_MODE (type)
3114 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
3115 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
3116 == MODE_INT)
3117 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
3118 && size == BITS_PER_UNIT
3119 && !(offset % BITS_PER_UNIT))
3120 {
3121 offset /= BITS_PER_UNIT;
3122 if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
3123 return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
3124 [offset]));
3125 /* Folding
3126 const char a[20]="hello";
3127 return a[10];
3128
3129 might lead to offset greater than string length. In this case we
3130 know value is either initialized to 0 or out of bounds. Return 0
3131 in both cases. */
3132 return build_zero_cst (type);
3133 }
3134 return NULL_TREE;
3135}
3136
/* CTOR is a CONSTRUCTOR of an array type.  Fold a reference of type TYPE
   and bit size SIZE to the memory at bit OFFSET.

   Returns the folded constant, build_zero_cst (TYPE) when the accessed
   memory is not explicitly initialized (hence zero or out of range), or
   NULL_TREE when the access cannot be folded.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;
  double_int low_bound, elt_size;
  double_int index, max_index;
  double_int access_index;
  tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size. */
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects makes no sense. */
      gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
      low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = double_int_zero;
  /* Static constructors for variably sized objects makes no sense. */
  gcc_assert (TREE_CODE(TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
	      == INTEGER_CST);
  elt_size =
    tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));


  /* We can handle only constantly sized accesses that are known to not
     be larger than size of array element. */
  if (!TYPE_SIZE_UNIT (type)
      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
      || double_int_cmp (elt_size,
			 tree_to_double_int (TYPE_SIZE_UNIT (type)), 0) < 0)
    return NULL_TREE;

  /* Compute the array index we look for. */
  access_index = double_int_udiv (uhwi_to_double_int (offset / BITS_PER_UNIT),
				  elt_size, TRUNC_DIV_EXPR);
  access_index = double_int_add (access_index, low_bound);

  /* And offset within the access. */
  inner_offset = offset % (double_int_to_uhwi (elt_size) * BITS_PER_UNIT);

  /* See if the array field is large enough to span whole access.  We do not
     care to fold accesses spanning multiple array indexes. */
  if (inner_offset + size > double_int_to_uhwi (elt_size) * BITS_PER_UNIT)
    return NULL_TREE;

  /* Walk the constructor elements tracking the running index until we
     find the element that covers ACCESS_INDEX.  */
  index = double_int_sub (low_bound, double_int_one);
  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
    {
      /* Array constructor might explicitely set index, or specify range
	 or leave index NULL meaning that it is next index after previous
	 one. */
      if (cfield)
	{
	  if (TREE_CODE (cfield) == INTEGER_CST)
	    max_index = index = tree_to_double_int (cfield);
	  else
	    {
	      gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
	      index = tree_to_double_int (TREE_OPERAND (cfield, 0));
	      max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
	    }
	}
      else
	max_index = index = double_int_add (index, double_int_one);

      /* Do we have match?  (Unsigned compare: last argument 1.)  */
      if (double_int_cmp (access_index, index, 1) >= 0
	  && double_int_cmp (access_index, max_index, 1) <= 0)
	return fold_ctor_reference (type, cval, inner_offset, size);
    }
  /* When memory is not explicitely mentioned in constructor,
     it is 0 (or out of range). */
  return build_zero_cst (type);
}
3219
/* CTOR is a CONSTRUCTOR of an aggregate or vector.
   Fold a reference of type TYPE and bit size SIZE to the memory at bit
   OFFSET.

   Returns the folded constant, build_zero_cst (TYPE) when the memory is
   not covered by any initialized field, or NULL_TREE when the access
   spans multiple fields and cannot be folded.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);
      double_int bitoffset;
      double_int byte_offset_cst = tree_to_double_int (byte_offset);
      double_int bits_per_unit_cst = uhwi_to_double_int (BITS_PER_UNIT);
      double_int bitoffset_end;

      /* Variable sized objects in static constructors makes no sense,
	 but field_size can be NULL for flexible array members. */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field. */
      bitoffset = double_int_add (tree_to_double_int (field_offset),
				  double_int_mul (byte_offset_cst,
						  bits_per_unit_cst));
      /* Compute bit offset where the field ends.  A flexible array
	 member (NULL field_size) has no upper end.  */
      if (field_size != NULL_TREE)
	bitoffset_end = double_int_add (bitoffset,
					tree_to_double_int (field_size));
      else
	bitoffset_end = double_int_zero;

      /* Is OFFSET in the range (BITOFFSET, BITOFFSET_END)? */
      if (double_int_cmp (uhwi_to_double_int (offset), bitoffset, 0) >= 0
	  && (field_size == NULL_TREE
	      || double_int_cmp (uhwi_to_double_int (offset),
				 bitoffset_end, 0) < 0))
	{
	  double_int access_end = double_int_add (uhwi_to_double_int (offset),
						  uhwi_to_double_int (size));
	  double_int inner_offset = double_int_sub (uhwi_to_double_int (offset),
						    bitoffset);
	  /* We do have overlap.  Now see if field is large enough to
	     cover the access.  Give up for accesses spanning multiple
	     fields. */
	  if (double_int_cmp (access_end, bitoffset_end, 0) > 0)
	    return NULL_TREE;
	  /* Recurse into the field's initializer at the field-relative
	     offset.  */
	  return fold_ctor_reference (type, cval,
				      double_int_to_uhwi (inner_offset), size);
	}
    }
  /* When memory is not explicitely mentioned in constructor, it is 0. */
  return build_zero_cst (type);
}
3283
3284/* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
3285 to the memory at bit OFFSET. */
3286
3287static tree
3288fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
3289 unsigned HOST_WIDE_INT size)
3290{
3291 tree ret;
3292
3293 /* We found the field with exact match. */
3294 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
3295 && !offset)
3296 return canonicalize_constructor_val (ctor);
3297
3298 /* We are at the end of walk, see if we can view convert the
3299 result. */
3300 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
3301 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
3302 && operand_equal_p (TYPE_SIZE (type),
3303 TYPE_SIZE (TREE_TYPE (ctor)), 0))
3304 {
3305 ret = canonicalize_constructor_val (ctor);
3306 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
3307 if (ret)
3308 STRIP_NOPS (ret);
3309 return ret;
3310 }
3311 if (TREE_CODE (ctor) == STRING_CST)
3312 return fold_string_cst_ctor_reference (type, ctor, offset, size);
3313 if (TREE_CODE (ctor) == CONSTRUCTOR)
3314 {
3315
3316 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
3317 return fold_array_ctor_reference (type, ctor, offset, size);
3318 else
3319 return fold_nonarray_ctor_reference (type, ctor, offset, size);
3320 }
3321
3322 return NULL_TREE;
3323}
3324
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
   names using VALUEIZE (which may be NULL).  Return NULL_TREE otherwise. */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  HOST_WIDE_INT offset, size, max_size;
  tree tem;

  /* A bare declaration folds to its known constant value, if any.  */
  if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
    return get_symbol_constant_value (t);

  /* Reads from string literals can be folded directly.  */
  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback. */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && host_integerp (idx, 0))
	{
	  tree low_bound, unit_size;

	  /* If the resulting bit-offset is constant, track it. */
	  if ((low_bound = array_ref_low_bound (t),
	       host_integerp (low_bound, 0))
	      && (unit_size = array_ref_element_size (t),
		  host_integerp (unit_size, 1)))
	    {
	      /* Bit offset of the indexed element from the array start.  */
	      offset = TREE_INT_CST_LOW (idx);
	      offset -= TREE_INT_CST_LOW (low_bound);
	      offset *= TREE_INT_CST_LOW (unit_size);
	      offset *= BITS_PER_UNIT;

	      base = TREE_OPERAND (t, 0);
	      ctor = get_base_constructor (base, &offset, valueize);
	      /* Empty constructor.  Always fold to 0. */
	      if (ctor == error_mark_node)
		return build_zero_cst (TREE_TYPE (t));
	      /* Out of bound array access.  Value is undefined,
		 but don't fold. */
	      if (offset < 0)
		return NULL_TREE;
	      /* We can not determine ctor. */
	      if (!ctor)
		return NULL_TREE;
	      return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					  TREE_INT_CST_LOW (unit_size)
					  * BITS_PER_UNIT);
	    }
	}
      /* Fallthru. */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0. */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address. */
      if (max_size == -1 || max_size != size)
	return NULL_TREE;
      /* We can not determine ctor. */
      if (!ctor)
	return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold. */
      if (offset < 0)
	return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	/* Fold the real/imaginary part of a constant complex value.  */
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
3428
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates, without
   valueizing SSA names.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref (tree t)
{
  return fold_const_aggregate_ref_1 (t, NULL);
}