]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/gimple-fold.c
re PR driver/57651 (gcc-ar and gcc-nm don't find the lto plugin)
[thirdparty/gcc.git] / gcc / gimple-fold.c
CommitLineData
cbdd87d4 1/* Statement simplification on GIMPLE.
d1e082c2 2 Copyright (C) 2010-2013 Free Software Foundation, Inc.
cbdd87d4
RG
3 Split out from tree-ssa-ccp.c.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it
8under the terms of the GNU General Public License as published by the
9Free Software Foundation; either version 3, or (at your option) any
10later version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT
13ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "tm.h"
25#include "tree.h"
26#include "flags.h"
cbdd87d4 27#include "function.h"
7ee2468b 28#include "dumpfile.h"
cbdd87d4 29#include "tree-flow.h"
cbdd87d4 30#include "tree-ssa-propagate.h"
cbdd87d4 31#include "target.h"
cfef45c8 32#include "gimple-fold.h"
cbdd87d4 33
b3b9f3d0 34/* Return true when DECL can be referenced from current unit.
c44c2088
JH
35 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
36 We can get declarations that are not possible to reference for various
37 reasons:
1389294c 38
1389294c
JH
39 1) When analyzing C++ virtual tables.
40 C++ virtual tables do have known constructors even
41 when they are keyed to other compilation unit.
42 Those tables can contain pointers to methods and vars
43 in other units. Those methods have both STATIC and EXTERNAL
44 set.
45 2) In WHOPR mode devirtualization might lead to reference
46 to method that was partitioned elsehwere.
47 In this case we have static VAR_DECL or FUNCTION_DECL
48 that has no corresponding callgraph/varpool node
b3b9f3d0
JH
49 declaring the body.
50 3) COMDAT functions referred by external vtables that
51 we devirtualize only during final copmilation stage.
52 At this time we already decided that we will not output
53 the function body and thus we can't reference the symbol
54 directly. */
55
1389294c 56static bool
c44c2088 57can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
1389294c
JH
58{
59 struct varpool_node *vnode;
60 struct cgraph_node *node;
c44c2088
JH
61 symtab_node snode;
62
6da8be89 63 /* We will later output the initializer, so we can refer to it.
c44c2088
JH
64 So we are concerned only when DECL comes from initializer of
65 external var. */
66 if (!from_decl
67 || TREE_CODE (from_decl) != VAR_DECL
68 || !DECL_EXTERNAL (from_decl)
6da8be89
MM
69 || (flag_ltrans
70 && symtab_get_node (from_decl)->symbol.in_other_partition))
c44c2088 71 return true;
6da8be89 72 /* We are concerned only about static/external vars and functions. */
c44c2088
JH
73 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
74 || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
75 return true;
c44c2088
JH
76 /* We are folding reference from external vtable. The vtable may reffer
77 to a symbol keyed to other compilation unit. The other compilation
78 unit may be in separate DSO and the symbol may be hidden. */
79 if (DECL_VISIBILITY_SPECIFIED (decl)
80 && DECL_EXTERNAL (decl)
81 && (!(snode = symtab_get_node (decl)) || !snode->symbol.in_other_partition))
82 return false;
b3b9f3d0
JH
83 /* When function is public, we always can introduce new reference.
84 Exception are the COMDAT functions where introducing a direct
85 reference imply need to include function body in the curren tunit. */
86 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
87 return true;
88 /* We are not at ltrans stage; so don't worry about WHOPR.
89 Also when still gimplifying all referred comdat functions will be
2e9bb6ba 90 produced.
c44c2088
JH
91
92 As observed in PR20991 for already optimized out comdat virtual functions
073a8998 93 it may be tempting to not necessarily give up because the copy will be
c44c2088
JH
94 output elsewhere when corresponding vtable is output.
95 This is however not possible - ABI specify that COMDATs are output in
96 units where they are used and when the other unit was compiled with LTO
97 it is possible that vtable was kept public while the function itself
98 was privatized. */
b3b9f3d0
JH
99 if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
100 return true;
c44c2088
JH
101
102 /* OK we are seeing either COMDAT or static variable. In this case we must
103 check that the definition is still around so we can refer it. */
1389294c
JH
104 if (TREE_CODE (decl) == FUNCTION_DECL)
105 {
106 node = cgraph_get_node (decl);
b3b9f3d0
JH
107 /* Check that we still have function body and that we didn't took
108 the decision to eliminate offline copy of the function yet.
109 The second is important when devirtualization happens during final
110 compilation stage when making a new reference no longer makes callee
111 to be compiled. */
e70670cf 112 if (!node || !node->symbol.definition || node->global.inlined_to)
c44c2088
JH
113 {
114 gcc_checking_assert (!TREE_ASM_WRITTEN (decl));
115 return false;
116 }
1389294c
JH
117 }
118 else if (TREE_CODE (decl) == VAR_DECL)
119 {
120 vnode = varpool_get_node (decl);
e70670cf 121 if (!vnode || !vnode->symbol.definition)
c44c2088
JH
122 {
123 gcc_checking_assert (!TREE_ASM_WRITTEN (decl));
124 return false;
125 }
1389294c 126 }
b3b9f3d0 127 return true;
1389294c
JH
128}
129
0038d4e0 130/* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
c44c2088
JH
131 acceptable form for is_gimple_min_invariant.
132 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
17f39a39
JH
133
134tree
c44c2088 135canonicalize_constructor_val (tree cval, tree from_decl)
17f39a39 136{
50619002
EB
137 tree orig_cval = cval;
138 STRIP_NOPS (cval);
315f5f1b
RG
139 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
140 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
17f39a39 141 {
315f5f1b
RG
142 tree ptr = TREE_OPERAND (cval, 0);
143 if (is_gimple_min_invariant (ptr))
144 cval = build1_loc (EXPR_LOCATION (cval),
145 ADDR_EXPR, TREE_TYPE (ptr),
146 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
147 ptr,
148 fold_convert (ptr_type_node,
149 TREE_OPERAND (cval, 1))));
17f39a39
JH
150 }
151 if (TREE_CODE (cval) == ADDR_EXPR)
152 {
5a27a197
RG
153 tree base = NULL_TREE;
154 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
ca5f4331
MM
155 {
156 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
157 if (base)
158 TREE_OPERAND (cval, 0) = base;
159 }
5a27a197
RG
160 else
161 base = get_base_address (TREE_OPERAND (cval, 0));
7501ca28
RG
162 if (!base)
163 return NULL_TREE;
b3b9f3d0 164
7501ca28
RG
165 if ((TREE_CODE (base) == VAR_DECL
166 || TREE_CODE (base) == FUNCTION_DECL)
c44c2088 167 && !can_refer_decl_in_current_unit_p (base, from_decl))
1389294c 168 return NULL_TREE;
7501ca28 169 if (TREE_CODE (base) == VAR_DECL)
46eb666a 170 TREE_ADDRESSABLE (base) = 1;
7501ca28
RG
171 else if (TREE_CODE (base) == FUNCTION_DECL)
172 {
173 /* Make sure we create a cgraph node for functions we'll reference.
174 They can be non-existent if the reference comes from an entry
175 of an external vtable for example. */
66e6b990 176 cgraph_get_create_real_symbol_node (base);
7501ca28 177 }
0038d4e0 178 /* Fixup types in global initializers. */
73aef89e
RG
179 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
180 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
50619002
EB
181
182 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
183 cval = fold_convert (TREE_TYPE (orig_cval), cval);
184 return cval;
17f39a39 185 }
50619002 186 return orig_cval;
17f39a39 187}
cbdd87d4
RG
188
189/* If SYM is a constant variable with known value, return the value.
190 NULL_TREE is returned otherwise. */
191
192tree
193get_symbol_constant_value (tree sym)
194{
64e0f5ff 195 if (const_value_known_p (sym))
cbdd87d4
RG
196 {
197 tree val = DECL_INITIAL (sym);
198 if (val)
199 {
9d60be38 200 val = canonicalize_constructor_val (unshare_expr (val), sym);
1389294c 201 if (val && is_gimple_min_invariant (val))
17f39a39 202 return val;
1389294c
JH
203 else
204 return NULL_TREE;
cbdd87d4
RG
205 }
206 /* Variables declared 'const' without an initializer
207 have zero as the initializer if they may not be
208 overridden at link or run time. */
209 if (!val
cbdd87d4
RG
210 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
211 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
e8160c9a 212 return build_zero_cst (TREE_TYPE (sym));
cbdd87d4
RG
213 }
214
215 return NULL_TREE;
216}
217
218
cbdd87d4
RG
219
220/* Subroutine of fold_stmt. We perform several simplifications of the
221 memory reference tree EXPR and make sure to re-gimplify them properly
222 after propagation of constant addresses. IS_LHS is true if the
223 reference is supposed to be an lvalue. */
224
225static tree
226maybe_fold_reference (tree expr, bool is_lhs)
227{
228 tree *t = &expr;
17f39a39 229 tree result;
cbdd87d4 230
f0eddb90
RG
231 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
232 || TREE_CODE (expr) == REALPART_EXPR
233 || TREE_CODE (expr) == IMAGPART_EXPR)
234 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
235 return fold_unary_loc (EXPR_LOCATION (expr),
236 TREE_CODE (expr),
237 TREE_TYPE (expr),
238 TREE_OPERAND (expr, 0));
239 else if (TREE_CODE (expr) == BIT_FIELD_REF
240 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
241 return fold_ternary_loc (EXPR_LOCATION (expr),
242 TREE_CODE (expr),
243 TREE_TYPE (expr),
244 TREE_OPERAND (expr, 0),
245 TREE_OPERAND (expr, 1),
246 TREE_OPERAND (expr, 2));
247
248 while (handled_component_p (*t))
249 t = &TREE_OPERAND (*t, 0);
cbdd87d4 250
f0eddb90
RG
251 /* Canonicalize MEM_REFs invariant address operand. Do this first
252 to avoid feeding non-canonical MEM_REFs elsewhere. */
253 if (TREE_CODE (*t) == MEM_REF
254 && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
cbdd87d4 255 {
f0eddb90
RG
256 bool volatile_p = TREE_THIS_VOLATILE (*t);
257 tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
258 TREE_OPERAND (*t, 0),
259 TREE_OPERAND (*t, 1));
260 if (tem)
261 {
262 TREE_THIS_VOLATILE (tem) = volatile_p;
263 *t = tem;
264 tem = maybe_fold_reference (expr, is_lhs);
265 if (tem)
266 return tem;
267 return expr;
268 }
cbdd87d4
RG
269 }
270
f0eddb90
RG
271 if (!is_lhs
272 && (result = fold_const_aggregate_ref (expr))
273 && is_gimple_min_invariant (result))
274 return result;
cbdd87d4 275
70f34814
RG
276 /* Fold back MEM_REFs to reference trees. */
277 if (TREE_CODE (*t) == MEM_REF
278 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
279 && integer_zerop (TREE_OPERAND (*t, 1))
280 && (TREE_THIS_VOLATILE (*t)
281 == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
282 && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
283 && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
284 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
285 /* We have to look out here to not drop a required conversion
286 from the rhs to the lhs if is_lhs, but we don't have the
287 rhs here to verify that. Thus require strict type
288 compatibility. */
289 && types_compatible_p (TREE_TYPE (*t),
290 TREE_TYPE (TREE_OPERAND
f0eddb90 291 (TREE_OPERAND (*t, 0), 0))))
cbdd87d4 292 {
70f34814
RG
293 tree tem;
294 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
295 tem = maybe_fold_reference (expr, is_lhs);
296 if (tem)
297 return tem;
298 return expr;
299 }
4d948885
RG
300 else if (TREE_CODE (*t) == TARGET_MEM_REF)
301 {
302 tree tem = maybe_fold_tmr (*t);
303 if (tem)
304 {
305 *t = tem;
306 tem = maybe_fold_reference (expr, is_lhs);
307 if (tem)
308 return tem;
309 return expr;
310 }
311 }
cbdd87d4
RG
312
313 return NULL_TREE;
314}
315
316
317/* Attempt to fold an assignment statement pointed-to by SI. Returns a
318 replacement rhs for the statement or NULL_TREE if no simplification
319 could be made. It is assumed that the operands have been previously
320 folded. */
321
322static tree
323fold_gimple_assign (gimple_stmt_iterator *si)
324{
325 gimple stmt = gsi_stmt (*si);
326 enum tree_code subcode = gimple_assign_rhs_code (stmt);
327 location_t loc = gimple_location (stmt);
328
329 tree result = NULL_TREE;
330
331 switch (get_gimple_rhs_class (subcode))
332 {
333 case GIMPLE_SINGLE_RHS:
334 {
335 tree rhs = gimple_assign_rhs1 (stmt);
336
4e71066d 337 if (REFERENCE_CLASS_P (rhs))
cbdd87d4
RG
338 return maybe_fold_reference (rhs, false);
339
340 else if (TREE_CODE (rhs) == ADDR_EXPR)
341 {
70f34814
RG
342 tree ref = TREE_OPERAND (rhs, 0);
343 tree tem = maybe_fold_reference (ref, true);
344 if (tem
345 && TREE_CODE (tem) == MEM_REF
346 && integer_zerop (TREE_OPERAND (tem, 1)))
347 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
348 else if (tem)
cbdd87d4
RG
349 result = fold_convert (TREE_TYPE (rhs),
350 build_fold_addr_expr_loc (loc, tem));
70f34814
RG
351 else if (TREE_CODE (ref) == MEM_REF
352 && integer_zerop (TREE_OPERAND (ref, 1)))
353 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
cbdd87d4
RG
354 }
355
356 else if (TREE_CODE (rhs) == CONSTRUCTOR
357 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
358 && (CONSTRUCTOR_NELTS (rhs)
359 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
360 {
361 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
362 unsigned i;
363 tree val;
364
365 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
366 if (TREE_CODE (val) != INTEGER_CST
367 && TREE_CODE (val) != REAL_CST
368 && TREE_CODE (val) != FIXED_CST)
369 return NULL_TREE;
370
371 return build_vector_from_ctor (TREE_TYPE (rhs),
372 CONSTRUCTOR_ELTS (rhs));
373 }
374
375 else if (DECL_P (rhs))
9d60be38 376 return get_symbol_constant_value (rhs);
cbdd87d4
RG
377
378 /* If we couldn't fold the RHS, hand over to the generic
379 fold routines. */
380 if (result == NULL_TREE)
381 result = fold (rhs);
382
383 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
384 that may have been added by fold, and "useless" type
385 conversions that might now be apparent due to propagation. */
386 STRIP_USELESS_TYPE_CONVERSION (result);
387
388 if (result != rhs && valid_gimple_rhs_p (result))
389 return result;
390
391 return NULL_TREE;
392 }
393 break;
394
395 case GIMPLE_UNARY_RHS:
396 {
397 tree rhs = gimple_assign_rhs1 (stmt);
398
399 result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
400 if (result)
401 {
402 /* If the operation was a conversion do _not_ mark a
403 resulting constant with TREE_OVERFLOW if the original
404 constant was not. These conversions have implementation
405 defined behavior and retaining the TREE_OVERFLOW flag
406 here would confuse later passes such as VRP. */
407 if (CONVERT_EXPR_CODE_P (subcode)
408 && TREE_CODE (result) == INTEGER_CST
409 && TREE_CODE (rhs) == INTEGER_CST)
410 TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);
411
412 STRIP_USELESS_TYPE_CONVERSION (result);
413 if (valid_gimple_rhs_p (result))
414 return result;
415 }
cbdd87d4
RG
416 }
417 break;
418
419 case GIMPLE_BINARY_RHS:
9b80d091
KT
420 /* Try to canonicalize for boolean-typed X the comparisons
421 X == 0, X == 1, X != 0, and X != 1. */
315f5f1b
RG
422 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
423 || gimple_assign_rhs_code (stmt) == NE_EXPR)
9b80d091
KT
424 {
425 tree lhs = gimple_assign_lhs (stmt);
426 tree op1 = gimple_assign_rhs1 (stmt);
427 tree op2 = gimple_assign_rhs2 (stmt);
428 tree type = TREE_TYPE (op1);
429
430 /* Check whether the comparison operands are of the same boolean
431 type as the result type is.
432 Check that second operand is an integer-constant with value
433 one or zero. */
434 if (TREE_CODE (op2) == INTEGER_CST
435 && (integer_zerop (op2) || integer_onep (op2))
436 && useless_type_conversion_p (TREE_TYPE (lhs), type))
437 {
438 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
439 bool is_logical_not = false;
440
441 /* X == 0 and X != 1 is a logical-not.of X
442 X == 1 and X != 0 is X */
443 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
444 || (cmp_code == NE_EXPR && integer_onep (op2)))
445 is_logical_not = true;
446
447 if (is_logical_not == false)
448 result = op1;
449 /* Only for one-bit precision typed X the transformation
450 !X -> ~X is valied. */
451 else if (TYPE_PRECISION (type) == 1)
452 result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
453 type, op1);
454 /* Otherwise we use !X -> X ^ 1. */
455 else
456 result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
457 type, op1, build_int_cst (type, 1));
458
459 }
460 }
cbdd87d4
RG
461
462 if (!result)
463 result = fold_binary_loc (loc, subcode,
5fbcc0ed
RG
464 TREE_TYPE (gimple_assign_lhs (stmt)),
465 gimple_assign_rhs1 (stmt),
466 gimple_assign_rhs2 (stmt));
cbdd87d4
RG
467
468 if (result)
469 {
470 STRIP_USELESS_TYPE_CONVERSION (result);
471 if (valid_gimple_rhs_p (result))
472 return result;
cbdd87d4
RG
473 }
474 break;
475
0354c0c7 476 case GIMPLE_TERNARY_RHS:
4e71066d
RG
477 /* Try to fold a conditional expression. */
478 if (gimple_assign_rhs_code (stmt) == COND_EXPR)
479 {
480 tree op0 = gimple_assign_rhs1 (stmt);
481 tree tem;
482 bool set = false;
483 location_t cond_loc = gimple_location (stmt);
484
485 if (COMPARISON_CLASS_P (op0))
486 {
487 fold_defer_overflow_warnings ();
488 tem = fold_binary_loc (cond_loc,
489 TREE_CODE (op0), TREE_TYPE (op0),
490 TREE_OPERAND (op0, 0),
491 TREE_OPERAND (op0, 1));
492 /* This is actually a conditional expression, not a GIMPLE
493 conditional statement, however, the valid_gimple_rhs_p
494 test still applies. */
495 set = (tem && is_gimple_condexpr (tem)
496 && valid_gimple_rhs_p (tem));
497 fold_undefer_overflow_warnings (set, stmt, 0);
498 }
499 else if (is_gimple_min_invariant (op0))
500 {
501 tem = op0;
502 set = true;
503 }
504 else
505 return NULL_TREE;
506
507 if (set)
508 result = fold_build3_loc (cond_loc, COND_EXPR,
509 TREE_TYPE (gimple_assign_lhs (stmt)), tem,
510 gimple_assign_rhs2 (stmt),
511 gimple_assign_rhs3 (stmt));
512 }
513
514 if (!result)
515 result = fold_ternary_loc (loc, subcode,
516 TREE_TYPE (gimple_assign_lhs (stmt)),
517 gimple_assign_rhs1 (stmt),
518 gimple_assign_rhs2 (stmt),
519 gimple_assign_rhs3 (stmt));
0354c0c7
BS
520
521 if (result)
522 {
523 STRIP_USELESS_TYPE_CONVERSION (result);
524 if (valid_gimple_rhs_p (result))
525 return result;
0354c0c7
BS
526 }
527 break;
528
cbdd87d4
RG
529 case GIMPLE_INVALID_RHS:
530 gcc_unreachable ();
531 }
532
533 return NULL_TREE;
534}
535
536/* Attempt to fold a conditional statement. Return true if any changes were
537 made. We only attempt to fold the condition expression, and do not perform
538 any transformation that would require alteration of the cfg. It is
539 assumed that the operands have been previously folded. */
540
541static bool
542fold_gimple_cond (gimple stmt)
543{
544 tree result = fold_binary_loc (gimple_location (stmt),
545 gimple_cond_code (stmt),
546 boolean_type_node,
547 gimple_cond_lhs (stmt),
548 gimple_cond_rhs (stmt));
549
550 if (result)
551 {
552 STRIP_USELESS_TYPE_CONVERSION (result);
553 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
554 {
555 gimple_cond_set_condition_from_tree (stmt, result);
556 return true;
557 }
558 }
559
560 return false;
561}
562
563/* Convert EXPR into a GIMPLE value suitable for substitution on the
564 RHS of an assignment. Insert the necessary statements before
565 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
566 is replaced. If the call is expected to produces a result, then it
567 is replaced by an assignment of the new RHS to the result variable.
568 If the result is to be ignored, then the call is replaced by a
fe2ef088
MM
569 GIMPLE_NOP. A proper VDEF chain is retained by making the first
570 VUSE and the last VDEF of the whole sequence be the same as the replaced
571 statement and using new SSA names for stores in between. */
cbdd87d4
RG
572
573void
574gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
575{
576 tree lhs;
cbdd87d4
RG
577 gimple stmt, new_stmt;
578 gimple_stmt_iterator i;
355a7673 579 gimple_seq stmts = NULL;
cbdd87d4 580 struct gimplify_ctx gctx;
e256dfce 581 gimple laststore;
fe2ef088 582 tree reaching_vuse;
cbdd87d4
RG
583
584 stmt = gsi_stmt (*si_p);
585
586 gcc_assert (is_gimple_call (stmt));
587
cbdd87d4 588 push_gimplify_context (&gctx);
21860814 589 gctx.into_ssa = gimple_in_ssa_p (cfun);
cbdd87d4 590
e256dfce 591 lhs = gimple_call_lhs (stmt);
cbdd87d4 592 if (lhs == NULL_TREE)
6e572326
RG
593 {
594 gimplify_and_add (expr, &stmts);
595 /* We can end up with folding a memcpy of an empty class assignment
596 which gets optimized away by C++ gimplification. */
597 if (gimple_seq_empty_p (stmts))
598 {
9fdc58de 599 pop_gimplify_context (NULL);
6e572326
RG
600 if (gimple_in_ssa_p (cfun))
601 {
602 unlink_stmt_vdef (stmt);
603 release_defs (stmt);
604 }
77d19c72 605 gsi_replace (si_p, gimple_build_nop (), true);
6e572326
RG
606 return;
607 }
608 }
cbdd87d4 609 else
e256dfce
RG
610 {
611 tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
612 new_stmt = gimple_build_assign (lhs, tmp);
613 i = gsi_last (stmts);
614 gsi_insert_after_without_update (&i, new_stmt,
615 GSI_CONTINUE_LINKING);
616 }
cbdd87d4
RG
617
618 pop_gimplify_context (NULL);
619
620 if (gimple_has_location (stmt))
621 annotate_all_with_location (stmts, gimple_location (stmt));
622
e256dfce
RG
623 /* First iterate over the replacement statements backward, assigning
624 virtual operands to their defining statements. */
625 laststore = NULL;
626 for (i = gsi_last (stmts); !gsi_end_p (i); gsi_prev (&i))
627 {
628 new_stmt = gsi_stmt (i);
949e47e5
JJ
629 if ((gimple_assign_single_p (new_stmt)
630 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
631 || (is_gimple_call (new_stmt)
632 && (gimple_call_flags (new_stmt)
633 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
e256dfce
RG
634 {
635 tree vdef;
636 if (!laststore)
637 vdef = gimple_vdef (stmt);
638 else
639 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
640 gimple_set_vdef (new_stmt, vdef);
52f26be4 641 if (vdef && TREE_CODE (vdef) == SSA_NAME)
e256dfce
RG
642 SSA_NAME_DEF_STMT (vdef) = new_stmt;
643 laststore = new_stmt;
644 }
645 }
646
647 /* Second iterate over the statements forward, assigning virtual
648 operands to their uses. */
e256dfce 649 reaching_vuse = gimple_vuse (stmt);
cbdd87d4
RG
650 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
651 {
cbdd87d4 652 new_stmt = gsi_stmt (i);
a24ac460
RG
653 /* If the new statement possibly has a VUSE, update it with exact SSA
654 name we know will reach this one. */
655 if (gimple_has_mem_ops (new_stmt))
e256dfce
RG
656 gimple_set_vuse (new_stmt, reaching_vuse);
657 gimple_set_modified (new_stmt, true);
658 if (gimple_vdef (new_stmt))
659 reaching_vuse = gimple_vdef (new_stmt);
cbdd87d4
RG
660 }
661
e256dfce
RG
662 /* If the new sequence does not do a store release the virtual
663 definition of the original statement. */
664 if (reaching_vuse
665 && reaching_vuse == gimple_vuse (stmt))
cbdd87d4 666 {
e256dfce
RG
667 tree vdef = gimple_vdef (stmt);
668 if (vdef
669 && TREE_CODE (vdef) == SSA_NAME)
fe2ef088
MM
670 {
671 unlink_stmt_vdef (stmt);
e256dfce 672 release_ssa_name (vdef);
8a1561bc 673 }
cbdd87d4
RG
674 }
675
355a7673
MM
676 /* Finally replace the original statement with the sequence. */
677 gsi_replace_with_seq (si_p, stmts, false);
cbdd87d4
RG
678}
679
680/* Return the string length, maximum string length or maximum value of
681 ARG in LENGTH.
682 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
683 is not NULL and, for TYPE == 0, its value is not equal to the length
684 we determine or if we are unable to determine the length or value,
685 return false. VISITED is a bitmap of visited variables.
686 TYPE is 0 if string length should be returned, 1 for maximum string
687 length and 2 for maximum value ARG can have. */
688
689static bool
690get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
691{
692 tree var, val;
693 gimple def_stmt;
694
695 if (TREE_CODE (arg) != SSA_NAME)
696 {
cbdd87d4 697 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
763dc99d
AP
698 if (TREE_CODE (arg) == ADDR_EXPR
699 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
700 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
cbdd87d4
RG
701 {
702 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
703 if (TREE_CODE (aop0) == INDIRECT_REF
704 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
705 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
706 length, visited, type);
707 }
708
709 if (type == 2)
710 {
711 val = arg;
712 if (TREE_CODE (val) != INTEGER_CST
713 || tree_int_cst_sgn (val) < 0)
714 return false;
715 }
716 else
717 val = c_strlen (arg, 1);
718 if (!val)
719 return false;
720
721 if (*length)
722 {
723 if (type > 0)
724 {
725 if (TREE_CODE (*length) != INTEGER_CST
726 || TREE_CODE (val) != INTEGER_CST)
727 return false;
728
729 if (tree_int_cst_lt (*length, val))
730 *length = val;
731 return true;
732 }
733 else if (simple_cst_equal (val, *length) != 1)
734 return false;
735 }
736
737 *length = val;
738 return true;
739 }
740
491e0b9b
RG
741 /* If ARG is registered for SSA update we cannot look at its defining
742 statement. */
743 if (name_registered_for_update_p (arg))
744 return false;
745
cbdd87d4 746 /* If we were already here, break the infinite cycle. */
fcaa4ca4 747 if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
cbdd87d4 748 return true;
cbdd87d4
RG
749
750 var = arg;
751 def_stmt = SSA_NAME_DEF_STMT (var);
752
753 switch (gimple_code (def_stmt))
754 {
755 case GIMPLE_ASSIGN:
756 /* The RHS of the statement defining VAR must either have a
757 constant length or come from another SSA_NAME with a constant
758 length. */
759 if (gimple_assign_single_p (def_stmt)
760 || gimple_assign_unary_nop_p (def_stmt))
761 {
762 tree rhs = gimple_assign_rhs1 (def_stmt);
763 return get_maxval_strlen (rhs, length, visited, type);
764 }
763dc99d
AP
765 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
766 {
767 tree op2 = gimple_assign_rhs2 (def_stmt);
768 tree op3 = gimple_assign_rhs3 (def_stmt);
769 return get_maxval_strlen (op2, length, visited, type)
770 && get_maxval_strlen (op3, length, visited, type);
771 }
cbdd87d4
RG
772 return false;
773
774 case GIMPLE_PHI:
775 {
776 /* All the arguments of the PHI node must have the same constant
777 length. */
778 unsigned i;
779
780 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
781 {
782 tree arg = gimple_phi_arg (def_stmt, i)->def;
783
784 /* If this PHI has itself as an argument, we cannot
785 determine the string length of this argument. However,
786 if we can find a constant string length for the other
787 PHI args then we can still be sure that this is a
788 constant string length. So be optimistic and just
789 continue with the next argument. */
790 if (arg == gimple_phi_result (def_stmt))
791 continue;
792
793 if (!get_maxval_strlen (arg, length, visited, type))
794 return false;
795 }
796 }
797 return true;
798
799 default:
800 return false;
801 }
802}
803
804
805/* Fold builtin call in statement STMT. Returns a simplified tree.
806 We may return a non-constant expression, including another call
807 to a different function and with different arguments, e.g.,
808 substituting memcpy for strcpy when the string length is known.
809 Note that some builtins expand into inline code that may not
810 be valid in GIMPLE. Callers must take care. */
811
812tree
813gimple_fold_builtin (gimple stmt)
814{
815 tree result, val[3];
816 tree callee, a;
817 int arg_idx, type;
818 bitmap visited;
819 bool ignore;
820 int nargs;
821 location_t loc = gimple_location (stmt);
822
823 gcc_assert (is_gimple_call (stmt));
824
825 ignore = (gimple_call_lhs (stmt) == NULL);
826
827 /* First try the generic builtin folder. If that succeeds, return the
828 result directly. */
829 result = fold_call_stmt (stmt, ignore);
830 if (result)
831 {
832 if (ignore)
833 STRIP_NOPS (result);
834 return result;
835 }
836
837 /* Ignore MD builtins. */
838 callee = gimple_call_fndecl (stmt);
839 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
840 return NULL_TREE;
841
e7f9dae0
JJ
842 /* Give up for always_inline inline builtins until they are
843 inlined. */
844 if (avoid_folding_inline_builtin (callee))
845 return NULL_TREE;
846
cbdd87d4
RG
847 /* If the builtin could not be folded, and it has no argument list,
848 we're done. */
849 nargs = gimple_call_num_args (stmt);
850 if (nargs == 0)
851 return NULL_TREE;
852
853 /* Limit the work only for builtins we know how to simplify. */
854 switch (DECL_FUNCTION_CODE (callee))
855 {
856 case BUILT_IN_STRLEN:
857 case BUILT_IN_FPUTS:
858 case BUILT_IN_FPUTS_UNLOCKED:
859 arg_idx = 0;
860 type = 0;
861 break;
862 case BUILT_IN_STRCPY:
863 case BUILT_IN_STRNCPY:
864 arg_idx = 1;
865 type = 0;
866 break;
867 case BUILT_IN_MEMCPY_CHK:
868 case BUILT_IN_MEMPCPY_CHK:
869 case BUILT_IN_MEMMOVE_CHK:
870 case BUILT_IN_MEMSET_CHK:
871 case BUILT_IN_STRNCPY_CHK:
f3fc9b80 872 case BUILT_IN_STPNCPY_CHK:
cbdd87d4
RG
873 arg_idx = 2;
874 type = 2;
875 break;
876 case BUILT_IN_STRCPY_CHK:
877 case BUILT_IN_STPCPY_CHK:
878 arg_idx = 1;
879 type = 1;
880 break;
881 case BUILT_IN_SNPRINTF_CHK:
882 case BUILT_IN_VSNPRINTF_CHK:
883 arg_idx = 1;
884 type = 2;
885 break;
886 default:
887 return NULL_TREE;
888 }
889
890 if (arg_idx >= nargs)
891 return NULL_TREE;
892
893 /* Try to use the dataflow information gathered by the CCP process. */
894 visited = BITMAP_ALLOC (NULL);
895 bitmap_clear (visited);
896
897 memset (val, 0, sizeof (val));
898 a = gimple_call_arg (stmt, arg_idx);
899 if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
900 val[arg_idx] = NULL_TREE;
901
902 BITMAP_FREE (visited);
903
904 result = NULL_TREE;
905 switch (DECL_FUNCTION_CODE (callee))
906 {
907 case BUILT_IN_STRLEN:
908 if (val[0] && nargs == 1)
909 {
910 tree new_val =
911 fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);
912
913 /* If the result is not a valid gimple value, or not a cast
6e4da084 914 of a valid gimple value, then we cannot use the result. */
cbdd87d4 915 if (is_gimple_val (new_val)
3dbe9454 916 || (CONVERT_EXPR_P (new_val)
cbdd87d4
RG
917 && is_gimple_val (TREE_OPERAND (new_val, 0))))
918 return new_val;
919 }
920 break;
921
922 case BUILT_IN_STRCPY:
923 if (val[1] && is_gimple_val (val[1]) && nargs == 2)
924 result = fold_builtin_strcpy (loc, callee,
925 gimple_call_arg (stmt, 0),
926 gimple_call_arg (stmt, 1),
927 val[1]);
928 break;
929
930 case BUILT_IN_STRNCPY:
931 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
932 result = fold_builtin_strncpy (loc, callee,
933 gimple_call_arg (stmt, 0),
934 gimple_call_arg (stmt, 1),
935 gimple_call_arg (stmt, 2),
936 val[1]);
937 break;
938
939 case BUILT_IN_FPUTS:
940 if (nargs == 2)
941 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
942 gimple_call_arg (stmt, 1),
943 ignore, false, val[0]);
944 break;
945
946 case BUILT_IN_FPUTS_UNLOCKED:
947 if (nargs == 2)
948 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
949 gimple_call_arg (stmt, 1),
950 ignore, true, val[0]);
951 break;
952
953 case BUILT_IN_MEMCPY_CHK:
954 case BUILT_IN_MEMPCPY_CHK:
955 case BUILT_IN_MEMMOVE_CHK:
956 case BUILT_IN_MEMSET_CHK:
957 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
958 result = fold_builtin_memory_chk (loc, callee,
959 gimple_call_arg (stmt, 0),
960 gimple_call_arg (stmt, 1),
961 gimple_call_arg (stmt, 2),
962 gimple_call_arg (stmt, 3),
963 val[2], ignore,
964 DECL_FUNCTION_CODE (callee));
965 break;
966
967 case BUILT_IN_STRCPY_CHK:
968 case BUILT_IN_STPCPY_CHK:
969 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
970 result = fold_builtin_stxcpy_chk (loc, callee,
971 gimple_call_arg (stmt, 0),
972 gimple_call_arg (stmt, 1),
973 gimple_call_arg (stmt, 2),
974 val[1], ignore,
975 DECL_FUNCTION_CODE (callee));
976 break;
977
978 case BUILT_IN_STRNCPY_CHK:
f3fc9b80 979 case BUILT_IN_STPNCPY_CHK:
cbdd87d4 980 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
f3fc9b80 981 result = fold_builtin_stxncpy_chk (loc, gimple_call_arg (stmt, 0),
cbdd87d4
RG
982 gimple_call_arg (stmt, 1),
983 gimple_call_arg (stmt, 2),
984 gimple_call_arg (stmt, 3),
f3fc9b80
RG
985 val[2], ignore,
986 DECL_FUNCTION_CODE (callee));
cbdd87d4
RG
987 break;
988
989 case BUILT_IN_SNPRINTF_CHK:
990 case BUILT_IN_VSNPRINTF_CHK:
991 if (val[1] && is_gimple_val (val[1]))
992 result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
993 DECL_FUNCTION_CODE (callee));
994 break;
995
996 default:
997 gcc_unreachable ();
998 }
999
1000 if (result && ignore)
1001 result = fold_ignored_result (result);
1002 return result;
1003}
1004
1ae6fe9b 1005
49c471e3
MJ
/* Return a binfo to be used for devirtualization of calls based on an object
   represented by a declaration (i.e. a global or automatically allocated one)
   or NULL if it cannot be found or is not safe.  CST is expected to be an
   ADDR_EXPR of such object or the function will return NULL.  Currently it is
   safe to use such binfo only if it has no base binfo (i.e. no ancestors).  */

tree
gimple_extract_devirt_binfo_from_cst (tree cst)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base, type, expected_type, binfo;
  bool last_artificial = false;

  /* Only consider ADDR_EXPRs of RECORD_TYPE objects, and only when
     devirtualization is enabled at all.  */
  if (!flag_devirtualize
      || TREE_CODE (cst) != ADDR_EXPR
      || TREE_CODE (TREE_TYPE (TREE_TYPE (cst))) != RECORD_TYPE)
    return NULL_TREE;

  cst = TREE_OPERAND (cst, 0);
  expected_type = TREE_TYPE (cst);
  base = get_ref_base_and_extent (cst, &offset, &size, &max_size);
  type = TREE_TYPE (base);
  /* Require an exactly-sized (max_size == size, both known) access to a
     declaration of record type; anything else is not reliable enough to
     base devirtualization on.  */
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || TREE_CODE (type) != RECORD_TYPE)
    return NULL_TREE;

  /* Find the sub-object the constant actually refers to and mark whether it is
     an artificial one (as opposed to a user-defined one).  */
  while (true)
    {
      HOST_WIDE_INT pos, size;
      tree fld;

      /* Stop once we reached a sub-object of the expected type.  */
      if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (expected_type))
	break;
      if (offset < 0)
	return NULL_TREE;

      /* Locate the field of TYPE that contains bit OFFSET.  */
      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	{
	  if (TREE_CODE (fld) != FIELD_DECL)
	    continue;

	  pos = int_bit_position (fld);
	  size = tree_low_cst (DECL_SIZE (fld), 1);
	  if (pos <= offset && (pos + size) > offset)
	    break;
	}
      /* We can only descend into record-typed fields.  */
      if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
	return NULL_TREE;

      last_artificial = DECL_ARTIFICIAL (fld);
      type = TREE_TYPE (fld);
      /* Make OFFSET relative to the sub-object we descended into.  */
      offset -= pos;
    }
  /* Artificial sub-objects are ancestors, we do not want to use them for
     devirtualization, at least not here.  */
  if (last_artificial)
    return NULL_TREE;
  binfo = TYPE_BINFO (type);
  /* A binfo with base binfos (ancestors) is not safe to use here (see the
     head comment).  */
  if (!binfo || BINFO_N_BASE_BINFOS (binfo) > 0)
    return NULL_TREE;
  else
    return binfo;
}
1073
cbdd87d4
RG
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.
   When INPLACE is true, only folds that keep the statement itself are
   attempted (argument *& folding and devirtualization).  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gimple stmt = gsi_stmt (*gsi);
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
	tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	if (tmp)
	  {
	    gimple_call_set_arg (stmt, i, tmp);
	    changed = true;
	  }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  /* The callee expression already resolves to a function decl;
	     drop the OBJ_TYPE_REF wrapper.  */
	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  changed = true;
	}
      else
	{
	  /* Try devirtualization through the binfo of the static object
	     the call is based on.  */
	  tree obj = OBJ_TYPE_REF_OBJECT (callee);
	  tree binfo = gimple_extract_devirt_binfo_from_cst (obj);
	  if (binfo)
	    {
	      HOST_WIDE_INT token
		= TREE_INT_CST_LOW (OBJ_TYPE_REF_TOKEN (callee));
	      tree fndecl = gimple_get_virt_method_for_binfo (token, binfo);
	      if (fndecl)
		{
		  gimple_call_set_fndecl (stmt, fndecl);
		  changed = true;
		}
	    }
	}
    }

  /* The remaining folds may replace the statement, which is not allowed
     in-place.  */
  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  callee = gimple_call_fndecl (stmt);
  if (callee && DECL_BUILT_IN (callee))
    {
      tree result = gimple_fold_builtin (stmt);
      if (result)
	{
	  /* Replace the call with RESULT, gimplifying if the plain
	     replacement does not yield valid gimple.  */
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
      else if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
	/* Machine-dependent builtins are folded by the target hook.  */
	changed |= targetm.gimple_fold_builtin (gsi);
    }

  return changed;
}
1147
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  Returns true if the statement was changed.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  unsigned i;

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	unsigned old_num_ops = gimple_num_ops (stmt);
	enum tree_code subcode = gimple_assign_rhs_code (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs;
	/* First canonicalize operand order.  This avoids building new
	   trees if this is the only thing fold would later do.  */
	if ((commutative_tree_code (subcode)
	     || commutative_ternary_tree_code (subcode))
	    && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
				     gimple_assign_rhs2 (stmt), false))
	  {
	    tree tem = gimple_assign_rhs1 (stmt);
	    gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
	    gimple_assign_set_rhs2 (stmt, tem);
	    changed = true;
	  }
	new_rhs = fold_gimple_assign (gsi);
	/* Convert the folded RHS back to the type of the LHS if folding
	   changed it to an incompatible type.  */
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	/* In-place folding must not grow the number of operands.  */
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
	size_t noutputs;
	const char **oconstraints;
	const char *constraint;
	bool allows_mem, allows_reg;

	noutputs = gimple_asm_noutputs (stmt);
	oconstraints = XALLOCAVEC (const char *, noutputs);

	for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (stmt, i);
	    tree op = TREE_VALUE (link);
	    oconstraints[i]
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, true)) != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
	for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (stmt, i);
	    tree op = TREE_VALUE (link);
	    constraint
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    /* Inputs folded to a memory reference are only valid when the
	       constraint allows memory but not a register.  */
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, !allows_reg && allows_mem))
		   != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      /* Fold &*ref to &folded-ref, preserving the original
		 pointer type of the bound value.  */
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref, false);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    default:;
    }

  /* Re-fetch the statement; the folds above may have replaced it.  */
  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  return changed;
}
1296
1297/* Fold the statement pointed to by GSI. In some cases, this function may
1298 replace the whole statement with a new one. Returns true iff folding
1299 makes any changes.
1300 The statement pointed to by GSI should be in valid gimple form but may
1301 be in unfolded state as resulting from for example constant propagation
1302 which can produce *&x = 0. */
1303
1304bool
1305fold_stmt (gimple_stmt_iterator *gsi)
1306{
1307 return fold_stmt_1 (gsi, false);
1308}
1309
59401b92 1310/* Perform the minimal folding on statement *GSI. Only operations like
cbdd87d4
RG
1311 *&x created by constant propagation are handled. The statement cannot
1312 be replaced with a new one. Return true if the statement was
1313 changed, false otherwise.
59401b92 1314 The statement *GSI should be in valid gimple form but may
cbdd87d4
RG
1315 be in unfolded state as resulting from for example constant propagation
1316 which can produce *&x = 0. */
1317
1318bool
59401b92 1319fold_stmt_inplace (gimple_stmt_iterator *gsi)
cbdd87d4 1320{
59401b92
RG
1321 gimple stmt = gsi_stmt (*gsi);
1322 bool changed = fold_stmt_1 (gsi, true);
1323 gcc_assert (gsi_stmt (*gsi) == stmt);
cbdd87d4
RG
1324 return changed;
1325}
1326
e89065a1
SL
1327/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
1328 if EXPR is null or we don't know how.
1329 If non-null, the result always has boolean type. */
1330
1331static tree
1332canonicalize_bool (tree expr, bool invert)
1333{
1334 if (!expr)
1335 return NULL_TREE;
1336 else if (invert)
1337 {
1338 if (integer_nonzerop (expr))
1339 return boolean_false_node;
1340 else if (integer_zerop (expr))
1341 return boolean_true_node;
1342 else if (TREE_CODE (expr) == SSA_NAME)
1343 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
1344 build_int_cst (TREE_TYPE (expr), 0));
1345 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1346 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
1347 boolean_type_node,
1348 TREE_OPERAND (expr, 0),
1349 TREE_OPERAND (expr, 1));
1350 else
1351 return NULL_TREE;
1352 }
1353 else
1354 {
1355 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1356 return expr;
1357 if (integer_nonzerop (expr))
1358 return boolean_true_node;
1359 else if (integer_zerop (expr))
1360 return boolean_false_node;
1361 else if (TREE_CODE (expr) == SSA_NAME)
1362 return fold_build2 (NE_EXPR, boolean_type_node, expr,
1363 build_int_cst (TREE_TYPE (expr), 0));
1364 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1365 return fold_build2 (TREE_CODE (expr),
1366 boolean_type_node,
1367 TREE_OPERAND (expr, 0),
1368 TREE_OPERAND (expr, 1));
1369 else
1370 return NULL_TREE;
1371 }
1372}
1373
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      /* name is equivalent to (name != 0) and to (name == 1).  */
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      /* Otherwise look at EXPR's defining assignment.  */
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  /* (name != 0) and (name == 1) are NAME's own comparison.  */
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  /* (name == 0) and (name != 1) are the inverted comparison.  */
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}
1431
1432/* Check to see if two boolean expressions OP1 and OP2 are logically
1433 equivalent. */
1434
1435static bool
1436same_bool_result_p (const_tree op1, const_tree op2)
1437{
1438 /* Simple cases first. */
1439 if (operand_equal_p (op1, op2, 0))
1440 return true;
1441
1442 /* Check the cases where at least one of the operands is a comparison.
1443 These are a bit smarter than operand_equal_p in that they apply some
1444 identifies on SSA_NAMEs. */
1445 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
1446 && same_bool_comparison_p (op1, TREE_CODE (op2),
1447 TREE_OPERAND (op2, 0),
1448 TREE_OPERAND (op2, 1)))
1449 return true;
1450 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
1451 && same_bool_comparison_p (op2, TREE_CODE (op1),
1452 TREE_OPERAND (op1, 0),
1453 TREE_OPERAND (op1, 1)))
1454 return true;
1455
1456 /* Default case. */
1457 return false;
1458}
1459
/* Forward declarations for some mutually recursive functions.
   The and_* and or_* helpers call into each other when applying
   De Morgan's law to inverted operands.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple stmt,
			  enum tree_code code2, tree op2a, tree op2b);
1480
1481/* Helper function for and_comparisons_1: try to simplify the AND of the
1482 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1483 If INVERT is true, invert the value of the VAR before doing the AND.
1484 Return NULL_EXPR if we can't simplify this to a single expression. */
1485
1486static tree
1487and_var_with_comparison (tree var, bool invert,
1488 enum tree_code code2, tree op2a, tree op2b)
1489{
1490 tree t;
1491 gimple stmt = SSA_NAME_DEF_STMT (var);
1492
1493 /* We can only deal with variables whose definitions are assignments. */
1494 if (!is_gimple_assign (stmt))
1495 return NULL_TREE;
1496
1497 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1498 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1499 Then we only have to consider the simpler non-inverted cases. */
1500 if (invert)
1501 t = or_var_with_comparison_1 (stmt,
1502 invert_tree_comparison (code2, false),
1503 op2a, op2b);
1504 else
1505 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
1506 return canonicalize_bool (t, invert);
1507}
1508
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  /* The comparison tests OP2A for truth.  */
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  /* The comparison tests OP2A for falsehood.  */
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2,
				  op2a,
				  op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b))  */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case. where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
1671
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      /* CMP orders the two constant bounds: sign of op1b - op2b.  */
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
	 return that or FALSE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	      else
		return boolean_false_node;
	    }
	}
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	      else
		return boolean_false_node;
	    }
	}

      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp != 0); break;
	    case NE_EXPR: val = (cmp == 0); break;
	    case LT_EXPR: val = (cmp >= 0); break;
	    case GT_EXPR: val = (cmp <= 0); break;
	    case LE_EXPR: val = (cmp > 0); break;
	    case GE_EXPR: val = (cmp < 0); break;
	    default:
	      val = false;
	    }
	  /* When the NE test is implied by the second comparison, the
	     second comparison alone is the answer.  */
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == NE_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp <= 0); break;
	    case GT_EXPR: val = (cmp >= 0); break;
	    case LE_EXPR: val = (cmp < 0); break;
	    case GE_EXPR: val = (cmp > 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Chose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Likewise chose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LE_EXPR && code2 == GE_EXPR)
		   || (code1 == GE_EXPR && code2 == LE_EXPR)))
	/* (x <= C AND x >= C) simplifies to x == C.  */
	return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges.  */
      else if (cmp <= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_false_node;
      else if (cmp >= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* (NAME == 0) and (NAME != 1) effectively invert NAME.  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
						       code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (arg, invert, code2,
						      op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
1925
1926/* Try to simplify the AND of two comparisons, specified by
1927 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
1928 If this can be simplified to a single expression (without requiring
1929 introducing more SSA variables to hold intermediate values),
1930 return the resulting tree. Otherwise return NULL_TREE.
1931 If the result expression is non-null, it has boolean type. */
1932
1933tree
1934maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
1935 enum tree_code code2, tree op2a, tree op2b)
1936{
1937 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
1938 if (t)
1939 return t;
1940 else
1941 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
1942}
1943
1944/* Helper function for or_comparisons_1: try to simplify the OR of the
1945 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1946 If INVERT is true, invert the value of VAR before doing the OR.
1947 Return NULL_EXPR if we can't simplify this to a single expression. */
1948
1949static tree
1950or_var_with_comparison (tree var, bool invert,
1951 enum tree_code code2, tree op2a, tree op2b)
1952{
1953 tree t;
1954 gimple stmt = SSA_NAME_DEF_STMT (var);
1955
1956 /* We can only deal with variables whose definitions are assignments. */
1957 if (!is_gimple_assign (stmt))
1958 return NULL_TREE;
1959
1960 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1961 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
1962 Then we only have to consider the simpler non-inverted cases. */
1963 if (invert)
1964 t = and_var_with_comparison_1 (stmt,
1965 invert_tree_comparison (code2, false),
1966 op2a, op2b);
1967 else
1968 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
1969 return canonicalize_bool (t, invert);
1970}
1971
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (gimple stmt,
			  enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true .  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      /* (op2a != 0) and (op2a == 1) both test op2a for truth.  */
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      /* (op2a == 0) and (op2a != 1) both test op2a for falsehood.  */
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    /* var OR !var is always true.  */
	    return boolean_true_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (innercode,
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 code2,
				 op2a,
				 op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      /* Simplification of the first inner operand, saved while we try
	 the second one.  */
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
	 inner1 OR (inner1 AND inner2) => inner1
	 !inner1 OR (inner1 OR inner2) => true
	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
      */
      if (inner1 == true_test_var)
	return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the OR across the inner tests.
	 Compute the first partial result, (inner1 OR (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (t OR inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_or)
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (integer_zerop (t))
		return inner2;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner2 OR (op2a code op2b)))  */
	  else if (integer_zerop (t))
	    return boolean_false_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (inner1 OR t)
	     => (t OR partial)  */
	  if (is_or)
	    {
	      if (integer_zerop (t))
		return inner1;
	      else if (integer_onep (t))
		return boolean_true_node;
	      /* If both are the same, we can apply the identity
		 (x OR x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner1 OR (op2a code2 op2b)))
	     => (t AND partial)  */
	  else
	    {
	      if (integer_zerop (t))
		return boolean_false_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed AND expression.  The
		     interesting case is when at least one is true.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_onep (partial))
		    return t;
		  else if (integer_onep (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  /* No simplification found.  */
  return NULL_TREE;
}
2135
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      /* cmp < 0, == 0, > 0 as op1b compares to op2b.  */
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a != op1b), we should either be able to
	 return that or TRUE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      /* VAL is whether op1b itself satisfies the second test;
		 if so the union covers everything.  */
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	    }
	}
      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	    }
	}

      /* See if an equality test is redundant with the other comparison.  */
      else if (code1 == EQ_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default:
	      val = false;
	    }
	  /* If op1b satisfies the second test, the equality adds nothing.  */
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == EQ_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Chose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Likewise chose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LT_EXPR && code2 == GT_EXPR)
		   || (code1 == GT_EXPR && code2 == LT_EXPR)))
	/* (x < C) OR (x > C) is simply (x != C).  */
	return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all.  */
      else if (cmp >= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_true_node;
      else if (cmp <= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_true_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* INVERT is true for (NAME == 0) / (NAME != 1), i.e. tests of
	 NAME's falsehood.  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return or_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ORed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
			{
			  if (!result)
			    result = boolean_true_node;
			  else if (!integer_onep (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
						       code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = or_var_with_comparison (arg, invert, code2,
						     op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
2389
2390/* Try to simplify the OR of two comparisons, specified by
2391 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2392 If this can be simplified to a single expression (without requiring
2393 introducing more SSA variables to hold intermediate values),
2394 return the resulting tree. Otherwise return NULL_TREE.
2395 If the result expression is non-null, it has boolean type. */
2396
2397tree
2398maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2399 enum tree_code code2, tree op2a, tree op2b)
2400{
2401 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2402 if (t)
2403 return t;
2404 else
2405 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2406}
cfef45c8
RG
2407
2408
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.

   Either NULL_TREE, a simplified but non-constant or a constant
   is returned.

   ??? This should go into a gimple-fold-inline.h file to be eventually
   privatized with the single valueize function used in the various TUs
   to avoid the indirect function call overhead.  */

tree
gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
{
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        enum tree_code subcode = gimple_assign_rhs_code (stmt);

        switch (get_gimple_rhs_class (subcode))
          {
          case GIMPLE_SINGLE_RHS:
            {
              tree rhs = gimple_assign_rhs1 (stmt);
              enum tree_code_class kind = TREE_CODE_CLASS (subcode);

              if (TREE_CODE (rhs) == SSA_NAME)
                {
                  /* If the RHS is an SSA_NAME, return its known constant value,
                     if any.  */
                  return (*valueize) (rhs);
                }
              /* Handle propagating invariant addresses into address
                 operations.  */
              else if (TREE_CODE (rhs) == ADDR_EXPR
                       && !is_gimple_min_invariant (rhs))
                {
                  HOST_WIDE_INT offset = 0;
                  tree base;
                  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
                                                          &offset,
                                                          valueize);
                  if (base
                      && (CONSTANT_CLASS_P (base)
                          || decl_address_invariant_p (base)))
                    return build_invariant_address (TREE_TYPE (rhs),
                                                    base, offset);
                }
              /* A vector CONSTRUCTOR with one value per lane folds to a
                 VECTOR_CST when every valueized element is constant.  */
              else if (TREE_CODE (rhs) == CONSTRUCTOR
                       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
                       && (CONSTRUCTOR_NELTS (rhs)
                           == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
                {
                  unsigned i;
                  tree val, *vec;

                  vec = XALLOCAVEC (tree,
                                    TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
                  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
                    {
                      val = (*valueize) (val);
                      if (TREE_CODE (val) == INTEGER_CST
                          || TREE_CODE (val) == REAL_CST
                          || TREE_CODE (val) == FIXED_CST)
                        vec[i] = val;
                      else
                        /* Any non-constant lane makes the whole fold fail.  */
                        return NULL_TREE;
                    }

                  return build_vector (TREE_TYPE (rhs), vec);
                }

              if (kind == tcc_reference)
                {
                  /* References whose operand is an SSA name are folded on
                     the valueized operand.  */
                  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
                       || TREE_CODE (rhs) == REALPART_EXPR
                       || TREE_CODE (rhs) == IMAGPART_EXPR)
                      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
                      return fold_unary_loc (EXPR_LOCATION (rhs),
                                             TREE_CODE (rhs),
                                             TREE_TYPE (rhs), val);
                    }
                  else if (TREE_CODE (rhs) == BIT_FIELD_REF
                           && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
                      return fold_ternary_loc (EXPR_LOCATION (rhs),
                                               TREE_CODE (rhs),
                                               TREE_TYPE (rhs), val,
                                               TREE_OPERAND (rhs, 1),
                                               TREE_OPERAND (rhs, 2));
                    }
                  else if (TREE_CODE (rhs) == MEM_REF
                           && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
                      if (TREE_CODE (val) == ADDR_EXPR
                          && is_gimple_min_invariant (val))
                        {
                          tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
                                                  unshare_expr (val),
                                                  TREE_OPERAND (rhs, 1));
                          if (tem)
                            rhs = tem;
                        }
                    }
                  /* Fall back to folding the (possibly rewritten) reference
                     against constant initializers.  */
                  return fold_const_aggregate_ref_1 (rhs, valueize);
                }
              else if (kind == tcc_declaration)
                return get_symbol_constant_value (rhs);
              return rhs;
            }

          case GIMPLE_UNARY_RHS:
            {
              /* Handle unary operators that can appear in GIMPLE form.
                 Note that we know the single operand must be a constant,
                 so this should almost always return a simplified RHS.  */
              tree lhs = gimple_assign_lhs (stmt);
              tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));

              /* Conversions are useless for CCP purposes if they are
                 value-preserving.  Thus the restrictions that
                 useless_type_conversion_p places for restrict qualification
                 of pointer types should not apply here.
                 Substitution later will only substitute to allowed places.  */
              if (CONVERT_EXPR_CODE_P (subcode)
                  && POINTER_TYPE_P (TREE_TYPE (lhs))
                  && POINTER_TYPE_P (TREE_TYPE (op0))
                  && TYPE_ADDR_SPACE (TREE_TYPE (lhs))
                     == TYPE_ADDR_SPACE (TREE_TYPE (op0))
                  && TYPE_MODE (TREE_TYPE (lhs))
                     == TYPE_MODE (TREE_TYPE (op0)))
                return op0;

              return
                fold_unary_ignore_overflow_loc (loc, subcode,
                                                gimple_expr_type (stmt), op0);
            }

          case GIMPLE_BINARY_RHS:
            {
              /* Handle binary operators that can appear in GIMPLE form.  */
              tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
              tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));

              /* Translate &x + CST into an invariant form suitable for
                 further propagation.  */
              if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
                  && TREE_CODE (op0) == ADDR_EXPR
                  && TREE_CODE (op1) == INTEGER_CST)
                {
                  tree off = fold_convert (ptr_type_node, op1);
                  return build_fold_addr_expr_loc
                          (loc,
                           fold_build2 (MEM_REF,
                                        TREE_TYPE (TREE_TYPE (op0)),
                                        unshare_expr (op0), off));
                }

              return fold_binary_loc (loc, subcode,
                                      gimple_expr_type (stmt), op0, op1);
            }

          case GIMPLE_TERNARY_RHS:
            {
              /* Handle ternary operators that can appear in GIMPLE form.  */
              tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
              tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
              tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));

              /* Fold embedded expressions in ternary codes.  */
              if ((subcode == COND_EXPR
                   || subcode == VEC_COND_EXPR)
                  && COMPARISON_CLASS_P (op0))
                {
                  tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
                  tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
                  tree tem = fold_binary_loc (loc, TREE_CODE (op0),
                                              TREE_TYPE (op0), op00, op01);
                  if (tem)
                    op0 = tem;
                }

              return fold_ternary_loc (loc, subcode,
                                       gimple_expr_type (stmt), op0, op1, op2);
            }

          default:
            gcc_unreachable ();
          }
      }

    case GIMPLE_CALL:
      {
        tree fn;

        if (gimple_call_internal_p (stmt))
          /* No folding yet for these functions.  */
          return NULL_TREE;

        fn = (*valueize) (gimple_call_fn (stmt));
        if (TREE_CODE (fn) == ADDR_EXPR
            && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
            && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
          {
            /* Rebuild the call with valueized arguments and let the
               generic builtin folder have a go at it.  */
            tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
            tree call, retval;
            unsigned i;
            for (i = 0; i < gimple_call_num_args (stmt); ++i)
              args[i] = (*valueize) (gimple_call_arg (stmt, i));
            call = build_call_array_loc (loc,
                                         gimple_call_return_type (stmt),
                                         fn, gimple_call_num_args (stmt), args);
            retval = fold_call_expr (EXPR_LOCATION (call), call, false);
            if (retval)
              /* fold_call_expr wraps the result inside a NOP_EXPR.  */
              STRIP_NOPS (retval);
            return retval;
          }
        return NULL_TREE;
      }

    default:
      return NULL_TREE;
    }
}
2638
2639/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2640 Returns NULL_TREE if folding to a constant is not possible, otherwise
2641 returns a constant according to is_gimple_min_invariant. */
2642
2643tree
2644gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
2645{
2646 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
2647 if (res && is_gimple_min_invariant (res))
2648 return res;
2649 return NULL_TREE;
2650}
2651
2652
2653/* The following set of functions are supposed to fold references using
2654 their constant initializers. */
2655
2656static tree fold_ctor_reference (tree type, tree ctor,
2657 unsigned HOST_WIDE_INT offset,
c44c2088 2658 unsigned HOST_WIDE_INT size, tree);
cfef45c8
RG
2659
/* See if we can find constructor defining value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */
static tree
get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
		      tree (*valueize)(tree))
{
  HOST_WIDE_INT bit_offset2, size, max_size;
  if (TREE_CODE (base) == MEM_REF)
    {
      /* Fold the MEM_REF's constant offset into *BIT_OFFSET and strip
	 down to the underlying address.  */
      if (!integer_zerop (TREE_OPERAND (base, 1)))
	{
	  if (!host_integerp (TREE_OPERAND (base, 1), 0))
	    return NULL_TREE;
	  *bit_offset += (mem_ref_offset (base).low
			  * BITS_PER_UNIT);
	}

      if (valueize
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
      if (!const_value_known_p (base))
	return NULL_TREE;

      /* Fallthru.  */
    case CONST_DECL:
      /* No explicit initializer on a static/external decl means the
	 storage is implicitly zeroed; signal that with error_mark_node.  */
      if (!DECL_INITIAL (base)
	  && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
	return error_mark_node;
      /* Do not return an error_mark_node DECL_INITIAL.  LTO uses this
	 as special marker (_not_ zero ...) for its own purposes.  */
      if (DECL_INITIAL (base) == error_mark_node)
	return NULL_TREE;
      return DECL_INITIAL (base);

    case ARRAY_REF:
    case COMPONENT_REF:
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
      /* Only handle references with a known, fixed extent.  */
      if (max_size == -1 || size != max_size)
	return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case STRING_CST:
    case CONSTRUCTOR:
      return base;

    default:
      return NULL_TREE;
    }
}
2728
2729/* CTOR is STRING_CST. Fold reference of type TYPE and size SIZE
2730 to the memory at bit OFFSET.
2731
2732 We do only simple job of folding byte accesses. */
2733
2734static tree
2735fold_string_cst_ctor_reference (tree type, tree ctor,
2736 unsigned HOST_WIDE_INT offset,
2737 unsigned HOST_WIDE_INT size)
2738{
2739 if (INTEGRAL_TYPE_P (type)
2740 && (TYPE_MODE (type)
2741 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2742 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2743 == MODE_INT)
2744 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
2745 && size == BITS_PER_UNIT
2746 && !(offset % BITS_PER_UNIT))
2747 {
2748 offset /= BITS_PER_UNIT;
2749 if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
2750 return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
2751 [offset]));
2752 /* Folding
2753 const char a[20]="hello";
2754 return a[10];
2755
2756 might lead to offset greater than string length. In this case we
2757 know value is either initialized to 0 or out of bounds. Return 0
2758 in both cases. */
2759 return build_zero_cst (type);
2760 }
2761 return NULL_TREE;
2762}
2763
/* CTOR is CONSTRUCTOR of an array type.  Fold reference of type TYPE and size
   SIZE to the memory at bit OFFSET.  FROM_DECL, if non-null, is the decl the
   constructor was taken from (used when canonicalizing matched values).  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size,
			   tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;
  double_int low_bound, elt_size;
  double_int index, max_index;
  double_int access_index;
  tree domain_type = NULL_TREE, index_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects makes no sense.  */
      gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
      index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
      low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = double_int_zero;
  /* Static constructors for variably sized objects makes no sense.  */
  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
	      == INTEGER_CST);
  elt_size =
    tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));


  /* We can handle only constantly sized accesses that are known to not
     be larger than size of array element.  */
  if (!TYPE_SIZE_UNIT (type)
      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
      || elt_size.slt (tree_to_double_int (TYPE_SIZE_UNIT (type))))
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = double_int::from_uhwi (offset / BITS_PER_UNIT)
		 .udiv (elt_size, TRUNC_DIV_EXPR);
  access_index += low_bound;
  if (index_type)
    /* Wrap the index into the domain's precision/signedness so the
       comparisons against constructor indices below are meaningful.  */
    access_index = access_index.ext (TYPE_PRECISION (index_type),
				     TYPE_UNSIGNED (index_type));

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  /* See if the array field is large enough to span whole access.  We do not
     care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
    return NULL_TREE;

  /* Start one below the low bound so the "next index" rule below yields
     LOW_BOUND for the first unindexed element.  */
  index = low_bound - double_int_one;
  if (index_type)
    index = index.ext (TYPE_PRECISION (index_type), TYPE_UNSIGNED (index_type));

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
    {
      /* Array constructor might explicitly set index, or specify range
	 or leave index NULL meaning that it is next index after previous
	 one.  */
      if (cfield)
	{
	  if (TREE_CODE (cfield) == INTEGER_CST)
	    max_index = index = tree_to_double_int (cfield);
	  else
	    {
	      gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
	      index = tree_to_double_int (TREE_OPERAND (cfield, 0));
	      max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
	    }
	}
      else
	{
	  index += double_int_one;
	  if (index_type)
	    index = index.ext (TYPE_PRECISION (index_type),
			       TYPE_UNSIGNED (index_type));
	  max_index = index;
	}

      /* Do we have match?  */
      if (access_index.cmp (index, 1) >= 0
	  && access_index.cmp (max_index, 1) <= 0)
	return fold_ctor_reference (type, cval, inner_offset, size,
				    from_decl);
    }
  /* When memory is not explicitly mentioned in constructor,
     it is 0 (or out of range).  */
  return build_zero_cst (type);
}
2862
/* CTOR is CONSTRUCTOR of an aggregate or vector.
   Fold reference of type TYPE and size SIZE to the memory at bit OFFSET.
   FROM_DECL, if non-null, is the decl the constructor was taken from.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);
      double_int bitoffset;
      double_int byte_offset_cst = tree_to_double_int (byte_offset);
      double_int bits_per_unit_cst = double_int::from_uhwi (BITS_PER_UNIT);
      double_int bitoffset_end, access_end;

      /* Variable sized objects in static constructors makes no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      bitoffset = tree_to_double_int (field_offset)
		  + byte_offset_cst * bits_per_unit_cst;
      /* Compute bit offset where the field ends.  */
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + tree_to_double_int (field_size);
      else
	/* Flexible array member: no known end.  */
	bitoffset_end = double_int_zero;

      access_end = double_int::from_uhwi (offset)
		   + double_int::from_uhwi (size);

      /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
	 [BITOFFSET, BITOFFSET_END)?  */
      if (access_end.cmp (bitoffset, 0) > 0
	  && (field_size == NULL_TREE
	      || double_int::from_uhwi (offset).slt (bitoffset_end)))
	{
	  double_int inner_offset = double_int::from_uhwi (offset) - bitoffset;
	  /* We do have overlap.  Now see if field is large enough to
	     cover the access.  Give up for accesses spanning multiple
	     fields.  */
	  if (access_end.cmp (bitoffset_end, 0) > 0)
	    return NULL_TREE;
	  if (double_int::from_uhwi (offset).slt (bitoffset))
	    return NULL_TREE;
	  /* Recurse into the matching field's value with the offset
	     rebased to the field's start.  */
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl);
	}
    }
  /* When memory is not explicitly mentioned in constructor, it is 0.  */
  return build_zero_cst (type);
}
2928
2929/* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
2930 to the memory at bit OFFSET. */
2931
2932static tree
2933fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
c44c2088 2934 unsigned HOST_WIDE_INT size, tree from_decl)
cfef45c8
RG
2935{
2936 tree ret;
2937
2938 /* We found the field with exact match. */
2939 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
2940 && !offset)
9d60be38 2941 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
cfef45c8
RG
2942
2943 /* We are at the end of walk, see if we can view convert the
2944 result. */
2945 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
2946 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
2947 && operand_equal_p (TYPE_SIZE (type),
2948 TYPE_SIZE (TREE_TYPE (ctor)), 0))
2949 {
9d60be38 2950 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
cfef45c8
RG
2951 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
2952 if (ret)
2953 STRIP_NOPS (ret);
2954 return ret;
2955 }
2956 if (TREE_CODE (ctor) == STRING_CST)
2957 return fold_string_cst_ctor_reference (type, ctor, offset, size);
2958 if (TREE_CODE (ctor) == CONSTRUCTOR)
2959 {
2960
eb8f1123
RG
2961 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
2962 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
c44c2088
JH
2963 return fold_array_ctor_reference (type, ctor, offset, size,
2964 from_decl);
cfef45c8 2965 else
c44c2088
JH
2966 return fold_nonarray_ctor_reference (type, ctor, offset, size,
2967 from_decl);
cfef45c8
RG
2968 }
2969
2970 return NULL_TREE;
2971}
2972
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  HOST_WIDE_INT offset, size, max_size;
  tree tem;

  /* Folding a volatile reference would elide the required access.  */
  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  /* A bare declaration folds to its constant value, if any.  */
  if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
    return get_symbol_constant_value (t);

  /* Reads from string literals fold directly.  */
  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && TREE_CODE (idx) == INTEGER_CST)
	{
	  tree low_bound, unit_size;
	  double_int doffset;

	  /* If the resulting bit-offset is constant, track it.  The comma
	     expressions bind the sub-results while keeping the whole test
	     a single condition.  */
	  if ((low_bound = array_ref_low_bound (t),
	       TREE_CODE (low_bound) == INTEGER_CST)
	      && (unit_size = array_ref_element_size (t),
		  host_integerp (unit_size, 1))
	      && (doffset = (TREE_INT_CST (idx) - TREE_INT_CST (low_bound))
			    .sext (TYPE_PRECISION (TREE_TYPE (idx))),
		  doffset.fits_shwi ()))
	    {
	      /* Convert the element index to a bit offset.  */
	      offset = doffset.to_shwi ();
	      offset *= TREE_INT_CST_LOW (unit_size);
	      offset *= BITS_PER_UNIT;

	      base = TREE_OPERAND (t, 0);
	      ctor = get_base_constructor (base, &offset, valueize);
	      /* Empty constructor.  Always fold to 0.  */
	      if (ctor == error_mark_node)
		return build_zero_cst (TREE_TYPE (t));
	      /* Out of bound array access.  Value is undefined,
		 but don't fold.  */
	      if (offset < 0)
		return NULL_TREE;
	      /* We can not determine ctor.  */
	      if (!ctor)
		return NULL_TREE;
	      return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					  TREE_INT_CST_LOW (unit_size)
					  * BITS_PER_UNIT,
					  base);
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (max_size == -1 || max_size != size)
	return NULL_TREE;
      /* We can not determine ctor.  */
      if (!ctor)
	return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (offset < 0)
	return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
				  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	/* Fold the complex operand, then extract the requested part.  */
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
3084
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates; NULL_TREE
   otherwise.  Convenience wrapper around fold_const_aggregate_ref_1
   with no SSA-name valueization.  */

tree
fold_const_aggregate_ref (tree t)
{
  return fold_const_aggregate_ref_1 (t, NULL);
}
06bc3ec7 3090
/* Return a declaration of a function which an OBJ_TYPE_REF references.  TOKEN
   is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).  Returns NULL_TREE whenever the vtable slot
   cannot be resolved to a function that is referencable from the
   current unit.  */

tree
gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo)
{
  unsigned HOST_WIDE_INT offset, size;
  tree v, fn, vtable;

  vtable = v = BINFO_VTABLE (known_binfo);
  /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;

  /* BINFO_VTABLE may be &vtable plus a byte offset; turn that into a
     bit offset into the vtable's initializer.  */
  if (TREE_CODE (v) == POINTER_PLUS_EXPR)
    {
      offset = tree_low_cst (TREE_OPERAND (v, 1), 1) * BITS_PER_UNIT;
      v = TREE_OPERAND (v, 0);
    }
  else
    offset = 0;

  if (TREE_CODE (v) != ADDR_EXPR)
    return NULL_TREE;
  v = TREE_OPERAND (v, 0);

  /* We can only look the slot up in a defined virtual table variable
     with a usable initializer.  */
  if (TREE_CODE (v) != VAR_DECL
      || !DECL_VIRTUAL_P (v)
      || !DECL_INITIAL (v)
      || DECL_INITIAL (v) == error_mark_node)
    return NULL_TREE;
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))), 1);
  /* TOKEN indexes vtable slots of SIZE bits each.  */
  offset += token * size;
  fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), DECL_INITIAL (v),
			    offset, size, vtable);
  /* A zero entry means the slot is unused (e.g. pure virtual).  */
  if (!fn || integer_zerop (fn))
    return NULL_TREE;
  gcc_assert (TREE_CODE (fn) == ADDR_EXPR
	      || TREE_CODE (fn) == FDESC_EXPR);
  fn = TREE_OPERAND (fn, 0);
  gcc_assert (TREE_CODE (fn) == FUNCTION_DECL);

  /* When cgraph node is missing and function is not public, we cannot
     devirtualize.  This can happen in WHOPR when the actual method
     ends up in other partition, because we found devirtualization
     possibility too late.  */
  if (!can_refer_decl_in_current_unit_p (fn, vtable))
    return NULL_TREE;

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_get_create_node (fn);

  return fn;
}
3150
/* Return true iff VAL is a gimple expression that is known to be
   non-negative.  Restricted to floating-point inputs.  A false return
   means "not provably non-negative", not "negative".  */

bool
gimple_val_nonnegative_real_p (tree val)
{
  gimple def_stmt;

  gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));

  /* Use existing logic for non-gimple trees.  */
  if (tree_expr_nonnegative_p (val))
    return true;

  if (TREE_CODE (val) != SSA_NAME)
    return false;

  /* Currently we look only at the immediately defining statement
     to make this determination, since recursion on defining
     statements of operands can lead to quadratic behavior in the
     worst case.  This is expected to catch almost all occurrences
     in practice.  It would be possible to implement limited-depth
     recursion if important cases are lost.  Alternatively, passes
     that need this information (such as the pow/powi lowering code
     in the cse_sincos pass) could be revised to provide it through
     dataflow propagation.  */

  def_stmt = SSA_NAME_DEF_STMT (val);

  if (is_gimple_assign (def_stmt))
    {
      tree op0, op1;

      /* See fold-const.c:tree_expr_nonnegative_p for additional
	 cases that could be handled with recursion.  */

      switch (gimple_assign_rhs_code (def_stmt))
	{
	case ABS_EXPR:
	  /* Always true for floating-point operands.  */
	  return true;

	case MULT_EXPR:
	  /* True if the two operands are identical (since we are
	     restricted to floating-point inputs).  */
	  op0 = gimple_assign_rhs1 (def_stmt);
	  op1 = gimple_assign_rhs2 (def_stmt);

	  if (op0 == op1
	      || operand_equal_p (op0, op1, 0))
	    return true;

	  /* Fall through: a product of distinct operands is not
	     provably non-negative.  */
	default:
	  return false;
	}
    }
  else if (is_gimple_call (def_stmt))
    {
      tree fndecl = gimple_call_fndecl (def_stmt);
      if (fndecl
	  && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	{
	  tree arg1;

	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    /* Builtins whose result is non-negative over their entire
	       (defined) domain.  */
	    CASE_FLT_FN (BUILT_IN_ACOS):
	    CASE_FLT_FN (BUILT_IN_ACOSH):
	    CASE_FLT_FN (BUILT_IN_CABS):
	    CASE_FLT_FN (BUILT_IN_COSH):
	    CASE_FLT_FN (BUILT_IN_ERFC):
	    CASE_FLT_FN (BUILT_IN_EXP):
	    CASE_FLT_FN (BUILT_IN_EXP10):
	    CASE_FLT_FN (BUILT_IN_EXP2):
	    CASE_FLT_FN (BUILT_IN_FABS):
	    CASE_FLT_FN (BUILT_IN_FDIM):
	    CASE_FLT_FN (BUILT_IN_HYPOT):
	    CASE_FLT_FN (BUILT_IN_POW10):
	      return true;

	    CASE_FLT_FN (BUILT_IN_SQRT):
	      /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
		 nonnegative inputs.  */
	      if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
		return true;

	      break;

	    CASE_FLT_FN (BUILT_IN_POWI):
	      /* True if the second argument is an even integer.  */
	      arg1 = gimple_call_arg (def_stmt, 1);

	      if (TREE_CODE (arg1) == INTEGER_CST
		  && (TREE_INT_CST_LOW (arg1) & 1) == 0)
		return true;

	      break;

	    CASE_FLT_FN (BUILT_IN_POW):
	      /* True if the second argument is an even integer-valued
		 real.  */
	      arg1 = gimple_call_arg (def_stmt, 1);

	      if (TREE_CODE (arg1) == REAL_CST)
		{
		  REAL_VALUE_TYPE c;
		  HOST_WIDE_INT n;

		  c = TREE_REAL_CST (arg1);
		  n = real_to_integer (&c);

		  /* Only an exact even integer counts; x**2.5 for
		     negative x is a NaN.  */
		  if ((n & 1) == 0)
		    {
		      REAL_VALUE_TYPE cint;
		      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
		      if (real_identical (&c, &cint))
			return true;
		    }
		}

	      break;

	    default:
	      return false;
	    }
	}
    }

  return false;
}