]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/gimple-fold.c
tree-vect-slp.c (vect_supported_load_permutation_p): Avoid redef of outer loop index...
[thirdparty/gcc.git] / gcc / gimple-fold.c
CommitLineData
cbdd87d4 1/* Statement simplification on GIMPLE.
23a5b65a 2 Copyright (C) 2010-2014 Free Software Foundation, Inc.
cbdd87d4
RG
3 Split out from tree-ssa-ccp.c.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it
8under the terms of the GNU General Public License as published by the
9Free Software Foundation; either version 3, or (at your option) any
10later version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT
13ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "tm.h"
25#include "tree.h"
d8a2d370
DN
26#include "stringpool.h"
27#include "expr.h"
28#include "stmt.h"
29#include "stor-layout.h"
cbdd87d4 30#include "flags.h"
cbdd87d4 31#include "function.h"
7ee2468b 32#include "dumpfile.h"
442b4905 33#include "bitmap.h"
2fb9a547
AM
34#include "basic-block.h"
35#include "tree-ssa-alias.h"
36#include "internal-fn.h"
37#include "gimple-fold.h"
38#include "gimple-expr.h"
39#include "is-a.h"
18f429e2 40#include "gimple.h"
45b0be94 41#include "gimplify.h"
5be5c238 42#include "gimple-iterator.h"
442b4905
AM
43#include "gimple-ssa.h"
44#include "tree-ssanames.h"
45#include "tree-into-ssa.h"
46#include "tree-dfa.h"
7a300452 47#include "tree-ssa.h"
cbdd87d4 48#include "tree-ssa-propagate.h"
cbdd87d4 49#include "target.h"
450ad0cd
JH
50#include "ipa-utils.h"
51#include "gimple-pretty-print.h"
4484a35a 52#include "tree-ssa-address.h"
862d0b35 53#include "langhooks.h"
19e51b40 54#include "gimplify-me.h"
cbdd87d4 55
/* Return true when DECL can be referenced from current unit.
   FROM_DECL (if non-null) specifies constructor of variable DECL was taken
   from.  We can get declarations that are not possible to reference for
   various reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
	In this case we have static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred by external vtables that
	we devirtualize only during final compilation stage.
	At this time we already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  /* Abstract decls (e.g. abstract origins used for debug info) never have
     a body we could reference.  */
  if (DECL_ABSTRACT (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
    return true;

  /* Static objects can be referred only if they were not optimized out yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      snode = symtab_get_node (decl);
      if (!snode)
	return false;
      node = dyn_cast <cgraph_node> (snode);
      /* A function inlined everywhere has no offline copy to refer to.  */
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var.  */
  if (!from_decl
      || TREE_CODE (from_decl) != VAR_DECL
      || !DECL_EXTERNAL (from_decl)
      || (flag_ltrans
	  && symtab_get_node (from_decl)->in_other_partition))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && (!(snode = symtab_get_node (decl)) || !snode->in_other_partition))
    return false;
  /* When function is public, we always can introduce new reference.
     Exception are the COMDAT functions where introducing a direct
     reference imply need to include function body in the current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We are not at ltrans stage; so don't worry about WHOPR.
     Also when still gimplifying all referred comdat functions will be
     produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when corresponding vtable is output.
     This is however not possible - ABI specify that COMDATs are output in
     units where they are used and when the other unit was compiled with LTO
     it is possible that vtable was kept public while the function itself
     was privatized.  */
  if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
    return true;

  /* OK we are seeing either COMDAT or static variable.  In this case we must
     check that the definition is still around so we can refer it.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      node = cgraph_get_node (decl);
      /* Check that we still have function body and that we didn't take
	 the decision to eliminate offline copy of the function yet.
	 The second is important when devirtualization happens during final
	 compilation stage when making a new reference no longer makes callee
	 to be compiled.  */
      if (!node || !node->definition || node->global.inlined_to)
	{
	  gcc_checking_assert (!TREE_ASM_WRITTEN (decl));
	  return false;
	}
    }
  else if (TREE_CODE (decl) == VAR_DECL)
    {
      vnode = varpool_get_node (decl);
      if (!vnode || !vnode->definition)
	{
	  gcc_checking_assert (!TREE_ASM_WRITTEN (decl));
	  return false;
	}
    }
  return true;
}
165
/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it into
   acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL.
   Returns the canonicalized value, or NULL_TREE when the address cannot be
   legitimately referenced from the current unit.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  /* Rewrite PTR p+ CST into &MEM_REF[ptr, CST] so the whole expression
     becomes a single invariant address.  */
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  /* Replace the compound literal with its underlying decl so the
	     address refers to a real object.  */
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      /* Give up if the referenced decl is not available in this unit
	 (see can_refer_decl_in_current_unit_p for the possible reasons).  */
      if ((TREE_CODE (base) == VAR_DECL
	   || TREE_CODE (base) == FUNCTION_DECL)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      /* Taking the address makes the variable addressable.  */
      if (TREE_CODE (base) == VAR_DECL)
	TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_get_create_node (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      /* Re-add the conversion stripped by STRIP_NOPS if it is still needed.  */
      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  if (TREE_OVERFLOW_P (cval))
    return drop_tree_overflow (cval);
  return orig_cval;
}
cbdd87d4
RG
226
227/* If SYM is a constant variable with known value, return the value.
228 NULL_TREE is returned otherwise. */
229
230tree
231get_symbol_constant_value (tree sym)
232{
6a6dac52
JH
233 tree val = ctor_for_folding (sym);
234 if (val != error_mark_node)
cbdd87d4 235 {
cbdd87d4
RG
236 if (val)
237 {
9d60be38 238 val = canonicalize_constructor_val (unshare_expr (val), sym);
1389294c 239 if (val && is_gimple_min_invariant (val))
17f39a39 240 return val;
1389294c
JH
241 else
242 return NULL_TREE;
cbdd87d4
RG
243 }
244 /* Variables declared 'const' without an initializer
245 have zero as the initializer if they may not be
246 overridden at link or run time. */
247 if (!val
cbdd87d4
RG
248 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
249 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
e8160c9a 250 return build_zero_cst (TREE_TYPE (sym));
cbdd87d4
RG
251 }
252
253 return NULL_TREE;
254}
255
256
cbdd87d4
RG
257
/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  Returns the folded reference
   or NULL_TREE when nothing was simplified.  */

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  /* T points at the base of the handled-component chain of EXPR; folding
     through *T rewrites EXPR in place.  */
  tree *t = &expr;
  tree result;

  /* Fold component extractions of constants directly.  */
  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
			   TREE_CODE (expr),
			   TREE_TYPE (expr),
			   TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
			     TREE_CODE (expr),
			     TREE_TYPE (expr),
			     TREE_OPERAND (expr, 0),
			     TREE_OPERAND (expr, 1),
			     TREE_OPERAND (expr, 2));

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM_REFs invariant address operand.  Do this first
     to avoid feeding non-canonical MEM_REFs elsewhere.  */
  if (TREE_CODE (*t) == MEM_REF
      && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
    {
      bool volatile_p = TREE_THIS_VOLATILE (*t);
      tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
			      TREE_OPERAND (*t, 0),
			      TREE_OPERAND (*t, 1));
      if (tem)
	{
	  /* fold_binary does not preserve the volatile flag; restore it.  */
	  TREE_THIS_VOLATILE (tem) = volatile_p;
	  *t = tem;
	  /* Retry folding with the canonicalized base.  */
	  tem = maybe_fold_reference (expr, is_lhs);
	  if (tem)
	    return tem;
	  return expr;
	}
    }

  /* RHS references may fold all the way to a constant.  */
  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  /* Fold back MEM_REFs to reference trees.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && (TREE_THIS_VOLATILE (*t)
	  == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
      && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
      && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
	  == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
      /* We have to look out here to not drop a required conversion
	 from the rhs to the lhs if is_lhs, but we don't have the
	 rhs here to verify that.  Thus require strict type
	 compatibility.  */
      && types_compatible_p (TREE_TYPE (*t),
			     TREE_TYPE (TREE_OPERAND
					  (TREE_OPERAND (*t, 0), 0))))
    {
      tree tem;
      *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tem = maybe_fold_reference (expr, is_lhs);
      if (tem)
	return tem;
      return expr;
    }
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  tem = maybe_fold_reference (expr, is_lhs);
	  if (tem)
	    return tem;
	  return expr;
	}
    }

  return NULL_TREE;
}
353
354
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    /* Try to devirtualize: if the polymorphic call target list is
	       final and has at most one entry, the OBJ_TYPE_REF resolves
	       to a known function (or is provably unreachable).  */
	    else if (flag_devirtualize && virtual_method_call_p (val))
	      {
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (val, &final);
		if (final && targets.length () <= 1)
		  {
		    tree fndecl;
		    if (targets.length () == 1)
		      fndecl = targets[0]->decl;
		    else
		      fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
		    val = fold_convert (TREE_TYPE (val), fndecl);
		    STRIP_USELESS_TYPE_CONVERSION (val);
		    return val;
		  }
	      }

	  }
	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      /* &MEM_REF[p, 0] simplifies to p itself.  */
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		 && (CONSTRUCTOR_NELTS (rhs)
		     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (TREE_CODE (val) != INTEGER_CST
		  && TREE_CODE (val) != REAL_CST
		  && TREE_CODE (val) != FIXED_CST)
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return get_symbol_constant_value (rhs);

	/* If we couldn't fold the RHS, hand over to the generic
	   fold routines.  */
	if (result == NULL_TREE)
	  result = fold (rhs);

	/* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
	   that may have been added by fold, and "useless" type
	   conversions that might now be apparent due to propagation.  */
	STRIP_USELESS_TYPE_CONVERSION (result);

	if (result != rhs && valid_gimple_rhs_p (result))
	  return result;

	return NULL_TREE;
      }
      break;

    case GIMPLE_UNARY_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
	if (result)
	  {
	    /* If the operation was a conversion do _not_ mark a
	       resulting constant with TREE_OVERFLOW if the original
	       constant was not.  These conversions have implementation
	       defined behavior and retaining the TREE_OVERFLOW flag
	       here would confuse later passes such as VRP.  */
	    if (CONVERT_EXPR_CODE_P (subcode)
		&& TREE_CODE (result) == INTEGER_CST
		&& TREE_CODE (rhs) == INTEGER_CST)
	      TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);

	    STRIP_USELESS_TYPE_CONVERSION (result);
	    if (valid_gimple_rhs_p (result))
	      return result;
	  }
      }
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to canonicalize for boolean-typed X the comparisons
	 X == 0, X == 1, X != 0, and X != 1.  */
      if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	  || gimple_assign_rhs_code (stmt) == NE_EXPR)
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  tree op1 = gimple_assign_rhs1 (stmt);
	  tree op2 = gimple_assign_rhs2 (stmt);
	  tree type = TREE_TYPE (op1);

	  /* Check whether the comparison operands are of the same boolean
	     type as the result type is.
	     Check that second operand is an integer-constant with value
	     one or zero.  */
	  if (TREE_CODE (op2) == INTEGER_CST
	      && (integer_zerop (op2) || integer_onep (op2))
	      && useless_type_conversion_p (TREE_TYPE (lhs), type))
	    {
	      enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
	      bool is_logical_not = false;

	      /* X == 0 and X != 1 is a logical-not of X
		 X == 1 and X != 0 is X  */
	      if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		  || (cmp_code == NE_EXPR && integer_onep (op2)))
		is_logical_not = true;

	      if (is_logical_not == false)
		result = op1;
	      /* Only for one-bit precision typed X the transformation
		 !X -> ~X is valid.  */
	      else if (TYPE_PRECISION (type) == 1)
		result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
				     type, op1);
	      /* Otherwise we use !X -> X ^ 1.  */
	      else
		result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
				     type, op1, build_int_cst (type, 1));

	    }
	}

      if (!result)
	result = fold_binary_loc (loc, subcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)),
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_TERNARY_RHS:
      /* Try to fold a conditional expression.  */
      if (gimple_assign_rhs_code (stmt) == COND_EXPR)
	{
	  tree op0 = gimple_assign_rhs1 (stmt);
	  tree tem;
	  bool set = false;
	  location_t cond_loc = gimple_location (stmt);

	  if (COMPARISON_CLASS_P (op0))
	    {
	      /* Defer overflow warnings until we know the fold result is
		 actually usable as a GIMPLE rhs.  */
	      fold_defer_overflow_warnings ();
	      tem = fold_binary_loc (cond_loc,
				     TREE_CODE (op0), TREE_TYPE (op0),
				     TREE_OPERAND (op0, 0),
				     TREE_OPERAND (op0, 1));
	      /* This is actually a conditional expression, not a GIMPLE
		 conditional statement, however, the valid_gimple_rhs_p
		 test still applies.  */
	      set = (tem && is_gimple_condexpr (tem)
		     && valid_gimple_rhs_p (tem));
	      fold_undefer_overflow_warnings (set, stmt, 0);
	    }
	  else if (is_gimple_min_invariant (op0))
	    {
	      tem = op0;
	      set = true;
	    }
	  else
	    return NULL_TREE;

	  if (set)
	    result = fold_build3_loc (cond_loc, COND_EXPR,
				      TREE_TYPE (gimple_assign_lhs (stmt)), tem,
				      gimple_assign_rhs2 (stmt),
				      gimple_assign_rhs3 (stmt));
	}

      if (!result)
	result = fold_ternary_loc (loc, subcode,
				   TREE_TYPE (gimple_assign_lhs (stmt)),
				   gimple_assign_rhs1 (stmt),
				   gimple_assign_rhs2 (stmt),
				   gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
597
598/* Attempt to fold a conditional statement. Return true if any changes were
599 made. We only attempt to fold the condition expression, and do not perform
600 any transformation that would require alteration of the cfg. It is
601 assumed that the operands have been previously folded. */
602
603static bool
604fold_gimple_cond (gimple stmt)
605{
606 tree result = fold_binary_loc (gimple_location (stmt),
607 gimple_cond_code (stmt),
608 boolean_type_node,
609 gimple_cond_lhs (stmt),
610 gimple_cond_rhs (stmt));
611
612 if (result)
613 {
614 STRIP_USELESS_TYPE_CONVERSION (result);
615 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
616 {
617 gimple_cond_set_condition_from_tree (stmt, result);
618 return true;
619 }
620 }
621
622 return false;
623}
624
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;
  gimple laststore;
  tree reaching_vuse;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), true);
	  return;
	}
    }
  else
    {
      /* Gimplify EXPR into a temporary and assign that to the original
	 call's LHS so its uses keep seeing the same SSA name.  */
      tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  laststore = NULL;
  for (i = gsi_last (stmts); !gsi_end_p (i); gsi_prev (&i))
    {
      new_stmt = gsi_stmt (i);
      /* Statements that may store need a VDEF: non-register single
	 assignments and calls that are not NOVOPS/pure/const/noreturn.  */
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  /* The last store in the sequence (first seen walking backward)
	     reuses the replaced statement's VDEF; earlier stores get
	     fresh SSA names.  */
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  reaching_vuse = gimple_vuse (stmt);
  for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
    {
      new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
739
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */

static bool
get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	  && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_maxval_strlen (TREE_OPERAND (aop0, 0),
				      length, visited, type);
	}

      if (type == 2)
	{
	  /* TYPE == 2: ARG itself is the value; it must be a non-negative
	     integer constant.  */
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length)
	{
	  if (type > 0)
	    {
	      /* For maximum queries keep the larger of the two values.  */
	      if (TREE_CODE (*length) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*length, val))
		*length = val;
	      return true;
	    }
	  /* For exact length (TYPE == 0) all sources must agree.  */
	  else if (simple_cst_equal (val, *length) != 1)
	    return false;
	}

      *length = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
	 constant length or come from another SSA_NAME with a constant
	 length.  */
      if (gimple_assign_single_p (def_stmt)
	  || gimple_assign_unary_nop_p (def_stmt))
	{
	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  return get_maxval_strlen (rhs, length, visited, type);
	}
      else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	{
	  /* A COND_EXPR is handled by requiring both arms to have a
	     determinable length/value.  */
	  tree op2 = gimple_assign_rhs2 (def_stmt);
	  tree op3 = gimple_assign_rhs3 (def_stmt);
	  return get_maxval_strlen (op2, length, visited, type)
		 && get_maxval_strlen (op3, length, visited, type);
	}
      return false;

    case GIMPLE_PHI:
      {
	/* All the arguments of the PHI node must have the same constant
	   length.  */
	unsigned i;

	for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	  {
	    tree arg = gimple_phi_arg (def_stmt, i)->def;

	    /* If this PHI has itself as an argument, we cannot
	       determine the string length of this argument.  However,
	       if we can find a constant string length for the other
	       PHI args then we can still be sure that this is a
	       constant string length.  So be optimistic and just
	       continue with the next argument.  */
	    if (arg == gimple_phi_result (def_stmt))
	      continue;

	    if (!get_maxval_strlen (arg, length, visited, type))
	      return false;
	  }
      }
      return true;

    default:
      return false;
    }
}
863
864
865/* Fold builtin call in statement STMT. Returns a simplified tree.
866 We may return a non-constant expression, including another call
867 to a different function and with different arguments, e.g.,
868 substituting memcpy for strcpy when the string length is known.
869 Note that some builtins expand into inline code that may not
870 be valid in GIMPLE. Callers must take care. */
871
872tree
873gimple_fold_builtin (gimple stmt)
874{
875 tree result, val[3];
876 tree callee, a;
877 int arg_idx, type;
878 bitmap visited;
879 bool ignore;
880 int nargs;
881 location_t loc = gimple_location (stmt);
882
cbdd87d4
RG
883 ignore = (gimple_call_lhs (stmt) == NULL);
884
885 /* First try the generic builtin folder. If that succeeds, return the
886 result directly. */
887 result = fold_call_stmt (stmt, ignore);
888 if (result)
889 {
890 if (ignore)
891 STRIP_NOPS (result);
5c944c6c
RB
892 else
893 result = fold_convert (gimple_call_return_type (stmt), result);
cbdd87d4
RG
894 return result;
895 }
896
897 /* Ignore MD builtins. */
898 callee = gimple_call_fndecl (stmt);
899 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
900 return NULL_TREE;
901
e7f9dae0
JJ
902 /* Give up for always_inline inline builtins until they are
903 inlined. */
904 if (avoid_folding_inline_builtin (callee))
905 return NULL_TREE;
906
cbdd87d4
RG
907 /* If the builtin could not be folded, and it has no argument list,
908 we're done. */
909 nargs = gimple_call_num_args (stmt);
910 if (nargs == 0)
911 return NULL_TREE;
912
913 /* Limit the work only for builtins we know how to simplify. */
914 switch (DECL_FUNCTION_CODE (callee))
915 {
916 case BUILT_IN_STRLEN:
917 case BUILT_IN_FPUTS:
918 case BUILT_IN_FPUTS_UNLOCKED:
919 arg_idx = 0;
920 type = 0;
921 break;
922 case BUILT_IN_STRCPY:
923 case BUILT_IN_STRNCPY:
9a7eefec 924 case BUILT_IN_STRCAT:
cbdd87d4
RG
925 arg_idx = 1;
926 type = 0;
927 break;
928 case BUILT_IN_MEMCPY_CHK:
929 case BUILT_IN_MEMPCPY_CHK:
930 case BUILT_IN_MEMMOVE_CHK:
931 case BUILT_IN_MEMSET_CHK:
932 case BUILT_IN_STRNCPY_CHK:
f3fc9b80 933 case BUILT_IN_STPNCPY_CHK:
cbdd87d4
RG
934 arg_idx = 2;
935 type = 2;
936 break;
937 case BUILT_IN_STRCPY_CHK:
938 case BUILT_IN_STPCPY_CHK:
939 arg_idx = 1;
940 type = 1;
941 break;
942 case BUILT_IN_SNPRINTF_CHK:
943 case BUILT_IN_VSNPRINTF_CHK:
944 arg_idx = 1;
945 type = 2;
946 break;
947 default:
948 return NULL_TREE;
949 }
950
951 if (arg_idx >= nargs)
952 return NULL_TREE;
953
954 /* Try to use the dataflow information gathered by the CCP process. */
955 visited = BITMAP_ALLOC (NULL);
956 bitmap_clear (visited);
957
958 memset (val, 0, sizeof (val));
959 a = gimple_call_arg (stmt, arg_idx);
960 if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
961 val[arg_idx] = NULL_TREE;
962
963 BITMAP_FREE (visited);
964
965 result = NULL_TREE;
966 switch (DECL_FUNCTION_CODE (callee))
967 {
968 case BUILT_IN_STRLEN:
969 if (val[0] && nargs == 1)
970 {
971 tree new_val =
972 fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);
973
974 /* If the result is not a valid gimple value, or not a cast
6e4da084 975 of a valid gimple value, then we cannot use the result. */
cbdd87d4 976 if (is_gimple_val (new_val)
3dbe9454 977 || (CONVERT_EXPR_P (new_val)
cbdd87d4
RG
978 && is_gimple_val (TREE_OPERAND (new_val, 0))))
979 return new_val;
980 }
981 break;
982
983 case BUILT_IN_STRCPY:
984 if (val[1] && is_gimple_val (val[1]) && nargs == 2)
985 result = fold_builtin_strcpy (loc, callee,
986 gimple_call_arg (stmt, 0),
987 gimple_call_arg (stmt, 1),
988 val[1]);
989 break;
990
991 case BUILT_IN_STRNCPY:
992 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
993 result = fold_builtin_strncpy (loc, callee,
994 gimple_call_arg (stmt, 0),
995 gimple_call_arg (stmt, 1),
996 gimple_call_arg (stmt, 2),
997 val[1]);
998 break;
999
9a7eefec
JJ
1000 case BUILT_IN_STRCAT:
1001 if (val[1] && is_gimple_val (val[1]) && nargs == 2)
1002 result = fold_builtin_strcat (loc, gimple_call_arg (stmt, 0),
1003 gimple_call_arg (stmt, 1),
1004 val[1]);
1005 break;
1006
cbdd87d4
RG
1007 case BUILT_IN_FPUTS:
1008 if (nargs == 2)
1009 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
1010 gimple_call_arg (stmt, 1),
1011 ignore, false, val[0]);
1012 break;
1013
1014 case BUILT_IN_FPUTS_UNLOCKED:
1015 if (nargs == 2)
1016 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
1017 gimple_call_arg (stmt, 1),
1018 ignore, true, val[0]);
1019 break;
1020
1021 case BUILT_IN_MEMCPY_CHK:
1022 case BUILT_IN_MEMPCPY_CHK:
1023 case BUILT_IN_MEMMOVE_CHK:
1024 case BUILT_IN_MEMSET_CHK:
1025 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
1026 result = fold_builtin_memory_chk (loc, callee,
1027 gimple_call_arg (stmt, 0),
1028 gimple_call_arg (stmt, 1),
1029 gimple_call_arg (stmt, 2),
1030 gimple_call_arg (stmt, 3),
1031 val[2], ignore,
1032 DECL_FUNCTION_CODE (callee));
1033 break;
1034
1035 case BUILT_IN_STRCPY_CHK:
1036 case BUILT_IN_STPCPY_CHK:
1037 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
1038 result = fold_builtin_stxcpy_chk (loc, callee,
1039 gimple_call_arg (stmt, 0),
1040 gimple_call_arg (stmt, 1),
1041 gimple_call_arg (stmt, 2),
1042 val[1], ignore,
1043 DECL_FUNCTION_CODE (callee));
1044 break;
1045
1046 case BUILT_IN_STRNCPY_CHK:
f3fc9b80 1047 case BUILT_IN_STPNCPY_CHK:
cbdd87d4 1048 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
f3fc9b80 1049 result = fold_builtin_stxncpy_chk (loc, gimple_call_arg (stmt, 0),
cbdd87d4
RG
1050 gimple_call_arg (stmt, 1),
1051 gimple_call_arg (stmt, 2),
1052 gimple_call_arg (stmt, 3),
f3fc9b80
RG
1053 val[2], ignore,
1054 DECL_FUNCTION_CODE (callee));
cbdd87d4
RG
1055 break;
1056
1057 case BUILT_IN_SNPRINTF_CHK:
1058 case BUILT_IN_VSNPRINTF_CHK:
1059 if (val[1] && is_gimple_val (val[1]))
1060 result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
1061 DECL_FUNCTION_CODE (callee));
1062 break;
1063
1064 default:
1065 gcc_unreachable ();
1066 }
1067
1068 if (result && ignore)
1069 result = fold_ignored_result (result);
1070 return result;
1071}
1072
1ae6fe9b 1073
/* Return a binfo to be used for devirtualization of calls based on an object
   represented by a declaration (i.e. a global or automatically allocated one)
   or NULL if it cannot be found or is not safe.  CST is expected to be an
   ADDR_EXPR of such object or the function will return NULL.  Currently it is
   safe to use such binfo only if it has no base binfo (i.e. no ancestors)
   EXPECTED_TYPE is type of the class virtual belongs to.  */

tree
gimple_extract_devirt_binfo_from_cst (tree cst, tree expected_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base, type, binfo;
  bool last_artificial = false;

  /* Only ADDR_EXPRs of RECORD_TYPE objects are candidates; also bail out
     entirely when devirtualization is disabled.  */
  if (!flag_devirtualize
      || TREE_CODE (cst) != ADDR_EXPR
      || TREE_CODE (TREE_TYPE (TREE_TYPE (cst))) != RECORD_TYPE)
    return NULL_TREE;

  cst = TREE_OPERAND (cst, 0);
  base = get_ref_base_and_extent (cst, &offset, &size, &max_size);
  type = TREE_TYPE (base);
  /* Require a declaration base with a known, exact extent (max_size == -1
     means the extent is unknown; max_size != size means it is variable).  */
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || TREE_CODE (type) != RECORD_TYPE)
    return NULL_TREE;

  /* Find the sub-object the constant actually refers to and mark whether it is
     an artificial one (as opposed to a user-defined one).  */
  while (true)
    {
      HOST_WIDE_INT pos, size;
      tree fld;

      /* Stop once we reached the type the virtual method belongs to.  */
      if (types_same_for_odr (type, expected_type))
	break;
      if (offset < 0)
	return NULL_TREE;

      /* Locate the field whose bit range contains OFFSET.  */
      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	{
	  if (TREE_CODE (fld) != FIELD_DECL)
	    continue;

	  pos = int_bit_position (fld);
	  size = tree_to_uhwi (DECL_SIZE (fld));
	  if (pos <= offset && (pos + size) > offset)
	    break;
	}
      if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
	return NULL_TREE;

      /* Descend into the field and continue with the remaining offset.  */
      last_artificial = DECL_ARTIFICIAL (fld);
      type = TREE_TYPE (fld);
      offset -= pos;
    }
  /* Artificial sub-objects are ancestors, we do not want to use them for
     devirtualization, at least not here.  */
  if (last_artificial)
    return NULL_TREE;
  binfo = TYPE_BINFO (type);
  /* Only a binfo without base binfos (no ancestors) is safe to use here.  */
  if (!binfo || BINFO_N_BASE_BINFOS (binfo) > 0)
    return NULL_TREE;
  else
    return binfo;
}
1141
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.
   If INPLACE is true, only transformations that keep the statement in
   place are performed (no replacement of the whole statement).  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gimple stmt = gsi_stmt (*gsi);
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
	tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	if (tmp)
	  {
	    gimple_call_set_arg (stmt, i, tmp);
	    changed = true;
	  }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  /* The OBJ_TYPE_REF already resolves to a known function; just
	     strip the wrapper.  Warn in the dump file if the resolved
	     target contradicts type-inheritance analysis.  */
	  if (dump_file && virtual_method_call_p (callee)
	      && !possible_polymorphic_call_target_p
		    (callee, cgraph_get_node (gimple_call_addr_fndecl
						(OBJ_TYPE_REF_EXPR (callee)))))
	    {
	      fprintf (dump_file,
		       "Type inheritance inconsistent devirtualization of ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, callee, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }

	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  changed = true;
	}
      else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
	{
	  bool final;
	  vec <cgraph_node *>targets
	    = possible_polymorphic_call_targets (callee, &final);
	  if (final && targets.length () <= 1)
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (targets.length () == 1)
		{
		  /* Exactly one possible target: devirtualize.  */
		  gimple_call_set_fndecl (stmt, targets[0]->decl);
		  changed = true;
		  /* If the call becomes noreturn, remove the lhs.  */
		  if (lhs && (gimple_call_flags (stmt) & ECF_NORETURN))
		    {
		      if (TREE_CODE (lhs) == SSA_NAME)
			{
			  /* Keep the SSA name defined by assigning it a
			     default definition of a fresh variable.  */
			  tree var = create_tmp_var (TREE_TYPE (lhs), NULL);
			  tree def = get_or_create_ssa_default_def (cfun, var);
			  gimple new_stmt = gimple_build_assign (lhs, def);
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      gimple_call_set_lhs (stmt, NULL_TREE);
		    }
		}
	      else
		{
		  /* No possible targets at all: the call is unreachable;
		     replace it with a __builtin_unreachable call.  */
		  tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
		  gimple new_stmt = gimple_build_call (fndecl, 0);
		  gimple_set_location (new_stmt, gimple_location (stmt));
		  if (lhs && TREE_CODE (lhs) == SSA_NAME)
		    {
		      tree var = create_tmp_var (TREE_TYPE (lhs), NULL);
		      tree def = get_or_create_ssa_default_def (cfun, var);
		      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
		      update_call_from_tree (gsi, def);
		    }
		  else
		    gsi_replace (gsi, new_stmt, true);
		  return true;
		}
	    }
	}
    }

  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (gimple_call_builtin_p (stmt))
    {
      tree result = gimple_fold_builtin (stmt);
      if (result)
	{
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
      else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
	/* Machine-dependent builtins are handled by the target hook.  */
	changed |= targetm.gimple_fold_builtin (gsi);
    }

  return changed;
}
1254
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  Returns true if any change was made.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  unsigned i;

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	unsigned old_num_ops = gimple_num_ops (stmt);
	enum tree_code subcode = gimple_assign_rhs_code (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs;
	/* First canonicalize operand order.  This avoids building new
	   trees if this is the only thing fold would later do.  */
	if ((commutative_tree_code (subcode)
	     || commutative_ternary_tree_code (subcode))
	    && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
				     gimple_assign_rhs2 (stmt), false))
	  {
	    tree tem = gimple_assign_rhs1 (stmt);
	    gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
	    gimple_assign_set_rhs2 (stmt, tem);
	    changed = true;
	  }
	new_rhs = fold_gimple_assign (gsi);
	/* Insert a conversion when folding produced a differently-typed
	   RHS that is not trivially compatible with the LHS type.  */
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	/* When folding in place, only accept the new RHS if it does not
	   need more operand slots than the original statement has.  */
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
	size_t noutputs;
	const char **oconstraints;
	const char *constraint;
	bool allows_mem, allows_reg;

	noutputs = gimple_asm_noutputs (stmt);
	oconstraints = XALLOCAVEC (const char *, noutputs);

	for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (stmt, i);
	    tree op = TREE_VALUE (link);
	    oconstraints[i]
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, true)) != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
	for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (stmt, i);
	    tree op = TREE_VALUE (link);
	    constraint
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    /* The constraint decides whether the operand may be folded
	       as an lvalue (memory) or must stay a register value.  */
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, !allows_reg && allows_mem))
		   != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      /* Fold the reference inside an ADDR_EXPR and rebuild the
	         address expression with the original type.  */
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref, false);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    default:;
    }

  /* The statement may have been replaced above; re-fetch it before
     looking at the LHS.  */
  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  return changed;
}
1403
1404/* Fold the statement pointed to by GSI. In some cases, this function may
1405 replace the whole statement with a new one. Returns true iff folding
1406 makes any changes.
1407 The statement pointed to by GSI should be in valid gimple form but may
1408 be in unfolded state as resulting from for example constant propagation
1409 which can produce *&x = 0. */
1410
1411bool
1412fold_stmt (gimple_stmt_iterator *gsi)
1413{
1414 return fold_stmt_1 (gsi, false);
1415}
1416
59401b92 1417/* Perform the minimal folding on statement *GSI. Only operations like
cbdd87d4
RG
1418 *&x created by constant propagation are handled. The statement cannot
1419 be replaced with a new one. Return true if the statement was
1420 changed, false otherwise.
59401b92 1421 The statement *GSI should be in valid gimple form but may
cbdd87d4
RG
1422 be in unfolded state as resulting from for example constant propagation
1423 which can produce *&x = 0. */
1424
1425bool
59401b92 1426fold_stmt_inplace (gimple_stmt_iterator *gsi)
cbdd87d4 1427{
59401b92
RG
1428 gimple stmt = gsi_stmt (*gsi);
1429 bool changed = fold_stmt_1 (gsi, true);
1430 gcc_assert (gsi_stmt (*gsi) == stmt);
cbdd87d4
RG
1431 return changed;
1432}
1433
e89065a1
SL
1434/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
1435 if EXPR is null or we don't know how.
1436 If non-null, the result always has boolean type. */
1437
1438static tree
1439canonicalize_bool (tree expr, bool invert)
1440{
1441 if (!expr)
1442 return NULL_TREE;
1443 else if (invert)
1444 {
1445 if (integer_nonzerop (expr))
1446 return boolean_false_node;
1447 else if (integer_zerop (expr))
1448 return boolean_true_node;
1449 else if (TREE_CODE (expr) == SSA_NAME)
1450 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
1451 build_int_cst (TREE_TYPE (expr), 0));
1452 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1453 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
1454 boolean_type_node,
1455 TREE_OPERAND (expr, 0),
1456 TREE_OPERAND (expr, 1));
1457 else
1458 return NULL_TREE;
1459 }
1460 else
1461 {
1462 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1463 return expr;
1464 if (integer_nonzerop (expr))
1465 return boolean_true_node;
1466 else if (integer_zerop (expr))
1467 return boolean_false_node;
1468 else if (TREE_CODE (expr) == SSA_NAME)
1469 return fold_build2 (NE_EXPR, boolean_type_node, expr,
1470 build_int_cst (TREE_TYPE (expr), 0));
1471 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1472 return fold_build2 (TREE_CODE (expr),
1473 boolean_type_node,
1474 TREE_OPERAND (expr, 0),
1475 TREE_OPERAND (expr, 1));
1476 else
1477 return NULL_TREE;
1478 }
1479}
1480
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      /* A boolean name equals (name != 0) and (name == 1).  */
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      /* Otherwise look through the name's defining assignment.  */
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  /* (name != 0) / (name == 1) keep the defining comparison;
	     (name == 0) / (name != 1) invert it.  */
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}
1538
1539/* Check to see if two boolean expressions OP1 and OP2 are logically
1540 equivalent. */
1541
1542static bool
1543same_bool_result_p (const_tree op1, const_tree op2)
1544{
1545 /* Simple cases first. */
1546 if (operand_equal_p (op1, op2, 0))
1547 return true;
1548
1549 /* Check the cases where at least one of the operands is a comparison.
1550 These are a bit smarter than operand_equal_p in that they apply some
1551 identifies on SSA_NAMEs. */
1552 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
1553 && same_bool_comparison_p (op1, TREE_CODE (op2),
1554 TREE_OPERAND (op2, 0),
1555 TREE_OPERAND (op2, 1)))
1556 return true;
1557 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
1558 && same_bool_comparison_p (op2, TREE_CODE (op1),
1559 TREE_OPERAND (op1, 0),
1560 TREE_OPERAND (op1, 1)))
1561 return true;
1562
1563 /* Default case. */
1564 return false;
1565}
1566
/* Forward declarations for some mutually recursive functions.
   The and_* and or_* families call into each other via De Morgan
   rewrites, so all six must be declared up front.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple stmt,
			  enum tree_code code2, tree op2a, tree op2b);
1588/* Helper function for and_comparisons_1: try to simplify the AND of the
1589 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1590 If INVERT is true, invert the value of the VAR before doing the AND.
1591 Return NULL_EXPR if we can't simplify this to a single expression. */
1592
1593static tree
1594and_var_with_comparison (tree var, bool invert,
1595 enum tree_code code2, tree op2a, tree op2b)
1596{
1597 tree t;
1598 gimple stmt = SSA_NAME_DEF_STMT (var);
1599
1600 /* We can only deal with variables whose definitions are assignments. */
1601 if (!is_gimple_assign (stmt))
1602 return NULL_TREE;
1603
1604 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1605 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1606 Then we only have to consider the simpler non-inverted cases. */
1607 if (invert)
1608 t = or_var_with_comparison_1 (stmt,
1609 invert_tree_comparison (code2, false),
1610 op2a, op2b);
1611 else
1612 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
1613 return canonicalize_bool (t, invert);
1614}
1615
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  /* The comparison tests op2a for truth:
	     var AND (var != 0) => var.  */
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  /* The comparison tests op2a for falsehood:
	     var AND (var == 0) => false.  */
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2,
				  op2a,
				  op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b)) */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b))) */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t) */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case. where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial) */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
1778
1779/* Try to simplify the AND of two comparisons defined by
1780 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1781 If this can be done without constructing an intermediate value,
1782 return the resulting tree; otherwise NULL_TREE is returned.
1783 This function is deliberately asymmetric as it recurses on SSA_DEFs
1784 in the first comparison but not the second. */
1785
1786static tree
1787and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1788 enum tree_code code2, tree op2a, tree op2b)
1789{
ae22ac3c 1790 tree truth_type = truth_type_for (TREE_TYPE (op1a));
31ed6226 1791
e89065a1
SL
1792 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
1793 if (operand_equal_p (op1a, op2a, 0)
1794 && operand_equal_p (op1b, op2b, 0))
1795 {
eb9820c0 1796 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
1797 tree t = combine_comparisons (UNKNOWN_LOCATION,
1798 TRUTH_ANDIF_EXPR, code1, code2,
31ed6226 1799 truth_type, op1a, op1b);
e89065a1
SL
1800 if (t)
1801 return t;
1802 }
1803
1804 /* Likewise the swapped case of the above. */
1805 if (operand_equal_p (op1a, op2b, 0)
1806 && operand_equal_p (op1b, op2a, 0))
1807 {
eb9820c0 1808 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
1809 tree t = combine_comparisons (UNKNOWN_LOCATION,
1810 TRUTH_ANDIF_EXPR, code1,
1811 swap_tree_comparison (code2),
31ed6226 1812 truth_type, op1a, op1b);
e89065a1
SL
1813 if (t)
1814 return t;
1815 }
1816
1817 /* If both comparisons are of the same value against constants, we might
1818 be able to merge them. */
1819 if (operand_equal_p (op1a, op2a, 0)
1820 && TREE_CODE (op1b) == INTEGER_CST
1821 && TREE_CODE (op2b) == INTEGER_CST)
1822 {
1823 int cmp = tree_int_cst_compare (op1b, op2b);
1824
1825 /* If we have (op1a == op1b), we should either be able to
1826 return that or FALSE, depending on whether the constant op1b
1827 also satisfies the other comparison against op2b. */
1828 if (code1 == EQ_EXPR)
1829 {
1830 bool done = true;
1831 bool val;
1832 switch (code2)
1833 {
1834 case EQ_EXPR: val = (cmp == 0); break;
1835 case NE_EXPR: val = (cmp != 0); break;
1836 case LT_EXPR: val = (cmp < 0); break;
1837 case GT_EXPR: val = (cmp > 0); break;
1838 case LE_EXPR: val = (cmp <= 0); break;
1839 case GE_EXPR: val = (cmp >= 0); break;
1840 default: done = false;
1841 }
1842 if (done)
1843 {
1844 if (val)
1845 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1846 else
1847 return boolean_false_node;
1848 }
1849 }
1850 /* Likewise if the second comparison is an == comparison. */
1851 else if (code2 == EQ_EXPR)
1852 {
1853 bool done = true;
1854 bool val;
1855 switch (code1)
1856 {
1857 case EQ_EXPR: val = (cmp == 0); break;
1858 case NE_EXPR: val = (cmp != 0); break;
1859 case LT_EXPR: val = (cmp > 0); break;
1860 case GT_EXPR: val = (cmp < 0); break;
1861 case LE_EXPR: val = (cmp >= 0); break;
1862 case GE_EXPR: val = (cmp <= 0); break;
1863 default: done = false;
1864 }
1865 if (done)
1866 {
1867 if (val)
1868 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1869 else
1870 return boolean_false_node;
1871 }
1872 }
1873
1874 /* Same business with inequality tests. */
1875 else if (code1 == NE_EXPR)
1876 {
1877 bool val;
1878 switch (code2)
1879 {
1880 case EQ_EXPR: val = (cmp != 0); break;
1881 case NE_EXPR: val = (cmp == 0); break;
1882 case LT_EXPR: val = (cmp >= 0); break;
1883 case GT_EXPR: val = (cmp <= 0); break;
1884 case LE_EXPR: val = (cmp > 0); break;
1885 case GE_EXPR: val = (cmp < 0); break;
1886 default:
1887 val = false;
1888 }
1889 if (val)
1890 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1891 }
1892 else if (code2 == NE_EXPR)
1893 {
1894 bool val;
1895 switch (code1)
1896 {
1897 case EQ_EXPR: val = (cmp == 0); break;
1898 case NE_EXPR: val = (cmp != 0); break;
1899 case LT_EXPR: val = (cmp <= 0); break;
1900 case GT_EXPR: val = (cmp >= 0); break;
1901 case LE_EXPR: val = (cmp < 0); break;
1902 case GE_EXPR: val = (cmp > 0); break;
1903 default:
1904 val = false;
1905 }
1906 if (val)
1907 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1908 }
1909
1910 /* Chose the more restrictive of two < or <= comparisons. */
1911 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
1912 && (code2 == LT_EXPR || code2 == LE_EXPR))
1913 {
1914 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
1915 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1916 else
1917 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1918 }
1919
1920 /* Likewise chose the more restrictive of two > or >= comparisons. */
1921 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
1922 && (code2 == GT_EXPR || code2 == GE_EXPR))
1923 {
1924 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
1925 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1926 else
1927 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1928 }
1929
1930 /* Check for singleton ranges. */
1931 else if (cmp == 0
1932 && ((code1 == LE_EXPR && code2 == GE_EXPR)
1933 || (code1 == GE_EXPR && code2 == LE_EXPR)))
1934 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
1935
1936 /* Check for disjoint ranges. */
1937 else if (cmp <= 0
1938 && (code1 == LT_EXPR || code1 == LE_EXPR)
1939 && (code2 == GT_EXPR || code2 == GE_EXPR))
1940 return boolean_false_node;
1941 else if (cmp >= 0
1942 && (code1 == GT_EXPR || code1 == GE_EXPR)
1943 && (code2 == LT_EXPR || code2 == LE_EXPR))
1944 return boolean_false_node;
1945 }
1946
1947 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
1948 NAME's definition is a truth value. See if there are any simplifications
1949 that can be done against the NAME's definition. */
1950 if (TREE_CODE (op1a) == SSA_NAME
1951 && (code1 == NE_EXPR || code1 == EQ_EXPR)
1952 && (integer_zerop (op1b) || integer_onep (op1b)))
1953 {
1954 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
1955 || (code1 == NE_EXPR && integer_onep (op1b)));
1956 gimple stmt = SSA_NAME_DEF_STMT (op1a);
1957 switch (gimple_code (stmt))
1958 {
1959 case GIMPLE_ASSIGN:
1960 /* Try to simplify by copy-propagating the definition. */
1961 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
1962
1963 case GIMPLE_PHI:
1964 /* If every argument to the PHI produces the same result when
1965 ANDed with the second comparison, we win.
1966 Do not do this unless the type is bool since we need a bool
1967 result here anyway. */
1968 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
1969 {
1970 tree result = NULL_TREE;
1971 unsigned i;
1972 for (i = 0; i < gimple_phi_num_args (stmt); i++)
1973 {
1974 tree arg = gimple_phi_arg_def (stmt, i);
1975
1976 /* If this PHI has itself as an argument, ignore it.
1977 If all the other args produce the same result,
1978 we're still OK. */
1979 if (arg == gimple_phi_result (stmt))
1980 continue;
1981 else if (TREE_CODE (arg) == INTEGER_CST)
1982 {
1983 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
1984 {
1985 if (!result)
1986 result = boolean_false_node;
1987 else if (!integer_zerop (result))
1988 return NULL_TREE;
1989 }
1990 else if (!result)
1991 result = fold_build2 (code2, boolean_type_node,
1992 op2a, op2b);
1993 else if (!same_bool_comparison_p (result,
1994 code2, op2a, op2b))
1995 return NULL_TREE;
1996 }
0e8b84ec
JJ
1997 else if (TREE_CODE (arg) == SSA_NAME
1998 && !SSA_NAME_IS_DEFAULT_DEF (arg))
e89065a1 1999 {
6c66f733
JJ
2000 tree temp;
2001 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
2002 /* In simple cases we can look through PHI nodes,
2003 but we have to be careful with loops.
2004 See PR49073. */
2005 if (! dom_info_available_p (CDI_DOMINATORS)
2006 || gimple_bb (def_stmt) == gimple_bb (stmt)
2007 || dominated_by_p (CDI_DOMINATORS,
2008 gimple_bb (def_stmt),
2009 gimple_bb (stmt)))
2010 return NULL_TREE;
2011 temp = and_var_with_comparison (arg, invert, code2,
2012 op2a, op2b);
e89065a1
SL
2013 if (!temp)
2014 return NULL_TREE;
2015 else if (!result)
2016 result = temp;
2017 else if (!same_bool_result_p (result, temp))
2018 return NULL_TREE;
2019 }
2020 else
2021 return NULL_TREE;
2022 }
2023 return result;
2024 }
2025
2026 default:
2027 break;
2028 }
2029 }
2030 return NULL_TREE;
2031}
2032
2033/* Try to simplify the AND of two comparisons, specified by
2034 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2035 If this can be simplified to a single expression (without requiring
2036 introducing more SSA variables to hold intermediate values),
2037 return the resulting tree. Otherwise return NULL_TREE.
2038 If the result expression is non-null, it has boolean type. */
2039
2040tree
2041maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
2042 enum tree_code code2, tree op2a, tree op2b)
2043{
2044 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2045 if (t)
2046 return t;
2047 else
2048 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2049}
2050
2051/* Helper function for or_comparisons_1: try to simplify the OR of the
2052 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
2053 If INVERT is true, invert the value of VAR before doing the OR.
2054 Return NULL_EXPR if we can't simplify this to a single expression. */
2055
2056static tree
2057or_var_with_comparison (tree var, bool invert,
2058 enum tree_code code2, tree op2a, tree op2b)
2059{
2060 tree t;
2061 gimple stmt = SSA_NAME_DEF_STMT (var);
2062
2063 /* We can only deal with variables whose definitions are assignments. */
2064 if (!is_gimple_assign (stmt))
2065 return NULL_TREE;
2066
2067 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
2068 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
2069 Then we only have to consider the simpler non-inverted cases. */
2070 if (invert)
2071 t = and_var_with_comparison_1 (stmt,
2072 invert_tree_comparison (code2, false),
2073 op2a, op2b);
2074 else
2075 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
2076 return canonicalize_bool (t, invert);
2077}
2078
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (gimple stmt,
			  enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  /* TRUE_TEST_VAR is set when (op2a code2 op2b) is equivalent to op2a;
     FALSE_TEST_VAR when it is equivalent to !op2a.  */
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true .  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_true_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (innercode,
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 code2,
				 op2a,
				 op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
	 inner1 OR (inner1 AND inner2) => inner1
	 !inner1 OR (inner1 OR inner2) => true
	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
      */
      if (inner1 == true_test_var)
	return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the OR across the inner tests.
	 Compute the first partial result, (inner1 OR (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (t OR inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_or)
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (integer_zerop (t))
		return inner2;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner2 OR (op2a code op2b)))  */
	  else if (integer_zerop (t))
	    return boolean_false_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 OR (op2a code op2b))  */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (inner1 OR t)
	     => (t OR partial)  */
	  if (is_or)
	    {
	      if (integer_zerop (t))
		return inner1;
	      else if (integer_onep (t))
		return boolean_true_node;
	      /* If both are the same, we can apply the identity
		 (x OR x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner1 OR (op2a code2 op2b)))
	     => (t AND partial)  */
	  else
	    {
	      if (integer_zerop (t))
		return boolean_false_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed AND expression.  The
		     interesting case is when at least one is true.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_onep (partial))
		    return t;
		  else if (integer_onep (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
2242
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  CMP below classifies op1b relative to op2b.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a != op1b), we should either be able to
	 return that or TRUE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	    }
	}
      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	    }
	}

      /* See if an equality test is redundant with the other comparison.  */
      else if (code1 == EQ_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == EQ_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the less restrictive of two < or <= comparisons
	 (for OR, the weaker test subsumes the stronger one).  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Likewise choose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Check for singleton ranges: (x < C) OR (x > C) == (x != C).  */
      else if (cmp == 0
	       && ((code1 == LT_EXPR && code2 == GT_EXPR)
		   || (code1 == GT_EXPR && code2 == LT_EXPR)))
	return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all.  */
      else if (cmp >= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_true_node;
      else if (cmp <= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_true_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* INVERT is true when the first comparison tests NAME for falsehood
	 ((NAME == 0) or (NAME != 1)).  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return or_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ORed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
			{
			  if (!result)
			    result = boolean_true_node;
			  else if (!integer_onep (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = or_var_with_comparison (arg, invert, code2,
						     op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
2496
2497/* Try to simplify the OR of two comparisons, specified by
2498 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2499 If this can be simplified to a single expression (without requiring
2500 introducing more SSA variables to hold intermediate values),
2501 return the resulting tree. Otherwise return NULL_TREE.
2502 If the result expression is non-null, it has boolean type. */
2503
2504tree
2505maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2506 enum tree_code code2, tree op2a, tree op2b)
2507{
2508 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2509 if (t)
2510 return t;
2511 else
2512 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2513}
cfef45c8
RG
2514
2515
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.

   Either NULL_TREE, a simplified but non-constant or a constant
   is returned.

   ??? This should go into a gimple-fold-inline.h file to be eventually
   privatized with the single valueize function used in the various TUs
   to avoid the indirect function call overhead.  */

tree
gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
{
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code subcode = gimple_assign_rhs_code (stmt);

	switch (get_gimple_rhs_class (subcode))
	  {
	  case GIMPLE_SINGLE_RHS:
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);
	      enum tree_code_class kind = TREE_CODE_CLASS (subcode);

	      if (TREE_CODE (rhs) == SSA_NAME)
		{
		  /* If the RHS is an SSA_NAME, return its known constant value,
		     if any.  */
		  return (*valueize) (rhs);
		}
	      /* Handle propagating invariant addresses into address
		 operations.  */
	      else if (TREE_CODE (rhs) == ADDR_EXPR
		       && !is_gimple_min_invariant (rhs))
		{
		  HOST_WIDE_INT offset = 0;
		  tree base;
		  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
							  &offset,
							  valueize);
		  if (base
		      && (CONSTANT_CLASS_P (base)
			  || decl_address_invariant_p (base)))
		    return build_invariant_address (TREE_TYPE (rhs),
						    base, offset);
		}
	      /* A fully-initialized vector CONSTRUCTOR folds to a
		 VECTOR_CST when every valueized element is constant.  */
	      else if (TREE_CODE (rhs) == CONSTRUCTOR
		       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		       && (CONSTRUCTOR_NELTS (rhs)
			   == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
		{
		  unsigned i;
		  tree val, *vec;

		  vec = XALLOCAVEC (tree,
				    TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
		    {
		      val = (*valueize) (val);
		      if (TREE_CODE (val) == INTEGER_CST
			  || TREE_CODE (val) == REAL_CST
			  || TREE_CODE (val) == FIXED_CST)
			vec[i] = val;
		      else
			return NULL_TREE;
		    }

		  return build_vector (TREE_TYPE (rhs), vec);
		}
	      if (subcode == OBJ_TYPE_REF)
		{
		  tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
		  /* If callee is constant, we can fold away the wrapper.  */
		  if (is_gimple_min_invariant (val))
		    return val;
		}

	      if (kind == tcc_reference)
		{
		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
		       || TREE_CODE (rhs) == REALPART_EXPR
		       || TREE_CODE (rhs) == IMAGPART_EXPR)
		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_unary_loc (EXPR_LOCATION (rhs),
					     TREE_CODE (rhs),
					     TREE_TYPE (rhs), val);
		    }
		  else if (TREE_CODE (rhs) == BIT_FIELD_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_ternary_loc (EXPR_LOCATION (rhs),
					       TREE_CODE (rhs),
					       TREE_TYPE (rhs), val,
					       TREE_OPERAND (rhs, 1),
					       TREE_OPERAND (rhs, 2));
		    }
		  else if (TREE_CODE (rhs) == MEM_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      if (TREE_CODE (val) == ADDR_EXPR
			  && is_gimple_min_invariant (val))
			{
			  tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
						  unshare_expr (val),
						  TREE_OPERAND (rhs, 1));
			  if (tem)
			    rhs = tem;
			}
		    }
		  /* Fall back to folding a load from a constant
		     initializer.  */
		  return fold_const_aggregate_ref_1 (rhs, valueize);
		}
	      else if (kind == tcc_declaration)
		return get_symbol_constant_value (rhs);
	      return rhs;
	    }

	  case GIMPLE_UNARY_RHS:
	    {
	      /* Handle unary operators that can appear in GIMPLE form.
		 Note that we know the single operand must be a constant,
		 so this should almost always return a simplified RHS.  */
	      tree lhs = gimple_assign_lhs (stmt);
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));

	      /* Conversions are useless for CCP purposes if they are
		 value-preserving.  Thus the restrictions that
		 useless_type_conversion_p places for restrict qualification
		 of pointer types should not apply here.
		 Substitution later will only substitute to allowed places.  */
	      if (CONVERT_EXPR_CODE_P (subcode)
		  && POINTER_TYPE_P (TREE_TYPE (lhs))
		  && POINTER_TYPE_P (TREE_TYPE (op0))
		  && TYPE_ADDR_SPACE (TREE_TYPE (lhs))
		     == TYPE_ADDR_SPACE (TREE_TYPE (op0))
		  && TYPE_MODE (TREE_TYPE (lhs))
		     == TYPE_MODE (TREE_TYPE (op0)))
		return op0;

	      return
		fold_unary_ignore_overflow_loc (loc, subcode,
						gimple_expr_type (stmt), op0);
	    }

	  case GIMPLE_BINARY_RHS:
	    {
	      /* Handle binary operators that can appear in GIMPLE form.  */
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
	      tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));

	      /* Translate &x + CST into an invariant form suitable for
		 further propagation.  */
	      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
		  && TREE_CODE (op0) == ADDR_EXPR
		  && TREE_CODE (op1) == INTEGER_CST)
		{
		  tree off = fold_convert (ptr_type_node, op1);
		  return build_fold_addr_expr_loc
			   (loc,
			    fold_build2 (MEM_REF,
					 TREE_TYPE (TREE_TYPE (op0)),
					 unshare_expr (op0), off));
		}

	      return fold_binary_loc (loc, subcode,
				      gimple_expr_type (stmt), op0, op1);
	    }

	  case GIMPLE_TERNARY_RHS:
	    {
	      /* Handle ternary operators that can appear in GIMPLE form.  */
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
	      tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
	      tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));

	      /* Fold embedded expressions in ternary codes.  */
	      if ((subcode == COND_EXPR
		   || subcode == VEC_COND_EXPR)
		  && COMPARISON_CLASS_P (op0))
		{
		  tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
		  tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
		  tree tem = fold_binary_loc (loc, TREE_CODE (op0),
					      TREE_TYPE (op0), op00, op01);
		  if (tem)
		    op0 = tem;
		}

	      return fold_ternary_loc (loc, subcode,
				       gimple_expr_type (stmt), op0, op1, op2);
	    }

	  default:
	    gcc_unreachable ();
	  }
      }

    case GIMPLE_CALL:
      {
	tree fn;

	if (gimple_call_internal_p (stmt))
	  {
	    /* UBSAN overflow-check calls fold to the plain arithmetic
	       result when both operands are constants and the operation
	       does not overflow.  */
	    enum tree_code subcode = ERROR_MARK;
	    switch (gimple_call_internal_fn (stmt))
	      {
	      case IFN_UBSAN_CHECK_ADD:
		subcode = PLUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_SUB:
		subcode = MINUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_MUL:
		subcode = MULT_EXPR;
		break;
	      default:
		return NULL_TREE;
	      }
	    tree op0 = (*valueize) (gimple_call_arg (stmt, 0));
	    tree op1 = (*valueize) (gimple_call_arg (stmt, 1));

	    if (TREE_CODE (op0) != INTEGER_CST
		|| TREE_CODE (op1) != INTEGER_CST)
	      return NULL_TREE;
	    tree res = fold_binary_loc (loc, subcode,
					TREE_TYPE (gimple_call_arg (stmt, 0)),
					op0, op1);
	    if (res
		&& TREE_CODE (res) == INTEGER_CST
		&& !TREE_OVERFLOW (res))
	      return res;
	    return NULL_TREE;
	  }

	fn = (*valueize) (gimple_call_fn (stmt));
	if (TREE_CODE (fn) == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
	    && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
	    && gimple_builtin_call_types_compatible_p (stmt,
						       TREE_OPERAND (fn, 0)))
	  {
	    /* Rebuild the call as a CALL_EXPR over valueized arguments
	       and let the generic builtin folder handle it.  */
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree call, retval;
	    unsigned i;
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      args[i] = (*valueize) (gimple_call_arg (stmt, i));
	    call = build_call_array_loc (loc,
					 gimple_call_return_type (stmt),
					 fn, gimple_call_num_args (stmt), args);
	    retval = fold_call_expr (EXPR_LOCATION (call), call, false);
	    if (retval)
	      {
		/* fold_call_expr wraps the result inside a NOP_EXPR.  */
		STRIP_NOPS (retval);
		retval = fold_convert (gimple_call_return_type (stmt), retval);
	      }
	    return retval;
	  }
	return NULL_TREE;
      }

    default:
      return NULL_TREE;
    }
}
2786
2787/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2788 Returns NULL_TREE if folding to a constant is not possible, otherwise
2789 returns a constant according to is_gimple_min_invariant. */
2790
2791tree
2792gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
2793{
2794 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
2795 if (res && is_gimple_min_invariant (res))
2796 return res;
2797 return NULL_TREE;
2798}
2799
2800
2801/* The following set of functions are supposed to fold references using
2802 their constant initializers. */
2803
2804static tree fold_ctor_reference (tree type, tree ctor,
2805 unsigned HOST_WIDE_INT offset,
c44c2088 2806 unsigned HOST_WIDE_INT size, tree);
cfef45c8
RG
2807
/* See if we can find constructor defining value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */
static tree
get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
		      tree (*valueize)(tree))
{
  HOST_WIDE_INT bit_offset2, size, max_size;
  if (TREE_CODE (base) == MEM_REF)
    {
      /* Fold the MEM_REF's constant offset into *BIT_OFFSET and strip
	 down to the underlying address.  */
      if (!integer_zerop (TREE_OPERAND (base, 1)))
	{
	  if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
	    return NULL_TREE;
	  *bit_offset += (mem_ref_offset (base).low
			  * BITS_PER_UNIT);
	}

      if (valueize
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
	tree init = ctor_for_folding (base);

	/* Our semantic is exact opposite of ctor_for_folding;
	   NULL means unknown, while error_mark_node is 0.  */
	if (init == error_mark_node)
	  return NULL_TREE;
	if (!init)
	  return error_mark_node;
	return init;
      }

    case ARRAY_REF:
    case COMPONENT_REF:
      /* Only constant-sized, non-variable accesses can be resolved.  */
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
      if (max_size == -1 || size != max_size)
	return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case STRING_CST:
    case CONSTRUCTOR:
      return base;

    default:
      return NULL_TREE;
    }
}
2875
2876/* CTOR is STRING_CST. Fold reference of type TYPE and size SIZE
2877 to the memory at bit OFFSET.
2878
2879 We do only simple job of folding byte accesses. */
2880
2881static tree
2882fold_string_cst_ctor_reference (tree type, tree ctor,
2883 unsigned HOST_WIDE_INT offset,
2884 unsigned HOST_WIDE_INT size)
2885{
2886 if (INTEGRAL_TYPE_P (type)
2887 && (TYPE_MODE (type)
2888 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2889 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2890 == MODE_INT)
2891 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
2892 && size == BITS_PER_UNIT
2893 && !(offset % BITS_PER_UNIT))
2894 {
2895 offset /= BITS_PER_UNIT;
2896 if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
2897 return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
2898 [offset]));
2899 /* Folding
2900 const char a[20]="hello";
2901 return a[10];
2902
2903 might lead to offset greater than string length. In this case we
2904 know value is either initialized to 0 or out of bounds. Return 0
2905 in both cases. */
2906 return build_zero_cst (type);
2907 }
2908 return NULL_TREE;
2909}
2910
/* CTOR is CONSTRUCTOR of an array type.  Fold reference of type TYPE and size
   SIZE to the memory at bit OFFSET.  FROM_DECL is the decl the constructor
   originates from and is forwarded to the recursive fold.  Returns the folded
   value, build_zero_cst for memory not explicitly initialized, or NULL_TREE
   when the access cannot be analyzed.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size,
			   tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;
  double_int low_bound, elt_size;
  double_int index, max_index;
  double_int access_index;
  tree domain_type = NULL_TREE, index_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects makes no sense.  */
      gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
      index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
      low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
    }
  else
    /* Vectors (no TYPE_DOMAIN) and arrays without a minimum start at 0.  */
    low_bound = double_int_zero;
  /* Static constructors for variably sized objects makes no sense.  */
  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
	      == INTEGER_CST);
  elt_size =
    tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* We can handle only constantly sized accesses that are known to not
     be larger than size of array element.  */
  if (!TYPE_SIZE_UNIT (type)
      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
      || elt_size.slt (tree_to_double_int (TYPE_SIZE_UNIT (type)))
      || elt_size.is_zero ())
    return NULL_TREE;

  /* Compute the array index we look for:
     access_index = (offset in bytes) / elt_size + low_bound,
     reduced to the precision/signedness of the domain's index type.  */
  access_index = double_int::from_uhwi (offset / BITS_PER_UNIT)
		 .udiv (elt_size, TRUNC_DIV_EXPR);
  access_index += low_bound;
  if (index_type)
    access_index = access_index.ext (TYPE_PRECISION (index_type),
				     TYPE_UNSIGNED (index_type));

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  /* See if the array field is large enough to span whole access.  We do not
     care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
    return NULL_TREE;

  /* Start one below LOW_BOUND so the "next index" rule below yields
     LOW_BOUND for the first unindexed constructor element.  */
  index = low_bound - double_int_one;
  if (index_type)
    index = index.ext (TYPE_PRECISION (index_type), TYPE_UNSIGNED (index_type));

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
    {
      /* Array constructor might explicitely set index, or specify range
	 or leave index NULL meaning that it is next index after previous
	 one.  */
      if (cfield)
	{
	  if (TREE_CODE (cfield) == INTEGER_CST)
	    max_index = index = tree_to_double_int (cfield);
	  else
	    {
	      gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
	      index = tree_to_double_int (TREE_OPERAND (cfield, 0));
	      max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
	    }
	}
      else
	{
	  index += double_int_one;
	  if (index_type)
	    index = index.ext (TYPE_PRECISION (index_type),
			       TYPE_UNSIGNED (index_type));
	  max_index = index;
	}

      /* Do we have match?  INDEX..MAX_INDEX is the (inclusive) range this
	 constructor element covers; comparisons are unsigned (cmp arg 1).  */
      if (access_index.cmp (index, 1) >= 0
	  && access_index.cmp (max_index, 1) <= 0)
	return fold_ctor_reference (type, cval, inner_offset, size,
				    from_decl);
    }
  /* When memory is not explicitely mentioned in constructor,
     it is 0 (or out of range).  */
  return build_zero_cst (type);
}
3010
/* CTOR is CONSTRUCTOR of an aggregate or vector.
   Fold reference of type TYPE and size SIZE to the memory at bit OFFSET.
   FROM_DECL is the decl the constructor originates from.  Returns the folded
   value, build_zero_cst for memory not covered by any field initializer, or
   NULL_TREE when the access spans multiple fields or only partially overlaps
   one.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);
      double_int bitoffset;
      double_int byte_offset_cst = tree_to_double_int (byte_offset);
      double_int bits_per_unit_cst = double_int::from_uhwi (BITS_PER_UNIT);
      double_int bitoffset_end, access_end;

      /* Variable sized objects in static constructors makes no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      bitoffset = tree_to_double_int (field_offset)
		  + byte_offset_cst * bits_per_unit_cst;
      /* Compute bit offset where the field ends.  A flexible array member
	 (NULL field_size) notionally extends to infinity; the zero end
	 marker is only used together with the field_size == NULL_TREE
	 checks below.  */
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + tree_to_double_int (field_size);
      else
	bitoffset_end = double_int_zero;

      access_end = double_int::from_uhwi (offset)
		   + double_int::from_uhwi (size);

      /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
	 [BITOFFSET, BITOFFSET_END)?  */
      if (access_end.cmp (bitoffset, 0) > 0
	  && (field_size == NULL_TREE
	      || double_int::from_uhwi (offset).slt (bitoffset_end)))
	{
	  double_int inner_offset = double_int::from_uhwi (offset) - bitoffset;
	  /* We do have overlap.  Now see if field is large enough to
	     cover the access.  Give up for accesses spanning multiple
	     fields.  */
	  if (access_end.cmp (bitoffset_end, 0) > 0)
	    return NULL_TREE;
	  if (double_int::from_uhwi (offset).slt (bitoffset))
	    return NULL_TREE;
	  /* Recurse into the field's own initializer with the offset
	     rebased to the start of the field.  */
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl);
	}
    }
  /* When memory is not explicitely mentioned in constructor, it is 0.  */
  return build_zero_cst (type);
}
3076
3077/* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
3078 to the memory at bit OFFSET. */
3079
3080static tree
3081fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
c44c2088 3082 unsigned HOST_WIDE_INT size, tree from_decl)
cfef45c8
RG
3083{
3084 tree ret;
3085
3086 /* We found the field with exact match. */
3087 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
3088 && !offset)
9d60be38 3089 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
cfef45c8
RG
3090
3091 /* We are at the end of walk, see if we can view convert the
3092 result. */
3093 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
3094 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
3095 && operand_equal_p (TYPE_SIZE (type),
3096 TYPE_SIZE (TREE_TYPE (ctor)), 0))
3097 {
9d60be38 3098 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
cfef45c8
RG
3099 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
3100 if (ret)
3101 STRIP_NOPS (ret);
3102 return ret;
3103 }
3104 if (TREE_CODE (ctor) == STRING_CST)
3105 return fold_string_cst_ctor_reference (type, ctor, offset, size);
3106 if (TREE_CODE (ctor) == CONSTRUCTOR)
3107 {
3108
eb8f1123
RG
3109 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
3110 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
c44c2088
JH
3111 return fold_array_ctor_reference (type, ctor, offset, size,
3112 from_decl);
cfef45c8 3113 else
c44c2088
JH
3114 return fold_nonarray_ctor_reference (type, ctor, offset, size,
3115 from_decl);
cfef45c8
RG
3116 }
3117
3118 return NULL_TREE;
3119}
3120
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  HOST_WIDE_INT offset, size, max_size;
  tree tem;

  /* Folding a volatile load would remove the access.  */
  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  /* A bare decl reference folds to its known constant value, if any.  */
  if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && TREE_CODE (idx) == INTEGER_CST)
	{
	  tree low_bound, unit_size;
	  double_int doffset;

	  /* If the resulting bit-offset is constant, track it.  The comma
	     expressions bind the intermediate trees while testing them.  */
	  if ((low_bound = array_ref_low_bound (t),
	       TREE_CODE (low_bound) == INTEGER_CST)
	      && (unit_size = array_ref_element_size (t),
		  tree_fits_uhwi_p (unit_size))
	      && (doffset = (TREE_INT_CST (idx) - TREE_INT_CST (low_bound))
			    .sext (TYPE_PRECISION (TREE_TYPE (idx))),
		  doffset.fits_shwi ()))
	    {
	      /* Bit offset of the element: (idx - low_bound) * unit_size
		 * BITS_PER_UNIT.  */
	      offset = doffset.to_shwi ();
	      offset *= tree_to_uhwi (unit_size);
	      offset *= BITS_PER_UNIT;

	      base = TREE_OPERAND (t, 0);
	      ctor = get_base_constructor (base, &offset, valueize);
	      /* Empty constructor.  Always fold to 0.  */
	      if (ctor == error_mark_node)
		return build_zero_cst (TREE_TYPE (t));
	      /* Out of bound array access.  Value is undefined,
		 but don't fold.  */
	      if (offset < 0)
		return NULL_TREE;
	      /* We can not determine ctor.  */
	      if (!ctor)
		return NULL_TREE;
	      return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					  tree_to_uhwi (unit_size)
					  * BITS_PER_UNIT,
					  base);
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (max_size == -1 || max_size != size)
	return NULL_TREE;
      /* We can not determine ctor.  */
      if (!ctor)
	return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (offset < 0)
	return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
				  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	/* Fold the complex operand first, then pick out the part.  */
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
3232
/* Like fold_const_aggregate_ref_1, but there is no valueization of
   SSA names.  */

tree
fold_const_aggregate_ref (tree t)
{
  return fold_const_aggregate_ref_1 (t, NULL);
}
06bc3ec7 3238
81fa35bd
MJ
/* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
   is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).  Returns NULL_TREE when the target cannot be
   determined or cannot be referred to from the current unit.  */

tree
gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo)
{
  unsigned HOST_WIDE_INT offset, size;
  tree v, fn, vtable, init;

  vtable = v = BINFO_VTABLE (known_binfo);
  /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;

  /* BINFO_VTABLE may be &vtable + CST; peel off the constant byte
     offset into the table (converted to bits) if present.  */
  if (TREE_CODE (v) == POINTER_PLUS_EXPR)
    {
      offset = tree_to_uhwi (TREE_OPERAND (v, 1)) * BITS_PER_UNIT;
      v = TREE_OPERAND (v, 0);
    }
  else
    offset = 0;

  if (TREE_CODE (v) != ADDR_EXPR)
    return NULL_TREE;
  v = TREE_OPERAND (v, 0);

  /* V must now be the vtable VAR_DECL itself.  */
  if (TREE_CODE (v) != VAR_DECL
      || !DECL_VIRTUAL_P (v))
    return NULL_TREE;
  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that ctor seem unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      gcc_assert (in_lto_p);
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  /* Index TOKEN entries into the table; SIZE is the bit size of one
     vtable slot.  */
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  offset += token * size;
  fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
			    offset, size, v);
  if (!fn || integer_zerop (fn))
    return NULL_TREE;
  gcc_assert (TREE_CODE (fn) == ADDR_EXPR
	      || TREE_CODE (fn) == FDESC_EXPR);
  fn = TREE_OPERAND (fn, 0);
  gcc_assert (TREE_CODE (fn) == FUNCTION_DECL);

  /* When cgraph node is missing and function is not public, we cannot
     devirtualize.  This can happen in WHOPR when the actual method
     ends up in other partition, because we found devirtualization
     possibility too late.  */
  if (!can_refer_decl_in_current_unit_p (fn, vtable))
    return NULL_TREE;

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_get_create_node (fn);

  return fn;
}
3308
06bc3ec7
BS
3309/* Return true iff VAL is a gimple expression that is known to be
3310 non-negative. Restricted to floating-point inputs. */
3311
3312bool
3313gimple_val_nonnegative_real_p (tree val)
3314{
3315 gimple def_stmt;
3316
3317 gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));
3318
3319 /* Use existing logic for non-gimple trees. */
3320 if (tree_expr_nonnegative_p (val))
3321 return true;
3322
3323 if (TREE_CODE (val) != SSA_NAME)
3324 return false;
3325
3326 /* Currently we look only at the immediately defining statement
3327 to make this determination, since recursion on defining
3328 statements of operands can lead to quadratic behavior in the
3329 worst case. This is expected to catch almost all occurrences
3330 in practice. It would be possible to implement limited-depth
3331 recursion if important cases are lost. Alternatively, passes
3332 that need this information (such as the pow/powi lowering code
3333 in the cse_sincos pass) could be revised to provide it through
3334 dataflow propagation. */
3335
3336 def_stmt = SSA_NAME_DEF_STMT (val);
3337
3338 if (is_gimple_assign (def_stmt))
3339 {
3340 tree op0, op1;
3341
3342 /* See fold-const.c:tree_expr_nonnegative_p for additional
3343 cases that could be handled with recursion. */
3344
3345 switch (gimple_assign_rhs_code (def_stmt))
3346 {
3347 case ABS_EXPR:
3348 /* Always true for floating-point operands. */
3349 return true;
3350
3351 case MULT_EXPR:
3352 /* True if the two operands are identical (since we are
3353 restricted to floating-point inputs). */
3354 op0 = gimple_assign_rhs1 (def_stmt);
3355 op1 = gimple_assign_rhs2 (def_stmt);
3356
3357 if (op0 == op1
3358 || operand_equal_p (op0, op1, 0))
3359 return true;
3360
3361 default:
3362 return false;
3363 }
3364 }
3365 else if (is_gimple_call (def_stmt))
3366 {
3367 tree fndecl = gimple_call_fndecl (def_stmt);
3368 if (fndecl
3369 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3370 {
3371 tree arg1;
3372
3373 switch (DECL_FUNCTION_CODE (fndecl))
3374 {
3375 CASE_FLT_FN (BUILT_IN_ACOS):
3376 CASE_FLT_FN (BUILT_IN_ACOSH):
3377 CASE_FLT_FN (BUILT_IN_CABS):
3378 CASE_FLT_FN (BUILT_IN_COSH):
3379 CASE_FLT_FN (BUILT_IN_ERFC):
3380 CASE_FLT_FN (BUILT_IN_EXP):
3381 CASE_FLT_FN (BUILT_IN_EXP10):
3382 CASE_FLT_FN (BUILT_IN_EXP2):
3383 CASE_FLT_FN (BUILT_IN_FABS):
3384 CASE_FLT_FN (BUILT_IN_FDIM):
3385 CASE_FLT_FN (BUILT_IN_HYPOT):
3386 CASE_FLT_FN (BUILT_IN_POW10):
3387 return true;
3388
3389 CASE_FLT_FN (BUILT_IN_SQRT):
3390 /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
3391 nonnegative inputs. */
3392 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
3393 return true;
3394
3395 break;
3396
3397 CASE_FLT_FN (BUILT_IN_POWI):
3398 /* True if the second argument is an even integer. */
3399 arg1 = gimple_call_arg (def_stmt, 1);
3400
3401 if (TREE_CODE (arg1) == INTEGER_CST
3402 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
3403 return true;
3404
3405 break;
3406
3407 CASE_FLT_FN (BUILT_IN_POW):
3408 /* True if the second argument is an even integer-valued
3409 real. */
3410 arg1 = gimple_call_arg (def_stmt, 1);
3411
3412 if (TREE_CODE (arg1) == REAL_CST)
3413 {
3414 REAL_VALUE_TYPE c;
3415 HOST_WIDE_INT n;
3416
3417 c = TREE_REAL_CST (arg1);
3418 n = real_to_integer (&c);
3419
3420 if ((n & 1) == 0)
3421 {
3422 REAL_VALUE_TYPE cint;
3423 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3424 if (real_identical (&c, &cint))
3425 return true;
3426 }
3427 }
3428
3429 break;
3430
3431 default:
3432 return false;
3433 }
3434 }
3435 }
3436
3437 return false;
3438}
b184c8f1
AM
3439
/* Given a pointer value OP0, return a simplified version of an
   indirection through OP0, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view.  */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
	return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (TREE_CODE (min_val) == INTEGER_CST)
	    return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* *(p + CST) -> ... */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
	  unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
	  tree part_width = TYPE_SIZE (type);
	  unsigned HOST_WIDE_INT part_widthi
	    = tree_to_shwi (part_width) / BITS_PER_UNIT;
	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	  tree index = bitsize_int (indexi);
	  /* Only fold when the element lies within the vector.  */
	  if (offset / part_widthi
	      < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
	    return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
				part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
	{
	  tree size = TYPE_SIZE_UNIT (type);
	  /* Index 1 means the offset equals exactly one element size.  */
	  if (tree_int_cst_equal (size, off))
	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
	}

      /* *(p + CST) -> MEM_REF <p, CST>.  Not done when ADDR is the
	 address of a non-decl object, where a MEM_REF would not be
	 a valid canonical form.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    build_int_cst_wide (ptype,
						TREE_INT_CST_LOW (off),
						TREE_INT_CST_HIGH (off)));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
19e51b40
JJ
3561
3562/* Return true if CODE is an operation that when operating on signed
3563 integer types involves undefined behavior on overflow and the
3564 operation can be expressed with unsigned arithmetic. */
3565
3566bool
3567arith_code_with_undefined_signed_overflow (tree_code code)
3568{
3569 switch (code)
3570 {
3571 case PLUS_EXPR:
3572 case MINUS_EXPR:
3573 case MULT_EXPR:
3574 case NEGATE_EXPR:
3575 case POINTER_PLUS_EXPR:
3576 return true;
3577 default:
3578 return false;
3579 }
3580}
3581
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  /* Convert each RHS operand (ops 1..n-1) to the unsigned type,
     collecting any conversion statements that creates.  */
  for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
    {
      gimple_seq stmts2 = NULL;
      gimple_set_op (stmt, i,
		     force_gimple_operand (fold_convert (type,
							 gimple_op (stmt, i)),
					   &stmts2, true, NULL_TREE));
      gimple_seq_add_seq (&stmts, stmts2);
    }
  /* Retarget STMT to a fresh unsigned SSA name; the original LHS is
     written by the closing conversion below.  */
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  /* POINTER_PLUS_EXPR is not valid on two unsigned integers; use
     plain PLUS_EXPR instead.  */
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  /* Finally convert the unsigned result back into the original LHS.  */
  gimple cvt = gimple_build_assign_with_ops
      (NOP_EXPR, lhs, gimple_assign_lhs (stmt), NULL_TREE);
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}