1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2013 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "function.h"
28 #include "dumpfile.h"
29 #include "bitmap.h"
30 #include "gimple.h"
31 #include "gimplify.h"
32 #include "gimple-iterator.h"
33 #include "gimple-ssa.h"
34 #include "tree-ssanames.h"
35 #include "tree-into-ssa.h"
36 #include "tree-dfa.h"
37 #include "tree-ssa.h"
38 #include "tree-ssa-propagate.h"
39 #include "target.h"
40 #include "ipa-utils.h"
41 #include "gimple-pretty-print.h"
42 #include "tree-ssa-address.h"
43 #include "langhooks.h"
44
45 /* Return true when DECL can be referenced from the current unit.
46 FROM_DECL (if non-null) specifies the variable from whose constructor DECL was taken.
47 We can get declarations that are not possible to reference for various
48 reasons:
49
50 1) When analyzing C++ virtual tables.
51 C++ virtual tables do have known constructors even
52 when they are keyed to another compilation unit.
53 Those tables can contain pointers to methods and vars
54 in other units. Those methods have both STATIC and EXTERNAL
55 set.
56 2) In WHOPR mode devirtualization might lead to a reference
57 to a method that was partitioned elsewhere.
58 In this case we have a static VAR_DECL or FUNCTION_DECL
59 that has no corresponding callgraph/varpool node
60 declaring the body.
61 3) COMDAT functions referred to by external vtables that
62 we devirtualize only during the final compilation stage.
63 At this time we have already decided that we will not output
64 the function body and thus we can't reference the symbol
65 directly. */
66
67 static bool
68 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
69 {
70 struct varpool_node *vnode;
71 struct cgraph_node *node;
72 symtab_node *snode;
73
74 if (DECL_ABSTRACT (decl))
75 return false;
76
77 /* We are concerned only about static/external vars and functions. */
78 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
79 || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
80 return true;
81
82 /* Static objects can be referred to only if they were not optimized out yet. */
83 if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
84 {
85 snode = symtab_get_node (decl);
86 if (!snode)
87 return false;
88 node = dyn_cast <cgraph_node> (snode);
89 return !node || !node->global.inlined_to;
90 }
91
92 /* We will later output the initializer, so we can refer to it.
93 So we are concerned only when DECL comes from the initializer of
94 an external var. */
95 if (!from_decl
96 || TREE_CODE (from_decl) != VAR_DECL
97 || !DECL_EXTERNAL (from_decl)
98 || (flag_ltrans
99 && symtab_get_node (from_decl)->in_other_partition))
100 return true;
101 /* We are folding a reference from an external vtable. The vtable may refer
102 to a symbol keyed to another compilation unit. The other compilation
103 unit may be in a separate DSO and the symbol may be hidden. */
104 if (DECL_VISIBILITY_SPECIFIED (decl)
105 && DECL_EXTERNAL (decl)
106 && (!(snode = symtab_get_node (decl)) || !snode->in_other_partition))
107 return false;
108 /* When the function is public, we can always introduce a new reference.
109 The exception is COMDAT functions, where introducing a direct
110 reference implies the need to include the function body in the current unit. */
111 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
112 return true;
113 /* We are not at the ltrans stage, so don't worry about WHOPR.
114 Also, while still gimplifying, all referred comdat functions will be
115 produced.
116
117 As observed in PR20991 for already optimized out comdat virtual functions,
118 it may be tempting not to give up, because the copy will be
119 output elsewhere when the corresponding vtable is output.
120 This is however not possible - the ABI specifies that COMDATs are output
121 in units where they are used, and when the other unit was compiled with LTO
122 it is possible that the vtable was kept public while the function itself
123 was privatized. */
124 if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
125 return true;
126
127 /* OK, we are seeing either a COMDAT or a static variable. In this case we must
128 check that the definition is still around so we can refer to it. */
129 if (TREE_CODE (decl) == FUNCTION_DECL)
130 {
131 node = cgraph_get_node (decl);
132 /* Check that we still have the function body and that we haven't yet
133 decided to eliminate the offline copy of the function.
134 The second point is important when devirtualization happens during the
135 final compilation stage, when making a new reference no longer causes
136 the callee to be compiled. */
137 if (!node || !node->definition || node->global.inlined_to)
138 {
139 gcc_checking_assert (!TREE_ASM_WRITTEN (decl));
140 return false;
141 }
142 }
143 else if (TREE_CODE (decl) == VAR_DECL)
144 {
145 vnode = varpool_get_node (decl);
146 if (!vnode || !vnode->definition)
147 {
148 gcc_checking_assert (!TREE_ASM_WRITTEN (decl));
149 return false;
150 }
151 }
152 return true;
153 }
154
155 /* CVAL is a value taken from the DECL_INITIAL of a variable. Try to transform
156 it into a form acceptable to is_gimple_min_invariant.
157 FROM_DECL (if non-NULL) specifies the variable whose constructor contains CVAL. */
158
159 tree
160 canonicalize_constructor_val (tree cval, tree from_decl)
161 {
162 tree orig_cval = cval;
163 STRIP_NOPS (cval);
164 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
165 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
166 {
167 tree ptr = TREE_OPERAND (cval, 0);
168 if (is_gimple_min_invariant (ptr))
169 cval = build1_loc (EXPR_LOCATION (cval),
170 ADDR_EXPR, TREE_TYPE (ptr),
171 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
172 ptr,
173 fold_convert (ptr_type_node,
174 TREE_OPERAND (cval, 1))));
175 }
176 if (TREE_CODE (cval) == ADDR_EXPR)
177 {
178 tree base = NULL_TREE;
179 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
180 {
181 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
182 if (base)
183 TREE_OPERAND (cval, 0) = base;
184 }
185 else
186 base = get_base_address (TREE_OPERAND (cval, 0));
187 if (!base)
188 return NULL_TREE;
189
190 if ((TREE_CODE (base) == VAR_DECL
191 || TREE_CODE (base) == FUNCTION_DECL)
192 && !can_refer_decl_in_current_unit_p (base, from_decl))
193 return NULL_TREE;
194 if (TREE_CODE (base) == VAR_DECL)
195 TREE_ADDRESSABLE (base) = 1;
196 else if (TREE_CODE (base) == FUNCTION_DECL)
197 {
198 /* Make sure we create a cgraph node for functions we'll reference.
199 They can be non-existent if the reference comes from an entry
200 of an external vtable for example. */
201 cgraph_get_create_node (base);
202 }
203 /* Fixup types in global initializers. */
204 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
205 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
206
207 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
208 cval = fold_convert (TREE_TYPE (orig_cval), cval);
209 return cval;
210 }
211 if (TREE_OVERFLOW_P (cval))
212 return drop_tree_overflow (cval);
213 return orig_cval;
214 }
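/* Worked example (a minimal sketch; the names are hypothetical): a
   constructor value such as &a p+ 4, i.e. a POINTER_PLUS_EXPR of an
   invariant address and the INTEGER_CST 4, is canonicalized above to the
   invariant address &MEM[&a + 4B] in GIMPLE dump notation, a form that
   satisfies is_gimple_min_invariant provided `a' is still referenceable
   from the current unit. */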
215
216 /* If SYM is a constant variable with known value, return the value.
217 NULL_TREE is returned otherwise. */
218
219 tree
220 get_symbol_constant_value (tree sym)
221 {
222 tree val = ctor_for_folding (sym);
223 if (val != error_mark_node)
224 {
225 if (val)
226 {
227 val = canonicalize_constructor_val (unshare_expr (val), sym);
228 if (val && is_gimple_min_invariant (val))
229 return val;
230 else
231 return NULL_TREE;
232 }
233 /* Variables declared 'const' without an initializer
234 have zero as the initializer if they may not be
235 overridden at link or run time. */
236 if (!val
237 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
238 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
239 return build_zero_cst (TREE_TYPE (sym));
240 }
241
242 return NULL_TREE;
243 }
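/* Worked example (a minimal sketch; the names are hypothetical): given

     static const int x = 42;
     const int y;   /* assuming `y' may not be overridden at link or
                       run time */

   get_symbol_constant_value returns the INTEGER_CST 42 for `x' and
   build_zero_cst, i.e. 0, for `y'; for a non-constant or overridable
   symbol it returns NULL_TREE. */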
244
245
246
247 /* Subroutine of fold_stmt. We perform several simplifications of the
248 memory reference tree EXPR and make sure to re-gimplify them properly
249 after propagation of constant addresses. IS_LHS is true if the
250 reference is supposed to be an lvalue. */
251
252 static tree
253 maybe_fold_reference (tree expr, bool is_lhs)
254 {
255 tree *t = &expr;
256 tree result;
257
258 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
259 || TREE_CODE (expr) == REALPART_EXPR
260 || TREE_CODE (expr) == IMAGPART_EXPR)
261 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
262 return fold_unary_loc (EXPR_LOCATION (expr),
263 TREE_CODE (expr),
264 TREE_TYPE (expr),
265 TREE_OPERAND (expr, 0));
266 else if (TREE_CODE (expr) == BIT_FIELD_REF
267 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
268 return fold_ternary_loc (EXPR_LOCATION (expr),
269 TREE_CODE (expr),
270 TREE_TYPE (expr),
271 TREE_OPERAND (expr, 0),
272 TREE_OPERAND (expr, 1),
273 TREE_OPERAND (expr, 2));
274
275 while (handled_component_p (*t))
276 t = &TREE_OPERAND (*t, 0);
277
278 /* Canonicalize a MEM_REF's invariant address operand. Do this first
279 to avoid feeding non-canonical MEM_REFs elsewhere. */
280 if (TREE_CODE (*t) == MEM_REF
281 && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
282 {
283 bool volatile_p = TREE_THIS_VOLATILE (*t);
284 tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
285 TREE_OPERAND (*t, 0),
286 TREE_OPERAND (*t, 1));
287 if (tem)
288 {
289 TREE_THIS_VOLATILE (tem) = volatile_p;
290 *t = tem;
291 tem = maybe_fold_reference (expr, is_lhs);
292 if (tem)
293 return tem;
294 return expr;
295 }
296 }
297
298 if (!is_lhs
299 && (result = fold_const_aggregate_ref (expr))
300 && is_gimple_min_invariant (result))
301 return result;
302
303 /* Fold back MEM_REFs to reference trees. */
304 if (TREE_CODE (*t) == MEM_REF
305 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
306 && integer_zerop (TREE_OPERAND (*t, 1))
307 && (TREE_THIS_VOLATILE (*t)
308 == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
309 && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
310 && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
311 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
312 /* We have to be careful here not to drop a required conversion
313 from the rhs to the lhs if is_lhs, but we don't have the
314 rhs here to verify that. Thus require strict type
315 compatibility. */
316 && types_compatible_p (TREE_TYPE (*t),
317 TREE_TYPE (TREE_OPERAND
318 (TREE_OPERAND (*t, 0), 0))))
319 {
320 tree tem;
321 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
322 tem = maybe_fold_reference (expr, is_lhs);
323 if (tem)
324 return tem;
325 return expr;
326 }
327 else if (TREE_CODE (*t) == TARGET_MEM_REF)
328 {
329 tree tem = maybe_fold_tmr (*t);
330 if (tem)
331 {
332 *t = tem;
333 tem = maybe_fold_reference (expr, is_lhs);
334 if (tem)
335 return tem;
336 return expr;
337 }
338 }
339
340 return NULL_TREE;
341 }
342
343
344 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
345 replacement rhs for the statement or NULL_TREE if no simplification
346 could be made. It is assumed that the operands have been previously
347 folded. */
348
349 static tree
350 fold_gimple_assign (gimple_stmt_iterator *si)
351 {
352 gimple stmt = gsi_stmt (*si);
353 enum tree_code subcode = gimple_assign_rhs_code (stmt);
354 location_t loc = gimple_location (stmt);
355
356 tree result = NULL_TREE;
357
358 switch (get_gimple_rhs_class (subcode))
359 {
360 case GIMPLE_SINGLE_RHS:
361 {
362 tree rhs = gimple_assign_rhs1 (stmt);
363
364 if (REFERENCE_CLASS_P (rhs))
365 return maybe_fold_reference (rhs, false);
366
367 else if (TREE_CODE (rhs) == ADDR_EXPR)
368 {
369 tree ref = TREE_OPERAND (rhs, 0);
370 tree tem = maybe_fold_reference (ref, true);
371 if (tem
372 && TREE_CODE (tem) == MEM_REF
373 && integer_zerop (TREE_OPERAND (tem, 1)))
374 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
375 else if (tem)
376 result = fold_convert (TREE_TYPE (rhs),
377 build_fold_addr_expr_loc (loc, tem));
378 else if (TREE_CODE (ref) == MEM_REF
379 && integer_zerop (TREE_OPERAND (ref, 1)))
380 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
381 }
382
383 else if (TREE_CODE (rhs) == CONSTRUCTOR
384 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
385 && (CONSTRUCTOR_NELTS (rhs)
386 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
387 {
388 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
389 unsigned i;
390 tree val;
391
392 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
393 if (TREE_CODE (val) != INTEGER_CST
394 && TREE_CODE (val) != REAL_CST
395 && TREE_CODE (val) != FIXED_CST)
396 return NULL_TREE;
397
398 return build_vector_from_ctor (TREE_TYPE (rhs),
399 CONSTRUCTOR_ELTS (rhs));
400 }
401
402 else if (DECL_P (rhs))
403 return get_symbol_constant_value (rhs);
404
405 /* If we couldn't fold the RHS, hand over to the generic
406 fold routines. */
407 if (result == NULL_TREE)
408 result = fold (rhs);
409
410 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
411 that may have been added by fold, and "useless" type
412 conversions that might now be apparent due to propagation. */
413 STRIP_USELESS_TYPE_CONVERSION (result);
414
415 if (result != rhs && valid_gimple_rhs_p (result))
416 return result;
417
418 return NULL_TREE;
419 }
420 break;
421
422 case GIMPLE_UNARY_RHS:
423 {
424 tree rhs = gimple_assign_rhs1 (stmt);
425
426 result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
427 if (result)
428 {
429 /* If the operation was a conversion do _not_ mark a
430 resulting constant with TREE_OVERFLOW if the original
431 constant was not. These conversions have implementation-defined
432 behavior and retaining the TREE_OVERFLOW flag here
433 would confuse later passes such as VRP. */
434 if (CONVERT_EXPR_CODE_P (subcode)
435 && TREE_CODE (result) == INTEGER_CST
436 && TREE_CODE (rhs) == INTEGER_CST)
437 TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);
438
439 STRIP_USELESS_TYPE_CONVERSION (result);
440 if (valid_gimple_rhs_p (result))
441 return result;
442 }
443 }
444 break;
445
446 case GIMPLE_BINARY_RHS:
447 /* Try to canonicalize for boolean-typed X the comparisons
448 X == 0, X == 1, X != 0, and X != 1. */
449 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
450 || gimple_assign_rhs_code (stmt) == NE_EXPR)
451 {
452 tree lhs = gimple_assign_lhs (stmt);
453 tree op1 = gimple_assign_rhs1 (stmt);
454 tree op2 = gimple_assign_rhs2 (stmt);
455 tree type = TREE_TYPE (op1);
456
457 /* Check whether the comparison operands are of the same boolean
458 type as the result type.
459 Check that the second operand is an integer constant with value
460 one or zero. */
461 if (TREE_CODE (op2) == INTEGER_CST
462 && (integer_zerop (op2) || integer_onep (op2))
463 && useless_type_conversion_p (TREE_TYPE (lhs), type))
464 {
465 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
466 bool is_logical_not = false;
467
468 /* X == 0 and X != 1 yield the logical-not of X;
469 X == 1 and X != 0 yield X itself. */
470 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
471 || (cmp_code == NE_EXPR && integer_onep (op2)))
472 is_logical_not = true;
473
474 if (is_logical_not == false)
475 result = op1;
476 /* Only for X of one-bit precision is the transformation
477 !X -> ~X valid. */
478 else if (TYPE_PRECISION (type) == 1)
479 result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
480 type, op1);
481 /* Otherwise we use !X -> X ^ 1. */
482 else
483 result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
484 type, op1, build_int_cst (type, 1));
485
486 }
487 }
488
489 if (!result)
490 result = fold_binary_loc (loc, subcode,
491 TREE_TYPE (gimple_assign_lhs (stmt)),
492 gimple_assign_rhs1 (stmt),
493 gimple_assign_rhs2 (stmt));
494
495 if (result)
496 {
497 STRIP_USELESS_TYPE_CONVERSION (result);
498 if (valid_gimple_rhs_p (result))
499 return result;
500 }
501 break;
502
503 case GIMPLE_TERNARY_RHS:
504 /* Try to fold a conditional expression. */
505 if (gimple_assign_rhs_code (stmt) == COND_EXPR)
506 {
507 tree op0 = gimple_assign_rhs1 (stmt);
508 tree tem;
509 bool set = false;
510 location_t cond_loc = gimple_location (stmt);
511
512 if (COMPARISON_CLASS_P (op0))
513 {
514 fold_defer_overflow_warnings ();
515 tem = fold_binary_loc (cond_loc,
516 TREE_CODE (op0), TREE_TYPE (op0),
517 TREE_OPERAND (op0, 0),
518 TREE_OPERAND (op0, 1));
519 /* This is actually a conditional expression, not a GIMPLE
520 conditional statement; however, the valid_gimple_rhs_p
521 test still applies. */
522 set = (tem && is_gimple_condexpr (tem)
523 && valid_gimple_rhs_p (tem));
524 fold_undefer_overflow_warnings (set, stmt, 0);
525 }
526 else if (is_gimple_min_invariant (op0))
527 {
528 tem = op0;
529 set = true;
530 }
531 else
532 return NULL_TREE;
533
534 if (set)
535 result = fold_build3_loc (cond_loc, COND_EXPR,
536 TREE_TYPE (gimple_assign_lhs (stmt)), tem,
537 gimple_assign_rhs2 (stmt),
538 gimple_assign_rhs3 (stmt));
539 }
540
541 if (!result)
542 result = fold_ternary_loc (loc, subcode,
543 TREE_TYPE (gimple_assign_lhs (stmt)),
544 gimple_assign_rhs1 (stmt),
545 gimple_assign_rhs2 (stmt),
546 gimple_assign_rhs3 (stmt));
547
548 if (result)
549 {
550 STRIP_USELESS_TYPE_CONVERSION (result);
551 if (valid_gimple_rhs_p (result))
552 return result;
553 }
554 break;
555
556 case GIMPLE_INVALID_RHS:
557 gcc_unreachable ();
558 }
559
560 return NULL_TREE;
561 }
562
563 /* Attempt to fold a conditional statement. Return true if any changes were
564 made. We only attempt to fold the condition expression, and do not perform
565 any transformation that would require alteration of the cfg. It is
566 assumed that the operands have been previously folded. */
567
568 static bool
569 fold_gimple_cond (gimple stmt)
570 {
571 tree result = fold_binary_loc (gimple_location (stmt),
572 gimple_cond_code (stmt),
573 boolean_type_node,
574 gimple_cond_lhs (stmt),
575 gimple_cond_rhs (stmt));
576
577 if (result)
578 {
579 STRIP_USELESS_TYPE_CONVERSION (result);
580 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
581 {
582 gimple_cond_set_condition_from_tree (stmt, result);
583 return true;
584 }
585 }
586
587 return false;
588 }
589
590 /* Convert EXPR into a GIMPLE value suitable for substitution on the
591 RHS of an assignment. Insert the necessary statements before
592 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL,
593 is replaced. If the call is expected to produce a result, then it
594 is replaced by an assignment of the new RHS to the result variable.
595 If the result is to be ignored, then the call is replaced by a
596 GIMPLE_NOP. A proper VDEF chain is retained by making the first
597 VUSE and the last VDEF of the whole sequence the same as those of the
598 replaced statement and using new SSA names for stores in between. */
599
600 void
601 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
602 {
603 tree lhs;
604 gimple stmt, new_stmt;
605 gimple_stmt_iterator i;
606 gimple_seq stmts = NULL;
607 struct gimplify_ctx gctx;
608 gimple laststore;
609 tree reaching_vuse;
610
611 stmt = gsi_stmt (*si_p);
612
613 gcc_assert (is_gimple_call (stmt));
614
615 push_gimplify_context (&gctx);
616 gctx.into_ssa = gimple_in_ssa_p (cfun);
617
618 lhs = gimple_call_lhs (stmt);
619 if (lhs == NULL_TREE)
620 {
621 gimplify_and_add (expr, &stmts);
622 /* We can end up folding a memcpy of an empty class assignment
623 which gets optimized away by C++ gimplification. */
624 if (gimple_seq_empty_p (stmts))
625 {
626 pop_gimplify_context (NULL);
627 if (gimple_in_ssa_p (cfun))
628 {
629 unlink_stmt_vdef (stmt);
630 release_defs (stmt);
631 }
632 gsi_replace (si_p, gimple_build_nop (), true);
633 return;
634 }
635 }
636 else
637 {
638 tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
639 new_stmt = gimple_build_assign (lhs, tmp);
640 i = gsi_last (stmts);
641 gsi_insert_after_without_update (&i, new_stmt,
642 GSI_CONTINUE_LINKING);
643 }
644
645 pop_gimplify_context (NULL);
646
647 if (gimple_has_location (stmt))
648 annotate_all_with_location (stmts, gimple_location (stmt));
649
650 /* First iterate over the replacement statements backward, assigning
651 virtual operands to their defining statements. */
652 laststore = NULL;
653 for (i = gsi_last (stmts); !gsi_end_p (i); gsi_prev (&i))
654 {
655 new_stmt = gsi_stmt (i);
656 if ((gimple_assign_single_p (new_stmt)
657 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
658 || (is_gimple_call (new_stmt)
659 && (gimple_call_flags (new_stmt)
660 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
661 {
662 tree vdef;
663 if (!laststore)
664 vdef = gimple_vdef (stmt);
665 else
666 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
667 gimple_set_vdef (new_stmt, vdef);
668 if (vdef && TREE_CODE (vdef) == SSA_NAME)
669 SSA_NAME_DEF_STMT (vdef) = new_stmt;
670 laststore = new_stmt;
671 }
672 }
673
674 /* Second iterate over the statements forward, assigning virtual
675 operands to their uses. */
676 reaching_vuse = gimple_vuse (stmt);
677 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
678 {
679 new_stmt = gsi_stmt (i);
680 /* If the new statement possibly has a VUSE, update it with the exact SSA
681 name we know will reach this one. */
682 if (gimple_has_mem_ops (new_stmt))
683 gimple_set_vuse (new_stmt, reaching_vuse);
684 gimple_set_modified (new_stmt, true);
685 if (gimple_vdef (new_stmt))
686 reaching_vuse = gimple_vdef (new_stmt);
687 }
688
689 /* If the new sequence does not do a store, release the virtual
690 definition of the original statement. */
691 if (reaching_vuse
692 && reaching_vuse == gimple_vuse (stmt))
693 {
694 tree vdef = gimple_vdef (stmt);
695 if (vdef
696 && TREE_CODE (vdef) == SSA_NAME)
697 {
698 unlink_stmt_vdef (stmt);
699 release_ssa_name (vdef);
700 }
701 }
702
703 /* Finally replace the original statement with the sequence. */
704 gsi_replace_with_seq (si_p, stmts, false);
705 }
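/* Worked example (a minimal sketch; the virtual operand names are
   hypothetical): if the replaced call had VUSE .MEM_5 and VDEF .MEM_7 and
   gimplification produced two stores S1 and S2, the backward pass above
   gives S2 the original VDEF .MEM_7 and S1 a fresh name .MEM_8, and the
   forward pass threads the uses as S1 { VUSE .MEM_5 } and S2
   { VUSE .MEM_8 }, keeping the chain .MEM_5 -> .MEM_8 -> .MEM_7 intact
   for downstream statements. */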
706
707 /* Return the string length, maximum string length or maximum value of
708 ARG in LENGTH.
709 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
710 is not NULL and, for TYPE == 0, its value is not equal to the length
711 we determine, or if we are unable to determine the length or value,
712 return false. VISITED is a bitmap of visited variables.
713 TYPE is 0 if the string length should be returned, 1 for the maximum
714 string length and 2 for the maximum value ARG can have. */
715
716 static bool
717 get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
718 {
719 tree var, val;
720 gimple def_stmt;
721
722 if (TREE_CODE (arg) != SSA_NAME)
723 {
724 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
725 if (TREE_CODE (arg) == ADDR_EXPR
726 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
727 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
728 {
729 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
730 if (TREE_CODE (aop0) == INDIRECT_REF
731 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
732 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
733 length, visited, type);
734 }
735
736 if (type == 2)
737 {
738 val = arg;
739 if (TREE_CODE (val) != INTEGER_CST
740 || tree_int_cst_sgn (val) < 0)
741 return false;
742 }
743 else
744 val = c_strlen (arg, 1);
745 if (!val)
746 return false;
747
748 if (*length)
749 {
750 if (type > 0)
751 {
752 if (TREE_CODE (*length) != INTEGER_CST
753 || TREE_CODE (val) != INTEGER_CST)
754 return false;
755
756 if (tree_int_cst_lt (*length, val))
757 *length = val;
758 return true;
759 }
760 else if (simple_cst_equal (val, *length) != 1)
761 return false;
762 }
763
764 *length = val;
765 return true;
766 }
767
768 /* If ARG is registered for SSA update we cannot look at its defining
769 statement. */
770 if (name_registered_for_update_p (arg))
771 return false;
772
773 /* If we were already here, break the infinite cycle. */
774 if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
775 return true;
776
777 var = arg;
778 def_stmt = SSA_NAME_DEF_STMT (var);
779
780 switch (gimple_code (def_stmt))
781 {
782 case GIMPLE_ASSIGN:
783 /* The RHS of the statement defining VAR must either have a
784 constant length or come from another SSA_NAME with a constant
785 length. */
786 if (gimple_assign_single_p (def_stmt)
787 || gimple_assign_unary_nop_p (def_stmt))
788 {
789 tree rhs = gimple_assign_rhs1 (def_stmt);
790 return get_maxval_strlen (rhs, length, visited, type);
791 }
792 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
793 {
794 tree op2 = gimple_assign_rhs2 (def_stmt);
795 tree op3 = gimple_assign_rhs3 (def_stmt);
796 return get_maxval_strlen (op2, length, visited, type)
797 && get_maxval_strlen (op3, length, visited, type);
798 }
799 return false;
800
801 case GIMPLE_PHI:
802 {
803 /* All the arguments of the PHI node must have the same constant
804 length. */
805 unsigned i;
806
807 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
808 {
809 tree arg = gimple_phi_arg (def_stmt, i)->def;
810
811 /* If this PHI has itself as an argument, we cannot
812 determine the string length of this argument. However,
813 if we can find a constant string length for the other
814 PHI args then we can still be sure that this is a
815 constant string length. So be optimistic and just
816 continue with the next argument. */
817 if (arg == gimple_phi_result (def_stmt))
818 continue;
819
820 if (!get_maxval_strlen (arg, length, visited, type))
821 return false;
822 }
823 }
824 return true;
825
826 default:
827 return false;
828 }
829 }
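/* Worked example (a minimal sketch; the SSA names are hypothetical): for
   source code like

     s = b ? "hi" : "world";
     n = strlen (s);

   S is defined by a PHI over two string constants. With TYPE == 0 the
   exact lengths 2 and 5 disagree, so the walk above returns false; with
   TYPE == 1 it succeeds and records the maximum string length 5 in
   *LENGTH. */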
830
831
832 /* Fold a builtin call in statement STMT. Returns a simplified tree.
833 We may return a non-constant expression, including another call
834 to a different function and with different arguments, e.g.,
835 substituting memcpy for strcpy when the string length is known.
836 Note that some builtins expand into inline code that may not
837 be valid in GIMPLE. Callers must take care. */
838
839 tree
840 gimple_fold_builtin (gimple stmt)
841 {
842 tree result, val[3];
843 tree callee, a;
844 int arg_idx, type;
845 bitmap visited;
846 bool ignore;
847 int nargs;
848 location_t loc = gimple_location (stmt);
849
850 gcc_assert (is_gimple_call (stmt));
851
852 ignore = (gimple_call_lhs (stmt) == NULL);
853
854 /* First try the generic builtin folder. If that succeeds, return the
855 result directly. */
856 result = fold_call_stmt (stmt, ignore);
857 if (result)
858 {
859 if (ignore)
860 STRIP_NOPS (result);
861 return result;
862 }
863
864 /* Ignore MD builtins. */
865 callee = gimple_call_fndecl (stmt);
866 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
867 return NULL_TREE;
868
869 /* Give up for always_inline inline builtins until they are
870 inlined. */
871 if (avoid_folding_inline_builtin (callee))
872 return NULL_TREE;
873
874 /* If the builtin could not be folded, and it has no argument list,
875 we're done. */
876 nargs = gimple_call_num_args (stmt);
877 if (nargs == 0)
878 return NULL_TREE;
879
880 /* Limit the work to builtins we know how to simplify. */
881 switch (DECL_FUNCTION_CODE (callee))
882 {
883 case BUILT_IN_STRLEN:
884 case BUILT_IN_FPUTS:
885 case BUILT_IN_FPUTS_UNLOCKED:
886 arg_idx = 0;
887 type = 0;
888 break;
889 case BUILT_IN_STRCPY:
890 case BUILT_IN_STRNCPY:
891 arg_idx = 1;
892 type = 0;
893 break;
894 case BUILT_IN_MEMCPY_CHK:
895 case BUILT_IN_MEMPCPY_CHK:
896 case BUILT_IN_MEMMOVE_CHK:
897 case BUILT_IN_MEMSET_CHK:
898 case BUILT_IN_STRNCPY_CHK:
899 case BUILT_IN_STPNCPY_CHK:
900 arg_idx = 2;
901 type = 2;
902 break;
903 case BUILT_IN_STRCPY_CHK:
904 case BUILT_IN_STPCPY_CHK:
905 arg_idx = 1;
906 type = 1;
907 break;
908 case BUILT_IN_SNPRINTF_CHK:
909 case BUILT_IN_VSNPRINTF_CHK:
910 arg_idx = 1;
911 type = 2;
912 break;
913 default:
914 return NULL_TREE;
915 }
916
917 if (arg_idx >= nargs)
918 return NULL_TREE;
919
920 /* Try to use the dataflow information gathered by the CCP process. */
921 visited = BITMAP_ALLOC (NULL);
922 bitmap_clear (visited);
923
924 memset (val, 0, sizeof (val));
925 a = gimple_call_arg (stmt, arg_idx);
926 if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
927 val[arg_idx] = NULL_TREE;
928
929 BITMAP_FREE (visited);
930
931 result = NULL_TREE;
932 switch (DECL_FUNCTION_CODE (callee))
933 {
934 case BUILT_IN_STRLEN:
935 if (val[0] && nargs == 1)
936 {
937 tree new_val =
938 fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);
939
940 /* If the result is not a valid gimple value, or not a cast
941 of a valid gimple value, then we cannot use the result. */
942 if (is_gimple_val (new_val)
943 || (CONVERT_EXPR_P (new_val)
944 && is_gimple_val (TREE_OPERAND (new_val, 0))))
945 return new_val;
946 }
947 break;
948
949 case BUILT_IN_STRCPY:
950 if (val[1] && is_gimple_val (val[1]) && nargs == 2)
951 result = fold_builtin_strcpy (loc, callee,
952 gimple_call_arg (stmt, 0),
953 gimple_call_arg (stmt, 1),
954 val[1]);
955 break;
956
957 case BUILT_IN_STRNCPY:
958 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
959 result = fold_builtin_strncpy (loc, callee,
960 gimple_call_arg (stmt, 0),
961 gimple_call_arg (stmt, 1),
962 gimple_call_arg (stmt, 2),
963 val[1]);
964 break;
965
966 case BUILT_IN_FPUTS:
967 if (nargs == 2)
968 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
969 gimple_call_arg (stmt, 1),
970 ignore, false, val[0]);
971 break;
972
973 case BUILT_IN_FPUTS_UNLOCKED:
974 if (nargs == 2)
975 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
976 gimple_call_arg (stmt, 1),
977 ignore, true, val[0]);
978 break;
979
980 case BUILT_IN_MEMCPY_CHK:
981 case BUILT_IN_MEMPCPY_CHK:
982 case BUILT_IN_MEMMOVE_CHK:
983 case BUILT_IN_MEMSET_CHK:
984 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
985 result = fold_builtin_memory_chk (loc, callee,
986 gimple_call_arg (stmt, 0),
987 gimple_call_arg (stmt, 1),
988 gimple_call_arg (stmt, 2),
989 gimple_call_arg (stmt, 3),
990 val[2], ignore,
991 DECL_FUNCTION_CODE (callee));
992 break;
993
994 case BUILT_IN_STRCPY_CHK:
995 case BUILT_IN_STPCPY_CHK:
996 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
997 result = fold_builtin_stxcpy_chk (loc, callee,
998 gimple_call_arg (stmt, 0),
999 gimple_call_arg (stmt, 1),
1000 gimple_call_arg (stmt, 2),
1001 val[1], ignore,
1002 DECL_FUNCTION_CODE (callee));
1003 break;
1004
1005 case BUILT_IN_STRNCPY_CHK:
1006 case BUILT_IN_STPNCPY_CHK:
1007 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
1008 result = fold_builtin_stxncpy_chk (loc, gimple_call_arg (stmt, 0),
1009 gimple_call_arg (stmt, 1),
1010 gimple_call_arg (stmt, 2),
1011 gimple_call_arg (stmt, 3),
1012 val[2], ignore,
1013 DECL_FUNCTION_CODE (callee));
1014 break;
1015
1016 case BUILT_IN_SNPRINTF_CHK:
1017 case BUILT_IN_VSNPRINTF_CHK:
1018 if (val[1] && is_gimple_val (val[1]))
1019 result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
1020 DECL_FUNCTION_CODE (callee));
1021 break;
1022
1023 default:
1024 gcc_unreachable ();
1025 }
1026
1027 if (result && ignore)
1028 result = fold_ignored_result (result);
1029 return result;
1030 }
1031
1032
1033 /* Return a binfo to be used for devirtualization of calls based on an object
1034 represented by a declaration (i.e. a global or automatically allocated one)
1035 or NULL if it cannot be found or is not safe. CST is expected to be an
1036 ADDR_EXPR of such an object or the function will return NULL. Currently it
1037 is safe to use such a binfo only if it has no base binfo (i.e. no ancestors).
1038 EXPECTED_TYPE is the type of the class the virtual call belongs to. */
1039
1040 tree
1041 gimple_extract_devirt_binfo_from_cst (tree cst, tree expected_type)
1042 {
1043 HOST_WIDE_INT offset, size, max_size;
1044 tree base, type, binfo;
1045 bool last_artificial = false;
1046
1047 if (!flag_devirtualize
1048 || TREE_CODE (cst) != ADDR_EXPR
1049 || TREE_CODE (TREE_TYPE (TREE_TYPE (cst))) != RECORD_TYPE)
1050 return NULL_TREE;
1051
1052 cst = TREE_OPERAND (cst, 0);
1053 base = get_ref_base_and_extent (cst, &offset, &size, &max_size);
1054 type = TREE_TYPE (base);
1055 if (!DECL_P (base)
1056 || max_size == -1
1057 || max_size != size
1058 || TREE_CODE (type) != RECORD_TYPE)
1059 return NULL_TREE;
1060
1061 /* Find the sub-object the constant actually refers to and mark whether it is
1062 an artificial one (as opposed to a user-defined one). */
1063 while (true)
1064 {
1065 HOST_WIDE_INT pos, size;
1066 tree fld;
1067
1068 if (types_same_for_odr (type, expected_type))
1069 break;
1070 if (offset < 0)
1071 return NULL_TREE;
1072
1073 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1074 {
1075 if (TREE_CODE (fld) != FIELD_DECL)
1076 continue;
1077
1078 pos = int_bit_position (fld);
1079 size = tree_low_cst (DECL_SIZE (fld), 1);
1080 if (pos <= offset && (pos + size) > offset)
1081 break;
1082 }
1083 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
1084 return NULL_TREE;
1085
1086 last_artificial = DECL_ARTIFICIAL (fld);
1087 type = TREE_TYPE (fld);
1088 offset -= pos;
1089 }
1090 /* Artificial sub-objects are ancestors; we do not want to use them for
1091 devirtualization, at least not here. */
1092 if (last_artificial)
1093 return NULL_TREE;
1094 binfo = TYPE_BINFO (type);
1095 if (!binfo || BINFO_N_BASE_BINFOS (binfo) > 0)
1096 return NULL_TREE;
1097 else
1098 return binfo;
1099 }
1100
1101 /* Attempt to fold a call statement referenced by the statement iterator GSI.
1102 The statement may be replaced by another statement, e.g., if the call
1103 simplifies to a constant value. Return true if any changes were made.
1104 It is assumed that the operands have been previously folded. */
1105
1106 static bool
1107 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
1108 {
1109 gimple stmt = gsi_stmt (*gsi);
1110 tree callee;
1111 bool changed = false;
1112 unsigned i;
1113
1114 /* Fold *& in call arguments. */
1115 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1116 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
1117 {
1118 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
1119 if (tmp)
1120 {
1121 gimple_call_set_arg (stmt, i, tmp);
1122 changed = true;
1123 }
1124 }
1125
1126 /* Check for virtual calls that became direct calls. */
1127 callee = gimple_call_fn (stmt);
1128 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
1129 {
1130 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
1131 {
1132 if (dump_file && virtual_method_call_p (callee)
1133 && !possible_polymorphic_call_target_p
1134 (callee, cgraph_get_node (gimple_call_addr_fndecl
1135 (OBJ_TYPE_REF_EXPR (callee)))))
1136 {
1137 fprintf (dump_file,
1138 "Type inheritance inconsistent devirtualization of ");
1139 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
1140 fprintf (dump_file, " to ");
1141 print_generic_expr (dump_file, callee, TDF_SLIM);
1142 fprintf (dump_file, "\n");
1143 }
1144
1145 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
1146 changed = true;
1147 }
1148 else if (virtual_method_call_p (callee))
1149 {
1150 tree obj = OBJ_TYPE_REF_OBJECT (callee);
1151 tree binfo = gimple_extract_devirt_binfo_from_cst
1152 (obj, obj_type_ref_class (callee));
1153 if (binfo)
1154 {
1155 HOST_WIDE_INT token
1156 = TREE_INT_CST_LOW (OBJ_TYPE_REF_TOKEN (callee));
1157 tree fndecl = gimple_get_virt_method_for_binfo (token, binfo);
1158 if (fndecl)
1159 {
1160 #ifdef ENABLE_CHECKING
1161 gcc_assert (possible_polymorphic_call_target_p
1162 (callee, cgraph_get_node (fndecl)));
1163
1164 #endif
1165 gimple_call_set_fndecl (stmt, fndecl);
1166 changed = true;
1167 }
1168 }
1169 }
1170 }
1171
1172 if (inplace)
1173 return changed;
1174
1175 /* Check for builtins that CCP can handle using information not
1176 available in the generic fold routines. */
1177 callee = gimple_call_fndecl (stmt);
1178 if (callee && DECL_BUILT_IN (callee))
1179 {
1180 tree result = gimple_fold_builtin (stmt);
1181 if (result)
1182 {
1183 if (!update_call_from_tree (gsi, result))
1184 gimplify_and_update_call_from_tree (gsi, result);
1185 changed = true;
1186 }
1187 else if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
1188 changed |= targetm.gimple_fold_builtin (gsi);
1189 }
1190
1191 return changed;
1192 }
1193
1194 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
1195 distinguishes the two cases. */
1196
1197 static bool
1198 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
1199 {
1200 bool changed = false;
1201 gimple stmt = gsi_stmt (*gsi);
1202 unsigned i;
1203
1204 /* Fold the main computation performed by the statement. */
1205 switch (gimple_code (stmt))
1206 {
1207 case GIMPLE_ASSIGN:
1208 {
1209 unsigned old_num_ops = gimple_num_ops (stmt);
1210 enum tree_code subcode = gimple_assign_rhs_code (stmt);
1211 tree lhs = gimple_assign_lhs (stmt);
1212 tree new_rhs;
1213 /* First canonicalize operand order. This avoids building new
1214 trees if this is the only thing fold would later do. */
1215 if ((commutative_tree_code (subcode)
1216 || commutative_ternary_tree_code (subcode))
1217 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
1218 gimple_assign_rhs2 (stmt), false))
1219 {
1220 tree tem = gimple_assign_rhs1 (stmt);
1221 gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
1222 gimple_assign_set_rhs2 (stmt, tem);
1223 changed = true;
1224 }
1225 new_rhs = fold_gimple_assign (gsi);
1226 if (new_rhs
1227 && !useless_type_conversion_p (TREE_TYPE (lhs),
1228 TREE_TYPE (new_rhs)))
1229 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
1230 if (new_rhs
1231 && (!inplace
1232 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
1233 {
1234 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
1235 changed = true;
1236 }
1237 break;
1238 }
1239
1240 case GIMPLE_COND:
1241 changed |= fold_gimple_cond (stmt);
1242 break;
1243
1244 case GIMPLE_CALL:
1245 changed |= gimple_fold_call (gsi, inplace);
1246 break;
1247
1248 case GIMPLE_ASM:
1249 /* Fold *& in asm operands. */
1250 {
1251 size_t noutputs;
1252 const char **oconstraints;
1253 const char *constraint;
1254 bool allows_mem, allows_reg;
1255
1256 noutputs = gimple_asm_noutputs (stmt);
1257 oconstraints = XALLOCAVEC (const char *, noutputs);
1258
1259 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
1260 {
1261 tree link = gimple_asm_output_op (stmt, i);
1262 tree op = TREE_VALUE (link);
1263 oconstraints[i]
1264 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1265 if (REFERENCE_CLASS_P (op)
1266 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
1267 {
1268 TREE_VALUE (link) = op;
1269 changed = true;
1270 }
1271 }
1272 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
1273 {
1274 tree link = gimple_asm_input_op (stmt, i);
1275 tree op = TREE_VALUE (link);
1276 constraint
1277 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1278 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
1279 oconstraints, &allows_mem, &allows_reg);
1280 if (REFERENCE_CLASS_P (op)
1281 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
1282 != NULL_TREE)
1283 {
1284 TREE_VALUE (link) = op;
1285 changed = true;
1286 }
1287 }
1288 }
1289 break;
1290
1291 case GIMPLE_DEBUG:
1292 if (gimple_debug_bind_p (stmt))
1293 {
1294 tree val = gimple_debug_bind_get_value (stmt);
1295 if (val
1296 && REFERENCE_CLASS_P (val))
1297 {
1298 tree tem = maybe_fold_reference (val, false);
1299 if (tem)
1300 {
1301 gimple_debug_bind_set_value (stmt, tem);
1302 changed = true;
1303 }
1304 }
1305 else if (val
1306 && TREE_CODE (val) == ADDR_EXPR)
1307 {
1308 tree ref = TREE_OPERAND (val, 0);
1309 tree tem = maybe_fold_reference (ref, false);
1310 if (tem)
1311 {
1312 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
1313 gimple_debug_bind_set_value (stmt, tem);
1314 changed = true;
1315 }
1316 }
1317 }
1318 break;
1319
1320 default:;
1321 }
1322
1323 stmt = gsi_stmt (*gsi);
1324
1325 /* Fold *& on the lhs. */
1326 if (gimple_has_lhs (stmt))
1327 {
1328 tree lhs = gimple_get_lhs (stmt);
1329 if (lhs && REFERENCE_CLASS_P (lhs))
1330 {
1331 tree new_lhs = maybe_fold_reference (lhs, true);
1332 if (new_lhs)
1333 {
1334 gimple_set_lhs (stmt, new_lhs);
1335 changed = true;
1336 }
1337 }
1338 }
1339
1340 return changed;
1341 }
1342
1343 /* Fold the statement pointed to by GSI. In some cases, this function may
1344 replace the whole statement with a new one. Returns true iff folding
1345 makes any changes.
1346 The statement pointed to by GSI should be in valid gimple form but may
1347 be in an unfolded state resulting from, for example, constant propagation,
1348 which can produce *&x = 0. */
1349
1350 bool
1351 fold_stmt (gimple_stmt_iterator *gsi)
1352 {
1353 return fold_stmt_1 (gsi, false);
1354 }
1355
1356 /* Perform the minimal folding on statement *GSI. Only operations like
1357 *&x created by constant propagation are handled. The statement cannot
1358 be replaced with a new one. Return true if the statement was
1359 changed, false otherwise.
1360 The statement *GSI should be in valid gimple form but may
1361 be in an unfolded state resulting from, for example, constant propagation,
1362 which can produce *&x = 0. */
1363
1364 bool
1365 fold_stmt_inplace (gimple_stmt_iterator *gsi)
1366 {
1367 gimple stmt = gsi_stmt (*gsi);
1368 bool changed = fold_stmt_1 (gsi, true);
1369 gcc_assert (gsi_stmt (*gsi) == stmt);
1370 return changed;
1371 }
1372
1373 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
1374 if EXPR is null or we don't know how.
1375 If non-null, the result always has boolean type. */
1376
1377 static tree
1378 canonicalize_bool (tree expr, bool invert)
1379 {
1380 if (!expr)
1381 return NULL_TREE;
1382 else if (invert)
1383 {
1384 if (integer_nonzerop (expr))
1385 return boolean_false_node;
1386 else if (integer_zerop (expr))
1387 return boolean_true_node;
1388 else if (TREE_CODE (expr) == SSA_NAME)
1389 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
1390 build_int_cst (TREE_TYPE (expr), 0));
1391 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1392 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
1393 boolean_type_node,
1394 TREE_OPERAND (expr, 0),
1395 TREE_OPERAND (expr, 1));
1396 else
1397 return NULL_TREE;
1398 }
1399 else
1400 {
1401 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1402 return expr;
1403 if (integer_nonzerop (expr))
1404 return boolean_true_node;
1405 else if (integer_zerop (expr))
1406 return boolean_false_node;
1407 else if (TREE_CODE (expr) == SSA_NAME)
1408 return fold_build2 (NE_EXPR, boolean_type_node, expr,
1409 build_int_cst (TREE_TYPE (expr), 0));
1410 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1411 return fold_build2 (TREE_CODE (expr),
1412 boolean_type_node,
1413 TREE_OPERAND (expr, 0),
1414 TREE_OPERAND (expr, 1));
1415 else
1416 return NULL_TREE;
1417 }
1418 }
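/* Worked example (a minimal sketch; the SSA names are hypothetical):
   canonicalize_bool (x_1 < y_2, false) yields the same comparison with
   boolean type; canonicalize_bool (x_1 < y_2, true) yields x_1 >= y_2 via
   invert_tree_comparison; and for a plain SSA name n_3 it yields n_3 != 0,
   or n_3 == 0 when INVERT is true. */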
1419
1420 /* Check to see if a boolean expression EXPR is logically equivalent to the
1421 comparison (OP1 CODE OP2). Check for various identities involving
1422 SSA_NAMEs. */
1423
1424 static bool
1425 same_bool_comparison_p (const_tree expr, enum tree_code code,
1426 const_tree op1, const_tree op2)
1427 {
1428 gimple s;
1429
1430 /* The obvious case. */
1431 if (TREE_CODE (expr) == code
1432 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
1433 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
1434 return true;
1435
1436 /* Check for comparing (name, name != 0) and the case where expr
1437 is an SSA_NAME with a definition matching the comparison. */
1438 if (TREE_CODE (expr) == SSA_NAME
1439 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1440 {
1441 if (operand_equal_p (expr, op1, 0))
1442 return ((code == NE_EXPR && integer_zerop (op2))
1443 || (code == EQ_EXPR && integer_nonzerop (op2)));
1444 s = SSA_NAME_DEF_STMT (expr);
1445 if (is_gimple_assign (s)
1446 && gimple_assign_rhs_code (s) == code
1447 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
1448 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
1449 return true;
1450 }
1451
1452 /* If op1 is of the form (name != 0) or (name == 0), and the definition
1453 of name is a comparison, recurse. */
1454 if (TREE_CODE (op1) == SSA_NAME
1455 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
1456 {
1457 s = SSA_NAME_DEF_STMT (op1);
1458 if (is_gimple_assign (s)
1459 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
1460 {
1461 enum tree_code c = gimple_assign_rhs_code (s);
1462 if ((c == NE_EXPR && integer_zerop (op2))
1463 || (c == EQ_EXPR && integer_nonzerop (op2)))
1464 return same_bool_comparison_p (expr, c,
1465 gimple_assign_rhs1 (s),
1466 gimple_assign_rhs2 (s));
1467 if ((c == EQ_EXPR && integer_zerop (op2))
1468 || (c == NE_EXPR && integer_nonzerop (op2)))
1469 return same_bool_comparison_p (expr,
1470 invert_tree_comparison (c, false),
1471 gimple_assign_rhs1 (s),
1472 gimple_assign_rhs2 (s));
1473 }
1474 }
1475 return false;
1476 }
1477
1478 /* Check to see if two boolean expressions OP1 and OP2 are logically
1479 equivalent. */
1480
1481 static bool
1482 same_bool_result_p (const_tree op1, const_tree op2)
1483 {
1484 /* Simple cases first. */
1485 if (operand_equal_p (op1, op2, 0))
1486 return true;
1487
1488 /* Check the cases where at least one of the operands is a comparison.
1489 These are a bit smarter than operand_equal_p in that they apply some
1490 identities on SSA_NAMEs. */
1491 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
1492 && same_bool_comparison_p (op1, TREE_CODE (op2),
1493 TREE_OPERAND (op2, 0),
1494 TREE_OPERAND (op2, 1)))
1495 return true;
1496 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
1497 && same_bool_comparison_p (op2, TREE_CODE (op1),
1498 TREE_OPERAND (op1, 0),
1499 TREE_OPERAND (op1, 1)))
1500 return true;
1501
1502 /* Default case. */
1503 return false;
1504 }
1505
1506 /* Forward declarations for some mutually recursive functions. */
1507
1508 static tree
1509 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1510 enum tree_code code2, tree op2a, tree op2b);
1511 static tree
1512 and_var_with_comparison (tree var, bool invert,
1513 enum tree_code code2, tree op2a, tree op2b);
1514 static tree
1515 and_var_with_comparison_1 (gimple stmt,
1516 enum tree_code code2, tree op2a, tree op2b);
1517 static tree
1518 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1519 enum tree_code code2, tree op2a, tree op2b);
1520 static tree
1521 or_var_with_comparison (tree var, bool invert,
1522 enum tree_code code2, tree op2a, tree op2b);
1523 static tree
1524 or_var_with_comparison_1 (gimple stmt,
1525 enum tree_code code2, tree op2a, tree op2b);
1526
1527 /* Helper function for and_comparisons_1: try to simplify the AND of the
1528 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1529 If INVERT is true, invert the value of VAR before doing the AND.
1530 Return NULL_TREE if we can't simplify this to a single expression. */
1531
1532 static tree
1533 and_var_with_comparison (tree var, bool invert,
1534 enum tree_code code2, tree op2a, tree op2b)
1535 {
1536 tree t;
1537 gimple stmt = SSA_NAME_DEF_STMT (var);
1538
1539 /* We can only deal with variables whose definitions are assignments. */
1540 if (!is_gimple_assign (stmt))
1541 return NULL_TREE;
1542
1543 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1544 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1545 Then we only have to consider the simpler non-inverted cases. */
1546 if (invert)
1547 t = or_var_with_comparison_1 (stmt,
1548 invert_tree_comparison (code2, false),
1549 op2a, op2b);
1550 else
1551 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
1552 return canonicalize_bool (t, invert);
1553 }
1554
1555 /* Try to simplify the AND of the ssa variable defined by the assignment
1556 STMT with the comparison specified by (OP2A CODE2 OP2B).
1557 Return NULL_TREE if we can't simplify this to a single expression. */
1558
1559 static tree
1560 and_var_with_comparison_1 (gimple stmt,
1561 enum tree_code code2, tree op2a, tree op2b)
1562 {
1563 tree var = gimple_assign_lhs (stmt);
1564 tree true_test_var = NULL_TREE;
1565 tree false_test_var = NULL_TREE;
1566 enum tree_code innercode = gimple_assign_rhs_code (stmt);
1567
1568 /* Check for identities like (var AND (var == 0)) => false. */
1569 if (TREE_CODE (op2a) == SSA_NAME
1570 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
1571 {
1572 if ((code2 == NE_EXPR && integer_zerop (op2b))
1573 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
1574 {
1575 true_test_var = op2a;
1576 if (var == true_test_var)
1577 return var;
1578 }
1579 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
1580 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
1581 {
1582 false_test_var = op2a;
1583 if (var == false_test_var)
1584 return boolean_false_node;
1585 }
1586 }
1587
1588 /* If the definition is a comparison, recurse on it. */
1589 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
1590 {
1591 tree t = and_comparisons_1 (innercode,
1592 gimple_assign_rhs1 (stmt),
1593 gimple_assign_rhs2 (stmt),
1594 code2,
1595 op2a,
1596 op2b);
1597 if (t)
1598 return t;
1599 }
1600
1601 /* If the definition is an AND or OR expression, we may be able to
1602 simplify by reassociating. */
1603 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
1604 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
1605 {
1606 tree inner1 = gimple_assign_rhs1 (stmt);
1607 tree inner2 = gimple_assign_rhs2 (stmt);
1608 gimple s;
1609 tree t;
1610 tree partial = NULL_TREE;
1611 bool is_and = (innercode == BIT_AND_EXPR);
1612
1613 /* Check for boolean identities that don't require recursive examination
1614 of inner1/inner2:
1615 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
1616 inner1 AND (inner1 OR inner2) => inner1
1617 !inner1 AND (inner1 AND inner2) => false
1618 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
1619 Likewise for similar cases involving inner2. */
1620 if (inner1 == true_test_var)
1621 return (is_and ? var : inner1);
1622 else if (inner2 == true_test_var)
1623 return (is_and ? var : inner2);
1624 else if (inner1 == false_test_var)
1625 return (is_and
1626 ? boolean_false_node
1627 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
1628 else if (inner2 == false_test_var)
1629 return (is_and
1630 ? boolean_false_node
1631 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
1632
1633 /* Next, redistribute/reassociate the AND across the inner tests.
1634 Compute the first partial result, (inner1 AND (op2a code op2b)) */
1635 if (TREE_CODE (inner1) == SSA_NAME
1636 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
1637 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1638 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1639 gimple_assign_rhs1 (s),
1640 gimple_assign_rhs2 (s),
1641 code2, op2a, op2b)))
1642 {
1643 /* Handle the AND case, where we are reassociating:
1644 (inner1 AND inner2) AND (op2a code2 op2b)
1645 => (t AND inner2)
1646 If the partial result t is a constant, we win. Otherwise
1647 continue on to try reassociating with the other inner test. */
1648 if (is_and)
1649 {
1650 if (integer_onep (t))
1651 return inner2;
1652 else if (integer_zerop (t))
1653 return boolean_false_node;
1654 }
1655
1656 /* Handle the OR case, where we are redistributing:
1657 (inner1 OR inner2) AND (op2a code2 op2b)
1658 => (t OR (inner2 AND (op2a code2 op2b))) */
1659 else if (integer_onep (t))
1660 return boolean_true_node;
1661
1662 /* Save partial result for later. */
1663 partial = t;
1664 }
1665
1666 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
1667 if (TREE_CODE (inner2) == SSA_NAME
1668 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
1669 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1670 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1671 gimple_assign_rhs1 (s),
1672 gimple_assign_rhs2 (s),
1673 code2, op2a, op2b)))
1674 {
1675 /* Handle the AND case, where we are reassociating:
1676 (inner1 AND inner2) AND (op2a code2 op2b)
1677 => (inner1 AND t) */
1678 if (is_and)
1679 {
1680 if (integer_onep (t))
1681 return inner1;
1682 else if (integer_zerop (t))
1683 return boolean_false_node;
1684 /* If both are the same, we can apply the identity
1685 (x AND x) == x. */
1686 else if (partial && same_bool_result_p (t, partial))
1687 return t;
1688 }
1689
1690 /* Handle the OR case, where we are redistributing:
1691 (inner1 OR inner2) AND (op2a code2 op2b)
1692 => (t OR (inner1 AND (op2a code2 op2b)))
1693 => (t OR partial) */
1694 else
1695 {
1696 if (integer_onep (t))
1697 return boolean_true_node;
1698 else if (partial)
1699 {
1700 /* We already got a simplification for the other
1701 operand to the redistributed OR expression. The
1702 interesting case is when at least one is false.
1703 Or, if both are the same, we can apply the identity
1704 (x OR x) == x. */
1705 if (integer_zerop (partial))
1706 return t;
1707 else if (integer_zerop (t))
1708 return partial;
1709 else if (same_bool_result_p (t, partial))
1710 return t;
1711 }
1712 }
1713 }
1714 }
1715 return NULL_TREE;
1716 }
1717
1718 /* Try to simplify the AND of two comparisons defined by
1719 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1720 If this can be done without constructing an intermediate value,
1721 return the resulting tree; otherwise NULL_TREE is returned.
1722 This function is deliberately asymmetric as it recurses on SSA_DEFs
1723 in the first comparison but not the second. */
1724
1725 static tree
1726 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1727 enum tree_code code2, tree op2a, tree op2b)
1728 {
1729 tree truth_type = truth_type_for (TREE_TYPE (op1a));
1730
1731 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
1732 if (operand_equal_p (op1a, op2a, 0)
1733 && operand_equal_p (op1b, op2b, 0))
1734 {
1735 /* Result will be either NULL_TREE, or a combined comparison. */
1736 tree t = combine_comparisons (UNKNOWN_LOCATION,
1737 TRUTH_ANDIF_EXPR, code1, code2,
1738 truth_type, op1a, op1b);
1739 if (t)
1740 return t;
1741 }
1742
1743 /* Likewise the swapped case of the above. */
1744 if (operand_equal_p (op1a, op2b, 0)
1745 && operand_equal_p (op1b, op2a, 0))
1746 {
1747 /* Result will be either NULL_TREE, or a combined comparison. */
1748 tree t = combine_comparisons (UNKNOWN_LOCATION,
1749 TRUTH_ANDIF_EXPR, code1,
1750 swap_tree_comparison (code2),
1751 truth_type, op1a, op1b);
1752 if (t)
1753 return t;
1754 }
1755
1756 /* If both comparisons are of the same value against constants, we might
1757 be able to merge them. */
1758 if (operand_equal_p (op1a, op2a, 0)
1759 && TREE_CODE (op1b) == INTEGER_CST
1760 && TREE_CODE (op2b) == INTEGER_CST)
1761 {
1762 int cmp = tree_int_cst_compare (op1b, op2b);
1763
1764 /* If we have (op1a == op1b), we should either be able to
1765 return that or FALSE, depending on whether the constant op1b
1766 also satisfies the other comparison against op2b. */
1767 if (code1 == EQ_EXPR)
1768 {
1769 bool done = true;
1770 bool val;
1771 switch (code2)
1772 {
1773 case EQ_EXPR: val = (cmp == 0); break;
1774 case NE_EXPR: val = (cmp != 0); break;
1775 case LT_EXPR: val = (cmp < 0); break;
1776 case GT_EXPR: val = (cmp > 0); break;
1777 case LE_EXPR: val = (cmp <= 0); break;
1778 case GE_EXPR: val = (cmp >= 0); break;
1779 default: done = false;
1780 }
1781 if (done)
1782 {
1783 if (val)
1784 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1785 else
1786 return boolean_false_node;
1787 }
1788 }
1789 /* Likewise if the second comparison is an == comparison. */
1790 else if (code2 == EQ_EXPR)
1791 {
1792 bool done = true;
1793 bool val;
1794 switch (code1)
1795 {
1796 case EQ_EXPR: val = (cmp == 0); break;
1797 case NE_EXPR: val = (cmp != 0); break;
1798 case LT_EXPR: val = (cmp > 0); break;
1799 case GT_EXPR: val = (cmp < 0); break;
1800 case LE_EXPR: val = (cmp >= 0); break;
1801 case GE_EXPR: val = (cmp <= 0); break;
1802 default: done = false;
1803 }
1804 if (done)
1805 {
1806 if (val)
1807 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1808 else
1809 return boolean_false_node;
1810 }
1811 }
1812
1813 /* Same business with inequality tests. */
1814 else if (code1 == NE_EXPR)
1815 {
1816 bool val;
1817 switch (code2)
1818 {
1819 case EQ_EXPR: val = (cmp != 0); break;
1820 case NE_EXPR: val = (cmp == 0); break;
1821 case LT_EXPR: val = (cmp >= 0); break;
1822 case GT_EXPR: val = (cmp <= 0); break;
1823 case LE_EXPR: val = (cmp > 0); break;
1824 case GE_EXPR: val = (cmp < 0); break;
1825 default:
1826 val = false;
1827 }
1828 if (val)
1829 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1830 }
1831 else if (code2 == NE_EXPR)
1832 {
1833 bool val;
1834 switch (code1)
1835 {
1836 case EQ_EXPR: val = (cmp == 0); break;
1837 case NE_EXPR: val = (cmp != 0); break;
1838 case LT_EXPR: val = (cmp <= 0); break;
1839 case GT_EXPR: val = (cmp >= 0); break;
1840 case LE_EXPR: val = (cmp < 0); break;
1841 case GE_EXPR: val = (cmp > 0); break;
1842 default:
1843 val = false;
1844 }
1845 if (val)
1846 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1847 }
1848
1849 /* Choose the more restrictive of two < or <= comparisons. */
1850 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
1851 && (code2 == LT_EXPR || code2 == LE_EXPR))
1852 {
1853 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
1854 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1855 else
1856 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1857 }
1858
1859 /* Likewise choose the more restrictive of two > or >= comparisons. */
1860 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
1861 && (code2 == GT_EXPR || code2 == GE_EXPR))
1862 {
1863 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
1864 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1865 else
1866 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1867 }
1868
1869 /* Check for singleton ranges. */
1870 else if (cmp == 0
1871 && ((code1 == LE_EXPR && code2 == GE_EXPR)
1872 || (code1 == GE_EXPR && code2 == LE_EXPR)))
1873 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
1874
1875 /* Check for disjoint ranges. */
1876 else if (cmp <= 0
1877 && (code1 == LT_EXPR || code1 == LE_EXPR)
1878 && (code2 == GT_EXPR || code2 == GE_EXPR))
1879 return boolean_false_node;
1880 else if (cmp >= 0
1881 && (code1 == GT_EXPR || code1 == GE_EXPR)
1882 && (code2 == LT_EXPR || code2 == LE_EXPR))
1883 return boolean_false_node;
1884 }
1885
1886 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
1887 NAME's definition is a truth value. See if there are any simplifications
1888 that can be done against the NAME's definition. */
1889 if (TREE_CODE (op1a) == SSA_NAME
1890 && (code1 == NE_EXPR || code1 == EQ_EXPR)
1891 && (integer_zerop (op1b) || integer_onep (op1b)))
1892 {
1893 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
1894 || (code1 == NE_EXPR && integer_onep (op1b)));
1895 gimple stmt = SSA_NAME_DEF_STMT (op1a);
1896 switch (gimple_code (stmt))
1897 {
1898 case GIMPLE_ASSIGN:
1899 /* Try to simplify by copy-propagating the definition. */
1900 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
1901
1902 case GIMPLE_PHI:
1903 /* If every argument to the PHI produces the same result when
1904 ANDed with the second comparison, we win.
1905 Do not do this unless the type is bool since we need a bool
1906 result here anyway. */
1907 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
1908 {
1909 tree result = NULL_TREE;
1910 unsigned i;
1911 for (i = 0; i < gimple_phi_num_args (stmt); i++)
1912 {
1913 tree arg = gimple_phi_arg_def (stmt, i);
1914
1915 /* If this PHI has itself as an argument, ignore it.
1916 If all the other args produce the same result,
1917 we're still OK. */
1918 if (arg == gimple_phi_result (stmt))
1919 continue;
1920 else if (TREE_CODE (arg) == INTEGER_CST)
1921 {
1922 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
1923 {
1924 if (!result)
1925 result = boolean_false_node;
1926 else if (!integer_zerop (result))
1927 return NULL_TREE;
1928 }
1929 else if (!result)
1930 result = fold_build2 (code2, boolean_type_node,
1931 op2a, op2b);
1932 else if (!same_bool_comparison_p (result,
1933 code2, op2a, op2b))
1934 return NULL_TREE;
1935 }
1936 else if (TREE_CODE (arg) == SSA_NAME
1937 && !SSA_NAME_IS_DEFAULT_DEF (arg))
1938 {
1939 tree temp;
1940 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
1941 /* In simple cases we can look through PHI nodes,
1942 but we have to be careful with loops.
1943 See PR49073. */
1944 if (! dom_info_available_p (CDI_DOMINATORS)
1945 || gimple_bb (def_stmt) == gimple_bb (stmt)
1946 || dominated_by_p (CDI_DOMINATORS,
1947 gimple_bb (def_stmt),
1948 gimple_bb (stmt)))
1949 return NULL_TREE;
1950 temp = and_var_with_comparison (arg, invert, code2,
1951 op2a, op2b);
1952 if (!temp)
1953 return NULL_TREE;
1954 else if (!result)
1955 result = temp;
1956 else if (!same_bool_result_p (result, temp))
1957 return NULL_TREE;
1958 }
1959 else
1960 return NULL_TREE;
1961 }
1962 return result;
1963 }
1964
1965 default:
1966 break;
1967 }
1968 }
1969 return NULL_TREE;
1970 }
1971
1972 /* Try to simplify the AND of two comparisons, specified by
1973 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1974 If this can be simplified to a single expression (without requiring
1975 introducing more SSA variables to hold intermediate values),
1976 return the resulting tree. Otherwise return NULL_TREE.
1977 If the result expression is non-null, it has boolean type. */
1978
1979 tree
1980 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
1981 enum tree_code code2, tree op2a, tree op2b)
1982 {
1983 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
1984 if (t)
1985 return t;
1986 else
1987 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
1988 }
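
/* Worked examples of the constant-merging logic above (an editorial
   sketch; "x" and the constants stand for a hypothetical SSA name and
   INTEGER_CST trees):

     maybe_fold_and_comparisons (LT_EXPR, x, cst5, LT_EXPR, x, cst10)
       => the tree for "x < 5"    (the more restrictive bound wins)
     maybe_fold_and_comparisons (LE_EXPR, x, cst3, GE_EXPR, x, cst3)
       => the tree for "x == 3"   (a singleton range)
     maybe_fold_and_comparisons (LT_EXPR, x, cst2, GT_EXPR, x, cst5)
       => boolean_false_node      (disjoint ranges)  */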
1989
1990 /* Helper function for or_comparisons_1: try to simplify the OR of the
1991 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1992 If INVERT is true, invert the value of VAR before doing the OR.
1993 Return NULL_TREE if we can't simplify this to a single expression. */
1994
1995 static tree
1996 or_var_with_comparison (tree var, bool invert,
1997 enum tree_code code2, tree op2a, tree op2b)
1998 {
1999 tree t;
2000 gimple stmt = SSA_NAME_DEF_STMT (var);
2001
2002 /* We can only deal with variables whose definitions are assignments. */
2003 if (!is_gimple_assign (stmt))
2004 return NULL_TREE;
2005
2006 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
2007 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
2008 Then we only have to consider the simpler non-inverted cases. */
2009 if (invert)
2010 t = and_var_with_comparison_1 (stmt,
2011 invert_tree_comparison (code2, false),
2012 op2a, op2b);
2013 else
2014 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
2015 return canonicalize_bool (t, invert);
2016 }
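
/* For instance (editorial sketch with a hypothetical boolean v and
   integer x): simplifying !v | (x < 5) is rewritten per the above as
   !(v & (x >= 5)); and_var_with_comparison_1 is invoked with GE_EXPR,
   the inverse of LT_EXPR, and canonicalize_bool re-negates whatever
   simplification comes back.  */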
2017
2018 /* Try to simplify the OR of the ssa variable defined by the assignment
2019 STMT with the comparison specified by (OP2A CODE2 OP2B).
2020 Return NULL_TREE if we can't simplify this to a single expression. */
2021
2022 static tree
2023 or_var_with_comparison_1 (gimple stmt,
2024 enum tree_code code2, tree op2a, tree op2b)
2025 {
2026 tree var = gimple_assign_lhs (stmt);
2027 tree true_test_var = NULL_TREE;
2028 tree false_test_var = NULL_TREE;
2029 enum tree_code innercode = gimple_assign_rhs_code (stmt);
2030
2031 /* Check for identities like (var OR (var != 0)) => var and (var OR (var == 0)) => true. */
2032 if (TREE_CODE (op2a) == SSA_NAME
2033 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
2034 {
2035 if ((code2 == NE_EXPR && integer_zerop (op2b))
2036 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
2037 {
2038 true_test_var = op2a;
2039 if (var == true_test_var)
2040 return var;
2041 }
2042 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
2043 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
2044 {
2045 false_test_var = op2a;
2046 if (var == false_test_var)
2047 return boolean_true_node;
2048 }
2049 }
2050
2051 /* If the definition is a comparison, recurse on it. */
2052 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
2053 {
2054 tree t = or_comparisons_1 (innercode,
2055 gimple_assign_rhs1 (stmt),
2056 gimple_assign_rhs2 (stmt),
2057 code2,
2058 op2a,
2059 op2b);
2060 if (t)
2061 return t;
2062 }
2063
2064 /* If the definition is an AND or OR expression, we may be able to
2065 simplify by reassociating. */
2066 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
2067 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
2068 {
2069 tree inner1 = gimple_assign_rhs1 (stmt);
2070 tree inner2 = gimple_assign_rhs2 (stmt);
2071 gimple s;
2072 tree t;
2073 tree partial = NULL_TREE;
2074 bool is_or = (innercode == BIT_IOR_EXPR);
2075
2076 /* Check for boolean identities that don't require recursive examination
2077 of inner1/inner2:
2078 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2079 inner1 OR (inner1 AND inner2) => inner1
2080 !inner1 OR (inner1 OR inner2) => true
2081 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2082 */
2083 if (inner1 == true_test_var)
2084 return (is_or ? var : inner1);
2085 else if (inner2 == true_test_var)
2086 return (is_or ? var : inner2);
2087 else if (inner1 == false_test_var)
2088 return (is_or
2089 ? boolean_true_node
2090 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
2091 else if (inner2 == false_test_var)
2092 return (is_or
2093 ? boolean_true_node
2094 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
2095
2096 /* Next, redistribute/reassociate the OR across the inner tests.
2097 Compute the first partial result, (inner1 OR (op2a code op2b)) */
2098 if (TREE_CODE (inner1) == SSA_NAME
2099 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
2100 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2101 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2102 gimple_assign_rhs1 (s),
2103 gimple_assign_rhs2 (s),
2104 code2, op2a, op2b)))
2105 {
2106 /* Handle the OR case, where we are reassociating:
2107 (inner1 OR inner2) OR (op2a code2 op2b)
2108 => (t OR inner2)
2109 If the partial result t is a constant, we win. Otherwise
2110 continue on to try reassociating with the other inner test. */
2111 if (is_or)
2112 {
2113 if (integer_onep (t))
2114 return boolean_true_node;
2115 else if (integer_zerop (t))
2116 return inner2;
2117 }
2118
2119 /* Handle the AND case, where we are redistributing:
2120 (inner1 AND inner2) OR (op2a code2 op2b)
2121 => (t AND (inner2 OR (op2a code op2b))) */
2122 else if (integer_zerop (t))
2123 return boolean_false_node;
2124
2125 /* Save partial result for later. */
2126 partial = t;
2127 }
2128
2129 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
2130 if (TREE_CODE (inner2) == SSA_NAME
2131 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2132 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2133 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2134 gimple_assign_rhs1 (s),
2135 gimple_assign_rhs2 (s),
2136 code2, op2a, op2b)))
2137 {
2138 /* Handle the OR case, where we are reassociating:
2139 (inner1 OR inner2) OR (op2a code2 op2b)
2140 => (inner1 OR t)
2141 => (t OR partial) */
2142 if (is_or)
2143 {
2144 if (integer_zerop (t))
2145 return inner1;
2146 else if (integer_onep (t))
2147 return boolean_true_node;
2148 /* If both are the same, we can apply the identity
2149 (x OR x) == x. */
2150 else if (partial && same_bool_result_p (t, partial))
2151 return t;
2152 }
2153
2154 /* Handle the AND case, where we are redistributing:
2155 (inner1 AND inner2) OR (op2a code2 op2b)
2156 => (t AND (inner1 OR (op2a code2 op2b)))
2157 => (t AND partial) */
2158 else
2159 {
2160 if (integer_zerop (t))
2161 return boolean_false_node;
2162 else if (partial)
2163 {
2164 /* We already got a simplification for the other
2165 operand to the redistributed AND expression. The
2166 interesting case is when at least one is true.
2167 Or, if both are the same, we can apply the identity
2168 (x AND x) == x. */
2169 if (integer_onep (partial))
2170 return t;
2171 else if (integer_onep (t))
2172 return partial;
2173 else if (same_bool_result_p (t, partial))
2174 return t;
2175 }
2176 }
2177 }
2178 }
2179 return NULL_TREE;
2180 }
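
/* Example of the identity handling above (editorial sketch; a and b
   are hypothetical boolean SSA names): given

     v = a & b;

   simplifying v | (a != 0) sets true_test_var to a, and because
   inner1 == true_test_var while the inner operation is an AND, the
   whole expression collapses to just "a".  */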
2181
2182 /* Try to simplify the OR of two comparisons defined by
2183 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2184 If this can be done without constructing an intermediate value,
2185 return the resulting tree; otherwise NULL_TREE is returned.
2186 This function is deliberately asymmetric as it recurses on SSA_DEFs
2187 in the first comparison but not the second. */
2188
2189 static tree
2190 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2191 enum tree_code code2, tree op2a, tree op2b)
2192 {
2193 tree truth_type = truth_type_for (TREE_TYPE (op1a));
2194
2195 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
2196 if (operand_equal_p (op1a, op2a, 0)
2197 && operand_equal_p (op1b, op2b, 0))
2198 {
2199 /* Result will be either NULL_TREE, or a combined comparison. */
2200 tree t = combine_comparisons (UNKNOWN_LOCATION,
2201 TRUTH_ORIF_EXPR, code1, code2,
2202 truth_type, op1a, op1b);
2203 if (t)
2204 return t;
2205 }
2206
2207 /* Likewise the swapped case of the above. */
2208 if (operand_equal_p (op1a, op2b, 0)
2209 && operand_equal_p (op1b, op2a, 0))
2210 {
2211 /* Result will be either NULL_TREE, or a combined comparison. */
2212 tree t = combine_comparisons (UNKNOWN_LOCATION,
2213 TRUTH_ORIF_EXPR, code1,
2214 swap_tree_comparison (code2),
2215 truth_type, op1a, op1b);
2216 if (t)
2217 return t;
2218 }
2219
2220 /* If both comparisons are of the same value against constants, we might
2221 be able to merge them. */
2222 if (operand_equal_p (op1a, op2a, 0)
2223 && TREE_CODE (op1b) == INTEGER_CST
2224 && TREE_CODE (op2b) == INTEGER_CST)
2225 {
2226 int cmp = tree_int_cst_compare (op1b, op2b);
2227
2228 /* If we have (op1a != op1b), we should either be able to
2229 return that or TRUE, depending on whether the constant op1b
2230 also satisfies the other comparison against op2b. */
2231 if (code1 == NE_EXPR)
2232 {
2233 bool done = true;
2234 bool val;
2235 switch (code2)
2236 {
2237 case EQ_EXPR: val = (cmp == 0); break;
2238 case NE_EXPR: val = (cmp != 0); break;
2239 case LT_EXPR: val = (cmp < 0); break;
2240 case GT_EXPR: val = (cmp > 0); break;
2241 case LE_EXPR: val = (cmp <= 0); break;
2242 case GE_EXPR: val = (cmp >= 0); break;
2243 default: done = false;
2244 }
2245 if (done)
2246 {
2247 if (val)
2248 return boolean_true_node;
2249 else
2250 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2251 }
2252 }
2253 /* Likewise if the second comparison is a != comparison. */
2254 else if (code2 == NE_EXPR)
2255 {
2256 bool done = true;
2257 bool val;
2258 switch (code1)
2259 {
2260 case EQ_EXPR: val = (cmp == 0); break;
2261 case NE_EXPR: val = (cmp != 0); break;
2262 case LT_EXPR: val = (cmp > 0); break;
2263 case GT_EXPR: val = (cmp < 0); break;
2264 case LE_EXPR: val = (cmp >= 0); break;
2265 case GE_EXPR: val = (cmp <= 0); break;
2266 default: done = false;
2267 }
2268 if (done)
2269 {
2270 if (val)
2271 return boolean_true_node;
2272 else
2273 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2274 }
2275 }
2276
2277 /* See if an equality test is redundant with the other comparison. */
2278 else if (code1 == EQ_EXPR)
2279 {
2280 bool val;
2281 switch (code2)
2282 {
2283 case EQ_EXPR: val = (cmp == 0); break;
2284 case NE_EXPR: val = (cmp != 0); break;
2285 case LT_EXPR: val = (cmp < 0); break;
2286 case GT_EXPR: val = (cmp > 0); break;
2287 case LE_EXPR: val = (cmp <= 0); break;
2288 case GE_EXPR: val = (cmp >= 0); break;
2289 default:
2290 val = false;
2291 }
2292 if (val)
2293 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2294 }
2295 else if (code2 == EQ_EXPR)
2296 {
2297 bool val;
2298 switch (code1)
2299 {
2300 case EQ_EXPR: val = (cmp == 0); break;
2301 case NE_EXPR: val = (cmp != 0); break;
2302 case LT_EXPR: val = (cmp > 0); break;
2303 case GT_EXPR: val = (cmp < 0); break;
2304 case LE_EXPR: val = (cmp >= 0); break;
2305 case GE_EXPR: val = (cmp <= 0); break;
2306 default:
2307 val = false;
2308 }
2309 if (val)
2310 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2311 }
2312
2313 /* Choose the less restrictive of two < or <= comparisons. */
2314 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2315 && (code2 == LT_EXPR || code2 == LE_EXPR))
2316 {
2317 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2318 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2319 else
2320 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2321 }
2322
2323 /* Likewise choose the less restrictive of two > or >= comparisons. */
2324 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2325 && (code2 == GT_EXPR || code2 == GE_EXPR))
2326 {
2327 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2328 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2329 else
2330 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2331 }
2332
2333 /* Check for singleton ranges. */
2334 else if (cmp == 0
2335 && ((code1 == LT_EXPR && code2 == GT_EXPR)
2336 || (code1 == GT_EXPR && code2 == LT_EXPR)))
2337 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
2338
2339 /* Check for less/greater pairs that don't restrict the range at all. */
2340 else if (cmp >= 0
2341 && (code1 == LT_EXPR || code1 == LE_EXPR)
2342 && (code2 == GT_EXPR || code2 == GE_EXPR))
2343 return boolean_true_node;
2344 else if (cmp <= 0
2345 && (code1 == GT_EXPR || code1 == GE_EXPR)
2346 && (code2 == LT_EXPR || code2 == LE_EXPR))
2347 return boolean_true_node;
2348 }
2349
2350 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2351 NAME's definition is a truth value. See if there are any simplifications
2352 that can be done against the NAME's definition. */
2353 if (TREE_CODE (op1a) == SSA_NAME
2354 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2355 && (integer_zerop (op1b) || integer_onep (op1b)))
2356 {
2357 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2358 || (code1 == NE_EXPR && integer_onep (op1b)));
2359 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2360 switch (gimple_code (stmt))
2361 {
2362 case GIMPLE_ASSIGN:
2363 /* Try to simplify by copy-propagating the definition. */
2364 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
2365
2366 case GIMPLE_PHI:
2367 /* If every argument to the PHI produces the same result when
2368 ORed with the second comparison, we win.
2369 Do not do this unless the type is bool since we need a bool
2370 result here anyway. */
2371 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2372 {
2373 tree result = NULL_TREE;
2374 unsigned i;
2375 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2376 {
2377 tree arg = gimple_phi_arg_def (stmt, i);
2378
2379 /* If this PHI has itself as an argument, ignore it.
2380 If all the other args produce the same result,
2381 we're still OK. */
2382 if (arg == gimple_phi_result (stmt))
2383 continue;
2384 else if (TREE_CODE (arg) == INTEGER_CST)
2385 {
2386 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
2387 {
2388 if (!result)
2389 result = boolean_true_node;
2390 else if (!integer_onep (result))
2391 return NULL_TREE;
2392 }
2393 else if (!result)
2394 result = fold_build2 (code2, boolean_type_node,
2395 op2a, op2b);
2396 else if (!same_bool_comparison_p (result,
2397 code2, op2a, op2b))
2398 return NULL_TREE;
2399 }
2400 else if (TREE_CODE (arg) == SSA_NAME
2401 && !SSA_NAME_IS_DEFAULT_DEF (arg))
2402 {
2403 tree temp;
2404 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
2405 /* In simple cases we can look through PHI nodes,
2406 but we have to be careful with loops.
2407 See PR49073. */
2408 if (! dom_info_available_p (CDI_DOMINATORS)
2409 || gimple_bb (def_stmt) == gimple_bb (stmt)
2410 || dominated_by_p (CDI_DOMINATORS,
2411 gimple_bb (def_stmt),
2412 gimple_bb (stmt)))
2413 return NULL_TREE;
2414 temp = or_var_with_comparison (arg, invert, code2,
2415 op2a, op2b);
2416 if (!temp)
2417 return NULL_TREE;
2418 else if (!result)
2419 result = temp;
2420 else if (!same_bool_result_p (result, temp))
2421 return NULL_TREE;
2422 }
2423 else
2424 return NULL_TREE;
2425 }
2426 return result;
2427 }
2428
2429 default:
2430 break;
2431 }
2432 }
2433 return NULL_TREE;
2434 }
2435
2436 /* Try to simplify the OR of two comparisons, specified by
2437 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2438 If this can be simplified to a single expression (without requiring
2439 introducing more SSA variables to hold intermediate values),
2440 return the resulting tree. Otherwise return NULL_TREE.
2441 If the result expression is non-null, it has boolean type. */
2442
2443 tree
2444 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2445 enum tree_code code2, tree op2a, tree op2b)
2446 {
2447 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2448 if (t)
2449 return t;
2450 else
2451 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2452 }
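
/* Worked examples, mirroring the AND case earlier (an editorial
   sketch; "x" and the constants stand for a hypothetical SSA name and
   INTEGER_CST trees):

     maybe_fold_or_comparisons (LT_EXPR, x, cst5, LT_EXPR, x, cst10)
       => the tree for "x < 10"   (the less restrictive bound wins)
     maybe_fold_or_comparisons (LT_EXPR, x, cst3, GT_EXPR, x, cst3)
       => the tree for "x != 3"   (everything but a singleton)
     maybe_fold_or_comparisons (LT_EXPR, x, cst5, GE_EXPR, x, cst2)
       => boolean_true_node       (the ranges cover every value)  */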
2453
2454
2455 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2456
2457 Either NULL_TREE, a simplified but non-constant expression,
2458 or a constant is returned.
2459
2460 ??? This should go into a gimple-fold-inline.h file to be eventually
2461 privatized with the single valueize function used in the various TUs
2462 to avoid the indirect function call overhead. */
2463
2464 tree
2465 gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
2466 {
2467 location_t loc = gimple_location (stmt);
2468 switch (gimple_code (stmt))
2469 {
2470 case GIMPLE_ASSIGN:
2471 {
2472 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2473
2474 switch (get_gimple_rhs_class (subcode))
2475 {
2476 case GIMPLE_SINGLE_RHS:
2477 {
2478 tree rhs = gimple_assign_rhs1 (stmt);
2479 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
2480
2481 if (TREE_CODE (rhs) == SSA_NAME)
2482 {
2483 /* If the RHS is an SSA_NAME, return its known constant value,
2484 if any. */
2485 return (*valueize) (rhs);
2486 }
2487 /* Handle propagating invariant addresses into address
2488 operations. */
2489 else if (TREE_CODE (rhs) == ADDR_EXPR
2490 && !is_gimple_min_invariant (rhs))
2491 {
2492 HOST_WIDE_INT offset = 0;
2493 tree base;
2494 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
2495 &offset,
2496 valueize);
2497 if (base
2498 && (CONSTANT_CLASS_P (base)
2499 || decl_address_invariant_p (base)))
2500 return build_invariant_address (TREE_TYPE (rhs),
2501 base, offset);
2502 }
2503 else if (TREE_CODE (rhs) == CONSTRUCTOR
2504 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
2505 && (CONSTRUCTOR_NELTS (rhs)
2506 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
2507 {
2508 unsigned i;
2509 tree val, *vec;
2510
2511 vec = XALLOCAVEC (tree,
2512 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
2513 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
2514 {
2515 val = (*valueize) (val);
2516 if (TREE_CODE (val) == INTEGER_CST
2517 || TREE_CODE (val) == REAL_CST
2518 || TREE_CODE (val) == FIXED_CST)
2519 vec[i] = val;
2520 else
2521 return NULL_TREE;
2522 }
2523
2524 return build_vector (TREE_TYPE (rhs), vec);
2525 }
2526
2527 if (kind == tcc_reference)
2528 {
2529 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
2530 || TREE_CODE (rhs) == REALPART_EXPR
2531 || TREE_CODE (rhs) == IMAGPART_EXPR)
2532 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2533 {
2534 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2535 return fold_unary_loc (EXPR_LOCATION (rhs),
2536 TREE_CODE (rhs),
2537 TREE_TYPE (rhs), val);
2538 }
2539 else if (TREE_CODE (rhs) == BIT_FIELD_REF
2540 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2541 {
2542 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2543 return fold_ternary_loc (EXPR_LOCATION (rhs),
2544 TREE_CODE (rhs),
2545 TREE_TYPE (rhs), val,
2546 TREE_OPERAND (rhs, 1),
2547 TREE_OPERAND (rhs, 2));
2548 }
2549 else if (TREE_CODE (rhs) == MEM_REF
2550 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2551 {
2552 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2553 if (TREE_CODE (val) == ADDR_EXPR
2554 && is_gimple_min_invariant (val))
2555 {
2556 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
2557 unshare_expr (val),
2558 TREE_OPERAND (rhs, 1));
2559 if (tem)
2560 rhs = tem;
2561 }
2562 }
2563 return fold_const_aggregate_ref_1 (rhs, valueize);
2564 }
2565 else if (kind == tcc_declaration)
2566 return get_symbol_constant_value (rhs);
2567 return rhs;
2568 }
2569
2570 case GIMPLE_UNARY_RHS:
2571 {
2572 /* Handle unary operators that can appear in GIMPLE form.
2573 Note that we know the single operand must be a constant,
2574 so this should almost always return a simplified RHS. */
2575 tree lhs = gimple_assign_lhs (stmt);
2576 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2577
2578 /* Conversions are useless for CCP purposes if they are
2579 value-preserving. Thus the restrictions that
2580 useless_type_conversion_p places for restrict qualification
2581 of pointer types should not apply here.
2582 Substitution later will only substitute to allowed places. */
2583 if (CONVERT_EXPR_CODE_P (subcode)
2584 && POINTER_TYPE_P (TREE_TYPE (lhs))
2585 && POINTER_TYPE_P (TREE_TYPE (op0))
2586 && TYPE_ADDR_SPACE (TREE_TYPE (lhs))
2587 == TYPE_ADDR_SPACE (TREE_TYPE (op0))
2588 && TYPE_MODE (TREE_TYPE (lhs))
2589 == TYPE_MODE (TREE_TYPE (op0)))
2590 return op0;
2591
2592 return
2593 fold_unary_ignore_overflow_loc (loc, subcode,
2594 gimple_expr_type (stmt), op0);
2595 }
2596
2597 case GIMPLE_BINARY_RHS:
2598 {
2599 /* Handle binary operators that can appear in GIMPLE form. */
2600 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2601 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2602
2603 /* Translate &x + CST into an invariant form suitable for
2604 further propagation. */
2605 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
2606 && TREE_CODE (op0) == ADDR_EXPR
2607 && TREE_CODE (op1) == INTEGER_CST)
2608 {
2609 tree off = fold_convert (ptr_type_node, op1);
2610 return build_fold_addr_expr_loc
2611 (loc,
2612 fold_build2 (MEM_REF,
2613 TREE_TYPE (TREE_TYPE (op0)),
2614 unshare_expr (op0), off));
2615 }
2616
2617 return fold_binary_loc (loc, subcode,
2618 gimple_expr_type (stmt), op0, op1);
2619 }
2620
2621 case GIMPLE_TERNARY_RHS:
2622 {
2623 /* Handle ternary operators that can appear in GIMPLE form. */
2624 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2625 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2626 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
2627
2628 /* Fold embedded expressions in ternary codes. */
2629 if ((subcode == COND_EXPR
2630 || subcode == VEC_COND_EXPR)
2631 && COMPARISON_CLASS_P (op0))
2632 {
2633 tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
2634 tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
2635 tree tem = fold_binary_loc (loc, TREE_CODE (op0),
2636 TREE_TYPE (op0), op00, op01);
2637 if (tem)
2638 op0 = tem;
2639 }
2640
2641 return fold_ternary_loc (loc, subcode,
2642 gimple_expr_type (stmt), op0, op1, op2);
2643 }
2644
2645 default:
2646 gcc_unreachable ();
2647 }
2648 }
2649
2650 case GIMPLE_CALL:
2651 {
2652 tree fn;
2653
2654 if (gimple_call_internal_p (stmt))
2655 /* No folding yet for these functions. */
2656 return NULL_TREE;
2657
2658 fn = (*valueize) (gimple_call_fn (stmt));
2659 if (TREE_CODE (fn) == ADDR_EXPR
2660 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
2661 && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
2662 {
2663 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
2664 tree call, retval;
2665 unsigned i;
2666 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2667 args[i] = (*valueize) (gimple_call_arg (stmt, i));
2668 call = build_call_array_loc (loc,
2669 gimple_call_return_type (stmt),
2670 fn, gimple_call_num_args (stmt), args);
2671 retval = fold_call_expr (EXPR_LOCATION (call), call, false);
2672 if (retval)
2673 /* fold_call_expr wraps the result inside a NOP_EXPR. */
2674 STRIP_NOPS (retval);
2675 return retval;
2676 }
2677 return NULL_TREE;
2678 }
2679
2680 default:
2681 return NULL_TREE;
2682 }
2683 }
2684
2685 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2686 Returns NULL_TREE if folding to a constant is not possible, otherwise
2687 returns a constant according to is_gimple_min_invariant. */
2688
2689 tree
2690 gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
2691 {
2692 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
2693 if (res && is_gimple_min_invariant (res))
2694 return res;
2695 return NULL_TREE;
2696 }
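
/* Typical usage (editorial sketch; identity_valueize is hypothetical,
   and real passes such as CCP instead supply a callback that consults
   their lattice):

     static tree
     identity_valueize (tree name)
     {
       return name;
     }
     ...
     tree cst = gimple_fold_stmt_to_constant (stmt, identity_valueize);

   A NULL_TREE result means STMT did not fold to a constant.  */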
2697
2698
2699 /* The following set of functions is supposed to fold references using
2700 their constant initializers. */
2701
2702 static tree fold_ctor_reference (tree type, tree ctor,
2703 unsigned HOST_WIDE_INT offset,
2704 unsigned HOST_WIDE_INT size, tree);
2705
2706 /* See if we can find a constructor defining the value of BASE.
2707 When we know the constructor and a constant offset (such as when
2708 BASE is array[40] and we know the constructor of array), then
2709 BIT_OFFSET is adjusted accordingly.
2710
2711 As a special case, return error_mark_node when the constructor
2712 is not explicitly available, but it is known to be zero,
2713 such as 'static const int a;'. */
2714 static tree
2715 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
2716 tree (*valueize)(tree))
2717 {
2718 HOST_WIDE_INT bit_offset2, size, max_size;
2719 if (TREE_CODE (base) == MEM_REF)
2720 {
2721 if (!integer_zerop (TREE_OPERAND (base, 1)))
2722 {
2723 if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
2724 return NULL_TREE;
2725 *bit_offset += (mem_ref_offset (base).low
2726 * BITS_PER_UNIT);
2727 }
2728
2729 if (valueize
2730 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2731 base = valueize (TREE_OPERAND (base, 0));
2732 if (!base || TREE_CODE (base) != ADDR_EXPR)
2733 return NULL_TREE;
2734 base = TREE_OPERAND (base, 0);
2735 }
2736
2737 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
2738 DECL_INITIAL. If BASE is a nested reference into another
2739 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
2740 the inner reference. */
2741 switch (TREE_CODE (base))
2742 {
2743 case VAR_DECL:
2744 case CONST_DECL:
2745 {
2746 tree init = ctor_for_folding (base);
2747
2748 /* Our semantics are the exact opposite of ctor_for_folding:
2749 NULL means unknown, while error_mark_node means 0. */
2750 if (init == error_mark_node)
2751 return NULL_TREE;
2752 if (!init)
2753 return error_mark_node;
2754 return init;
2755 }
2756
2757 case ARRAY_REF:
2758 case COMPONENT_REF:
2759 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
2760 if (max_size == -1 || size != max_size)
2761 return NULL_TREE;
2762 *bit_offset += bit_offset2;
2763 return get_base_constructor (base, bit_offset, valueize);
2764
2765 case STRING_CST:
2766 case CONSTRUCTOR:
2767 return base;
2768
2769 default:
2770 return NULL_TREE;
2771 }
2772 }
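
/* For example (editorial sketch): for the declaration

     static const int a;

   there is no explicit initializer but the value is known to be all
   zeros, so ctor_for_folding yields NULL and this function translates
   that into error_mark_node; the callers below then fold loads from
   'a' to zero.  A NULL_TREE result instead means the constructor is
   simply unknown.  */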
2773
2774 /* CTOR is a STRING_CST. Fold a reference of type TYPE and size SIZE
2775 to the memory at bit OFFSET.
2776
2777 We only do the simple job of folding byte accesses. */
2778
2779 static tree
2780 fold_string_cst_ctor_reference (tree type, tree ctor,
2781 unsigned HOST_WIDE_INT offset,
2782 unsigned HOST_WIDE_INT size)
2783 {
2784 if (INTEGRAL_TYPE_P (type)
2785 && (TYPE_MODE (type)
2786 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2787 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2788 == MODE_INT)
2789 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
2790 && size == BITS_PER_UNIT
2791 && !(offset % BITS_PER_UNIT))
2792 {
2793 offset /= BITS_PER_UNIT;
2794 if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
2795 return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
2796 [offset]));
2797 /* Folding
2798 const char a[20]="hello";
2799 return a[10];
2800
2801 might lead to an offset greater than the string length. In this
2802 case we know the value is either initialized to 0 or out of bounds.
2803 Return 0 in both cases. */
2804 return build_zero_cst (type);
2805 }
2806 return NULL_TREE;
2807 }
2808
2809 /* CTOR is a CONSTRUCTOR of an array type. Fold a reference of type TYPE
2810 and size SIZE to the memory at bit OFFSET. */
2811
2812 static tree
2813 fold_array_ctor_reference (tree type, tree ctor,
2814 unsigned HOST_WIDE_INT offset,
2815 unsigned HOST_WIDE_INT size,
2816 tree from_decl)
2817 {
2818 unsigned HOST_WIDE_INT cnt;
2819 tree cfield, cval;
2820 double_int low_bound, elt_size;
2821 double_int index, max_index;
2822 double_int access_index;
2823 tree domain_type = NULL_TREE, index_type = NULL_TREE;
2824 HOST_WIDE_INT inner_offset;
2825
2826 /* Compute low bound and elt size. */
2827 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
2828 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
2829 if (domain_type && TYPE_MIN_VALUE (domain_type))
2830 {
2831 /* Static constructors for variably sized objects make no sense. */
2832 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
2833 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
2834 low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
2835 }
2836 else
2837 low_bound = double_int_zero;
2838 /* Static constructors for variably sized objects make no sense. */
2839 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
2840 == INTEGER_CST);
2841 elt_size =
2842 tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
2843
2844
2845 /* We can only handle constantly sized accesses that are known not to
2846 be larger than the size of an array element. */
2847 if (!TYPE_SIZE_UNIT (type)
2848 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
2849 || elt_size.slt (tree_to_double_int (TYPE_SIZE_UNIT (type))))
2850 return NULL_TREE;
2851
2852 /* Compute the array index we look for. */
2853 access_index = double_int::from_uhwi (offset / BITS_PER_UNIT)
2854 .udiv (elt_size, TRUNC_DIV_EXPR);
2855 access_index += low_bound;
2856 if (index_type)
2857 access_index = access_index.ext (TYPE_PRECISION (index_type),
2858 TYPE_UNSIGNED (index_type));
2859
2860 /* And offset within the access. */
2861 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
2862
2863 /* See if the array field is large enough to span the whole access. We do
2864 not care to fold accesses spanning multiple array indexes. */
2865 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
2866 return NULL_TREE;
2867
2868 index = low_bound - double_int_one;
2869 if (index_type)
2870 index = index.ext (TYPE_PRECISION (index_type), TYPE_UNSIGNED (index_type));
2871
2872 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
2873 {
2874 /* An array constructor may explicitly set the index, specify a range,
2875 or leave the index NULL, meaning that it is the next index after the
2876 previous one. */
2877 if (cfield)
2878 {
2879 if (TREE_CODE (cfield) == INTEGER_CST)
2880 max_index = index = tree_to_double_int (cfield);
2881 else
2882 {
2883 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
2884 index = tree_to_double_int (TREE_OPERAND (cfield, 0));
2885 max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
2886 }
2887 }
2888 else
2889 {
2890 index += double_int_one;
2891 if (index_type)
2892 index = index.ext (TYPE_PRECISION (index_type),
2893 TYPE_UNSIGNED (index_type));
2894 max_index = index;
2895 }
2896
2897 /* Do we have a match? */
2898 if (access_index.cmp (index, 1) >= 0
2899 && access_index.cmp (max_index, 1) <= 0)
2900 return fold_ctor_reference (type, cval, inner_offset, size,
2901 from_decl);
2902 }
2903 /* When memory is not explicitly mentioned in the constructor,
2904 it is 0 (or out of range). */
2905 return build_zero_cst (type);
2906 }
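
/* Worked example of the index arithmetic above (editorial sketch,
   assuming 8-bit units and a 32-bit int): for

     static const int a[3] = { 1, 2, 3 };

   a read of type int at bit OFFSET 64 with SIZE 32 computes
   access_index = (64 / 8) / 4 + low_bound = 2 and inner_offset = 0,
   so the matching constructor element folds the access to the
   constant 3.  */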
2907
2908 /* CTOR is a CONSTRUCTOR of an aggregate or vector.
2909 Fold a reference of type TYPE and size SIZE to the memory at bit OFFSET. */
2910
2911 static tree
2912 fold_nonarray_ctor_reference (tree type, tree ctor,
2913 unsigned HOST_WIDE_INT offset,
2914 unsigned HOST_WIDE_INT size,
2915 tree from_decl)
2916 {
2917 unsigned HOST_WIDE_INT cnt;
2918 tree cfield, cval;
2919
2920 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
2921 cval)
2922 {
2923 tree byte_offset = DECL_FIELD_OFFSET (cfield);
2924 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
2925 tree field_size = DECL_SIZE (cfield);
2926 double_int bitoffset;
2927 double_int byte_offset_cst = tree_to_double_int (byte_offset);
2928 double_int bits_per_unit_cst = double_int::from_uhwi (BITS_PER_UNIT);
2929 double_int bitoffset_end, access_end;
2930
2931 /* Variable sized objects in static constructors make no sense,
2932 but field_size can be NULL for flexible array members. */
2933 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
2934 && TREE_CODE (byte_offset) == INTEGER_CST
2935 && (field_size != NULL_TREE
2936 ? TREE_CODE (field_size) == INTEGER_CST
2937 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
2938
2939 /* Compute bit offset of the field. */
2940 bitoffset = tree_to_double_int (field_offset)
2941 + byte_offset_cst * bits_per_unit_cst;
2942 /* Compute bit offset where the field ends. */
2943 if (field_size != NULL_TREE)
2944 bitoffset_end = bitoffset + tree_to_double_int (field_size);
2945 else
2946 bitoffset_end = double_int_zero;
2947
2948 access_end = double_int::from_uhwi (offset)
2949 + double_int::from_uhwi (size);
2950
2951 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
2952 [BITOFFSET, BITOFFSET_END)? */
2953 if (access_end.cmp (bitoffset, 0) > 0
2954 && (field_size == NULL_TREE
2955 || double_int::from_uhwi (offset).slt (bitoffset_end)))
2956 {
2957 double_int inner_offset = double_int::from_uhwi (offset) - bitoffset;
2958 /* We do have overlap. Now see if the field is large enough to
2959 cover the access. Give up for accesses spanning multiple
2960 fields. */
2961 if (access_end.cmp (bitoffset_end, 0) > 0)
2962 return NULL_TREE;
2963 if (double_int::from_uhwi (offset).slt (bitoffset))
2964 return NULL_TREE;
2965 return fold_ctor_reference (type, cval,
2966 inner_offset.to_uhwi (), size,
2967 from_decl);
2968 }
2969 }
2970 /* When memory is not explicitly mentioned in the constructor, it is 0. */
2971 return build_zero_cst (type);
2972 }
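
/* Worked example (editorial sketch, assuming a 32-bit int): for

     static const struct { int x; int y; } s = { 1, 2 };

   a read at bit OFFSET 32 with SIZE 32 overlaps only field y, which
   occupies bits [32, 64), lies entirely within it, and folds to the
   constant 2; a 64-bit read at OFFSET 0 would span both fields and
   make the function give up with NULL_TREE.  */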
2973
2974 /* CTOR is a value initializing memory. Fold a reference of type TYPE and
2975 size SIZE to the memory at bit OFFSET. */
2976
2977 static tree
2978 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
2979 unsigned HOST_WIDE_INT size, tree from_decl)
2980 {
2981 tree ret;
2982
2983 /* We found the field with an exact match. */
2984 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
2985 && !offset)
2986 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
2987
2988 /* We are at the end of the walk; see if we can view-convert the
2989 result. */
2990 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
2991 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
2992 && operand_equal_p (TYPE_SIZE (type),
2993 TYPE_SIZE (TREE_TYPE (ctor)), 0))
2994 {
2995 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
2996 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
2997 if (ret)
2998 STRIP_NOPS (ret);
2999 return ret;
3000 }
3001 if (TREE_CODE (ctor) == STRING_CST)
3002 return fold_string_cst_ctor_reference (type, ctor, offset, size);
3003 if (TREE_CODE (ctor) == CONSTRUCTOR)
3004 {
3005
3006 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
3007 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
3008 return fold_array_ctor_reference (type, ctor, offset, size,
3009 from_decl);
3010 else
3011 return fold_nonarray_ctor_reference (type, ctor, offset, size,
3012 from_decl);
3013 }
3014
3015 return NULL_TREE;
3016 }
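
/* For instance (editorial sketch, assuming a 32-bit int and IEEE
   single precision): with

     static const float f = 1.0f;

   a same-sized integer read of 'f' at offset 0 takes the
   VIEW_CONVERT_EXPR path above and folds to the INTEGER_CST with
   value 0x3f800000.  */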
3017
3018 /* Return the tree representing the element referenced by T if T is an
3019 ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
3020 names using VALUEIZE. Return NULL_TREE otherwise. */
3021
3022 tree
3023 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
3024 {
3025 tree ctor, idx, base;
3026 HOST_WIDE_INT offset, size, max_size;
3027 tree tem;
3028
3029 if (TREE_THIS_VOLATILE (t))
3030 return NULL_TREE;
3031
3032 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
3033 return get_symbol_constant_value (t);
3034
3035 tem = fold_read_from_constant_string (t);
3036 if (tem)
3037 return tem;
3038
3039 switch (TREE_CODE (t))
3040 {
3041 case ARRAY_REF:
3042 case ARRAY_RANGE_REF:
3043 /* Constant indexes are handled well by get_base_constructor;
3044 we only special-case variable offsets here.
3045 FIXME: This code can't handle nested references with variable indexes
3046 (they will be handled only by iteration of ccp). Perhaps we can bring
3047 get_ref_base_and_extent here and make it use a valueize callback. */
3048 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
3049 && valueize
3050 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
3051 && TREE_CODE (idx) == INTEGER_CST)
3052 {
3053 tree low_bound, unit_size;
3054 double_int doffset;
3055
3056 /* If the resulting bit-offset is constant, track it. */
3057 if ((low_bound = array_ref_low_bound (t),
3058 TREE_CODE (low_bound) == INTEGER_CST)
3059 && (unit_size = array_ref_element_size (t),
3060 host_integerp (unit_size, 1))
3061 && (doffset = (TREE_INT_CST (idx) - TREE_INT_CST (low_bound))
3062 .sext (TYPE_PRECISION (TREE_TYPE (idx))),
3063 doffset.fits_shwi ()))
3064 {
3065 offset = doffset.to_shwi ();
3066 offset *= TREE_INT_CST_LOW (unit_size);
3067 offset *= BITS_PER_UNIT;
3068
3069 base = TREE_OPERAND (t, 0);
3070 ctor = get_base_constructor (base, &offset, valueize);
3071 /* Empty constructor. Always fold to 0. */
3072 if (ctor == error_mark_node)
3073 return build_zero_cst (TREE_TYPE (t));
3074 /* Out-of-bounds array access. The value is undefined,
3075 but don't fold. */
3076 if (offset < 0)
3077 return NULL_TREE;
3078 /* We cannot determine the ctor. */
3079 if (!ctor)
3080 return NULL_TREE;
3081 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
3082 TREE_INT_CST_LOW (unit_size)
3083 * BITS_PER_UNIT,
3084 base);
3085 }
3086 }
3087 /* Fallthru. */
3088
3089 case COMPONENT_REF:
3090 case BIT_FIELD_REF:
3091 case TARGET_MEM_REF:
3092 case MEM_REF:
3093 base = get_ref_base_and_extent (t, &offset, &size, &max_size);
3094 ctor = get_base_constructor (base, &offset, valueize);
3095
3096 /* Empty constructor. Always fold to 0. */
3097 if (ctor == error_mark_node)
3098 return build_zero_cst (TREE_TYPE (t));
3099 /* We do not know the precise address. */
3100 if (max_size == -1 || max_size != size)
3101 return NULL_TREE;
3102 /* We cannot determine the ctor. */
3103 if (!ctor)
3104 return NULL_TREE;
3105
3106 /* Out-of-bounds array access. The value is undefined, but don't fold. */
3107 if (offset < 0)
3108 return NULL_TREE;
3109
3110 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
3111 base);
3112
3113 case REALPART_EXPR:
3114 case IMAGPART_EXPR:
3115 {
3116 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
3117 if (c && TREE_CODE (c) == COMPLEX_CST)
3118 return fold_build1_loc (EXPR_LOCATION (t),
3119 TREE_CODE (t), TREE_TYPE (t), c);
3120 break;
3121 }
3122
3123 default:
3124 break;
3125 }
3126
3127 return NULL_TREE;
3128 }
3129
3130 tree
3131 fold_const_aggregate_ref (tree t)
3132 {
3133 return fold_const_aggregate_ref_1 (t, NULL);
3134 }
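
/* For example (editorial sketch, assuming a 32-bit int): given

     static const int a[2] = { 10, 20 };
     ... = a[1];

   the reference is resolved to base 'a' at bit offset 32, the
   DECL_INITIAL constructor of 'a' is found, and the load folds to
   the constant 20.  */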
3135
3136 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
3137 is the integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
3138 KNOWN_BINFO carries the binfo describing the true type of
3139 OBJ_TYPE_REF_OBJECT(REF). */
3140
3141 tree
3142 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo)
3143 {
3144 unsigned HOST_WIDE_INT offset, size;
3145 tree v, fn, vtable, init;
3146
3147 vtable = v = BINFO_VTABLE (known_binfo);
3148 /* If there is no virtual method table, leave the OBJ_TYPE_REF alone. */
3149 if (!v)
3150 return NULL_TREE;
3151
3152 if (TREE_CODE (v) == POINTER_PLUS_EXPR)
3153 {
3154 offset = tree_low_cst (TREE_OPERAND (v, 1), 1) * BITS_PER_UNIT;
3155 v = TREE_OPERAND (v, 0);
3156 }
3157 else
3158 offset = 0;
3159
3160 if (TREE_CODE (v) != ADDR_EXPR)
3161 return NULL_TREE;
3162 v = TREE_OPERAND (v, 0);
3163
3164 if (TREE_CODE (v) != VAR_DECL
3165 || !DECL_VIRTUAL_P (v))
3166 return NULL_TREE;
3167 init = ctor_for_folding (v);
3168
3169 /* The virtual tables should always be born with constructors,
3170 and we should always assume that they are available for
3171 folding. At the moment we do not stream them in all cases,
3172 but it should never happen that the ctor seems unreachable. */
3173 gcc_assert (init);
3174 if (init == error_mark_node)
3175 {
3176 gcc_assert (in_lto_p);
3177 return NULL_TREE;
3178 }
3179 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
3180 size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))), 1);
3181 offset += token * size;
3182 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
3183 offset, size, v);
3184 if (!fn || integer_zerop (fn))
3185 return NULL_TREE;
3186 gcc_assert (TREE_CODE (fn) == ADDR_EXPR
3187 || TREE_CODE (fn) == FDESC_EXPR);
3188 fn = TREE_OPERAND (fn, 0);
3189 gcc_assert (TREE_CODE (fn) == FUNCTION_DECL);
3190
3191 /* When the cgraph node is missing and the function is not public, we
3192 cannot devirtualize. This can happen in WHOPR when the actual method
3193 ends up in another partition, because we found the devirtualization
3194 possibility too late. */
3195 if (!can_refer_decl_in_current_unit_p (fn, vtable))
3196 return NULL_TREE;
3197
3198 /* Make sure we create a cgraph node for functions we'll reference.
3199 They can be non-existent if the reference comes from an entry
3200 of an external vtable for example. */
3201 cgraph_get_create_node (fn);
3202
3203 return fn;
3204 }
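
/* Worked example of the offset arithmetic above (editorial sketch):
   on a target with 64-bit vtable entries and no POINTER_PLUS_EXPR
   adjustment, TOKEN 2 gives offset = 2 * 64 = 128 bits, so the third
   entry of the vtable constructor is extracted, and its ADDR_EXPR
   operand is the FUNCTION_DECL the call can be devirtualized to.  */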
3205
3206 /* Return true iff VAL is a gimple expression that is known to be
3207 non-negative. Restricted to floating-point inputs. */
3208
3209 bool
3210 gimple_val_nonnegative_real_p (tree val)
3211 {
3212 gimple def_stmt;
3213
3214 gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));
3215
3216 /* Use existing logic for non-gimple trees. */
3217 if (tree_expr_nonnegative_p (val))
3218 return true;
3219
3220 if (TREE_CODE (val) != SSA_NAME)
3221 return false;
3222
3223 /* Currently we look only at the immediately defining statement
3224 to make this determination, since recursion on defining
3225 statements of operands can lead to quadratic behavior in the
3226 worst case. This is expected to catch almost all occurrences
3227 in practice. It would be possible to implement limited-depth
3228 recursion if important cases are lost. Alternatively, passes
3229 that need this information (such as the pow/powi lowering code
3230 in the cse_sincos pass) could be revised to provide it through
3231 dataflow propagation. */
3232
3233 def_stmt = SSA_NAME_DEF_STMT (val);
3234
3235 if (is_gimple_assign (def_stmt))
3236 {
3237 tree op0, op1;
3238
3239 /* See fold-const.c:tree_expr_nonnegative_p for additional
3240 cases that could be handled with recursion. */
3241
3242 switch (gimple_assign_rhs_code (def_stmt))
3243 {
3244 case ABS_EXPR:
3245 /* Always true for floating-point operands. */
3246 return true;
3247
3248 case MULT_EXPR:
3249 /* True if the two operands are identical (since we are
3250 restricted to floating-point inputs). */
3251 op0 = gimple_assign_rhs1 (def_stmt);
3252 op1 = gimple_assign_rhs2 (def_stmt);
3253
3254 if (op0 == op1
3255 || operand_equal_p (op0, op1, 0))
3256 return true;
3257
3258 default:
3259 return false;
3260 }
3261 }
3262 else if (is_gimple_call (def_stmt))
3263 {
3264 tree fndecl = gimple_call_fndecl (def_stmt);
3265 if (fndecl
3266 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3267 {
3268 tree arg1;
3269
3270 switch (DECL_FUNCTION_CODE (fndecl))
3271 {
3272 CASE_FLT_FN (BUILT_IN_ACOS):
3273 CASE_FLT_FN (BUILT_IN_ACOSH):
3274 CASE_FLT_FN (BUILT_IN_CABS):
3275 CASE_FLT_FN (BUILT_IN_COSH):
3276 CASE_FLT_FN (BUILT_IN_ERFC):
3277 CASE_FLT_FN (BUILT_IN_EXP):
3278 CASE_FLT_FN (BUILT_IN_EXP10):
3279 CASE_FLT_FN (BUILT_IN_EXP2):
3280 CASE_FLT_FN (BUILT_IN_FABS):
3281 CASE_FLT_FN (BUILT_IN_FDIM):
3282 CASE_FLT_FN (BUILT_IN_HYPOT):
3283 CASE_FLT_FN (BUILT_IN_POW10):
3284 return true;
3285
3286 CASE_FLT_FN (BUILT_IN_SQRT):
3287 /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
3288 negative inputs. */
3289 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
3290 return true;
3291
3292 break;
3293
3294 CASE_FLT_FN (BUILT_IN_POWI):
3295 /* True if the second argument is an even integer. */
3296 arg1 = gimple_call_arg (def_stmt, 1);
3297
3298 if (TREE_CODE (arg1) == INTEGER_CST
3299 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
3300 return true;
3301
3302 break;
3303
3304 CASE_FLT_FN (BUILT_IN_POW):
3305 /* True if the second argument is an even integer-valued
3306 real. */
3307 arg1 = gimple_call_arg (def_stmt, 1);
3308
3309 if (TREE_CODE (arg1) == REAL_CST)
3310 {
3311 REAL_VALUE_TYPE c;
3312 HOST_WIDE_INT n;
3313
3314 c = TREE_REAL_CST (arg1);
3315 n = real_to_integer (&c);
3316
3317 if ((n & 1) == 0)
3318 {
3319 REAL_VALUE_TYPE cint;
3320 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3321 if (real_identical (&c, &cint))
3322 return true;
3323 }
3324 }
3325
3326 break;
3327
3328 default:
3329 return false;
3330 }
3331 }
3332 }
3333
3334 return false;
3335 }
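
/* Examples of what the logic above recognizes (editorial sketch; x is
   a hypothetical float SSA name):

     y = x * x;                       => true  (identical operands)
     y = __builtin_fabsf (x);         => true  (absolute value)
     y = __builtin_powf (x, 2.0f);    => true  (even integral exponent)
     y = __builtin_powif (x, 3);      => false (odd exponent)  */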
3336
3337 /* Given a pointer value OP0, return a simplified version of an
3338 indirection through OP0, or NULL_TREE if no simplification is
3339 possible. Note that the resulting type may differ from the
3340 pointed-to type, in the sense that it is still compatible
3341 from the langhooks point of view. */
3342
3343 tree
3344 gimple_fold_indirect_ref (tree t)
3345 {
3346 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
3347 tree sub = t;
3348 tree subtype;
3349
3350 STRIP_NOPS (sub);
3351 subtype = TREE_TYPE (sub);
3352 if (!POINTER_TYPE_P (subtype))
3353 return NULL_TREE;
3354
3355 if (TREE_CODE (sub) == ADDR_EXPR)
3356 {
3357 tree op = TREE_OPERAND (sub, 0);
3358 tree optype = TREE_TYPE (op);
3359 /* *&p => p */
3360 if (useless_type_conversion_p (type, optype))
3361 return op;
3362
3363 /* *(foo *)&fooarray => fooarray[0] */
3364 if (TREE_CODE (optype) == ARRAY_TYPE
3365 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
3366 && useless_type_conversion_p (type, TREE_TYPE (optype)))
3367 {
3368 tree type_domain = TYPE_DOMAIN (optype);
3369 tree min_val = size_zero_node;
3370 if (type_domain && TYPE_MIN_VALUE (type_domain))
3371 min_val = TYPE_MIN_VALUE (type_domain);
3372 if (TREE_CODE (min_val) == INTEGER_CST)
3373 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
3374 }
3375 /* *(foo *)&complexfoo => __real__ complexfoo */
3376 else if (TREE_CODE (optype) == COMPLEX_TYPE
3377 && useless_type_conversion_p (type, TREE_TYPE (optype)))
3378 return fold_build1 (REALPART_EXPR, type, op);
3379 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
3380 else if (TREE_CODE (optype) == VECTOR_TYPE
3381 && useless_type_conversion_p (type, TREE_TYPE (optype)))
3382 {
3383 tree part_width = TYPE_SIZE (type);
3384 tree index = bitsize_int (0);
3385 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
3386 }
3387 }
3388
3389 /* *(p + CST) -> ... */
3390 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
3391 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
3392 {
3393 tree addr = TREE_OPERAND (sub, 0);
3394 tree off = TREE_OPERAND (sub, 1);
3395 tree addrtype;
3396
3397 STRIP_NOPS (addr);
3398 addrtype = TREE_TYPE (addr);
3399
3400 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
3401 if (TREE_CODE (addr) == ADDR_EXPR
3402 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
3403 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
3404 && host_integerp (off, 1))
3405 {
3406 unsigned HOST_WIDE_INT offset = tree_low_cst (off, 1);
3407 tree part_width = TYPE_SIZE (type);
3408 unsigned HOST_WIDE_INT part_widthi
3409 = tree_low_cst (part_width, 0) / BITS_PER_UNIT;
3410 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
3411 tree index = bitsize_int (indexi);
3412 if (offset / part_widthi
3413 <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
3414 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
3415 part_width, index);
3416 }
3417
3418 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
3419 if (TREE_CODE (addr) == ADDR_EXPR
3420 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
3421 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
3422 {
3423 tree size = TYPE_SIZE_UNIT (type);
3424 if (tree_int_cst_equal (size, off))
3425 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
3426 }
3427
3428 /* *(p + CST) -> MEM_REF <p, CST>. */
3429 if (TREE_CODE (addr) != ADDR_EXPR
3430 || DECL_P (TREE_OPERAND (addr, 0)))
3431 return fold_build2 (MEM_REF, type,
3432 addr,
3433 build_int_cst_wide (ptype,
3434 TREE_INT_CST_LOW (off),
3435 TREE_INT_CST_HIGH (off)));
3436 }
3437
3438 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
3439 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
3440 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
3441 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
3442 {
3443 tree type_domain;
3444 tree min_val = size_zero_node;
3445 tree osub = sub;
3446 sub = gimple_fold_indirect_ref (sub);
3447 if (! sub)
3448 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
3449 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
3450 if (type_domain && TYPE_MIN_VALUE (type_domain))
3451 min_val = TYPE_MIN_VALUE (type_domain);
3452 if (TREE_CODE (min_val) == INTEGER_CST)
3453 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
3454 }
3455
3456 return NULL_TREE;
3457 }