/* Operations with affine combinations of trees.
   Copyright (C) 2005-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "tree-pretty-print.h"
#include "pointer-set.h"
#include "tree-affine.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "cfgexpand.h"
#include "wide-int-print.h"
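/* An affine combination (aff_tree, defined in tree-affine.h) represents a
   value of the form

     offset + elts[0].coef * elts[0].val + ... + elts[n-1].coef * elts[n-1].val
            + rest

   For instance, the expression a + 3 * b + 7 decomposes (up to the order of
   the elements) into offset = 7, elts[0].val = a, elts[0].coef = 1,
   elts[1].val = b, elts[1].coef = 3 and rest = NULL_TREE.  All coefficient
   arithmetic is reduced to the precision of the combination's type by
   wide_int_ext_for_comb below.  */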
/* Extends CST as appropriate for the affine combinations COMB.  */

static widest_int
wide_int_ext_for_comb (const widest_int &cst, aff_tree *comb)
{
  return wi::sext (cst, TYPE_PRECISION (comb->type));
}
/* Initializes affine combination COMB so that its value is zero in TYPE.  */

void
aff_combination_zero (aff_tree *comb, tree type)
{
  unsigned i;

  comb->type = type;
  comb->offset = 0;
  comb->n = 0;
  for (i = 0; i < MAX_AFF_ELTS; i++)
    comb->elts[i].coef = 0;
  comb->rest = NULL_TREE;
}
/* Sets COMB to CST.  */

void
aff_combination_const (aff_tree *comb, tree type, const widest_int &cst)
{
  aff_combination_zero (comb, type);
  comb->offset = wide_int_ext_for_comb (cst, comb);
}
/* Sets COMB to single element ELT.  */

void
aff_combination_elt (aff_tree *comb, tree type, tree elt)
{
  aff_combination_zero (comb, type);

  comb->n = 1;
  comb->elts[0].val = elt;
  comb->elts[0].coef = 1;
}
/* Scales COMB by SCALE.  */

void
aff_combination_scale (aff_tree *comb, const widest_int &scale_in)
{
  unsigned i, j;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb);
  if (scale == 1)
    return;

  if (scale == 0)
    {
      aff_combination_zero (comb, comb->type);
      return;
    }

  comb->offset = wide_int_ext_for_comb (scale * comb->offset, comb);
  for (i = 0, j = 0; i < comb->n; i++)
    {
      widest_int new_coef
        = wide_int_ext_for_comb (scale * comb->elts[i].coef, comb);
      /* A coefficient may become zero due to overflow.  Remove the zero
         elements.  */
      if (new_coef == 0)
        continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = comb->elts[i].val;
      j++;
    }
  comb->n = j;

  if (comb->rest)
    {
      tree type = comb->type;
      if (POINTER_TYPE_P (type))
        type = sizetype;
      if (comb->n < MAX_AFF_ELTS)
        {
          comb->elts[comb->n].coef = scale;
          comb->elts[comb->n].val = comb->rest;
          comb->rest = NULL_TREE;
          comb->n++;
        }
      else
        comb->rest = fold_build2 (MULT_EXPR, type, comb->rest,
                                  wide_int_to_tree (type, scale));
    }
}
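/* For instance, scaling the combination { offset = 3; 2 * a } by 4 yields
   { offset = 12; 8 * a }.  Because each new coefficient is reduced to the
   precision of the combination's type, it can wrap around to zero: in an
   8-bit type, scaling the element 128 * a by 2 gives the coefficient 256,
   which truncates to 0, so the loop above drops the element.  */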
/* Adds ELT * SCALE to COMB.  */

void
aff_combination_add_elt (aff_tree *comb, tree elt, const widest_int &scale_in)
{
  unsigned i;
  tree type;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb);
  if (scale == 0)
    return;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, elt, 0))
      {
        widest_int new_coef
          = wide_int_ext_for_comb (comb->elts[i].coef + scale, comb);
        if (new_coef != 0)
          {
            comb->elts[i].coef = new_coef;
            return;
          }

        comb->n--;
        comb->elts[i] = comb->elts[comb->n];

        if (comb->rest)
          {
            gcc_assert (comb->n == MAX_AFF_ELTS - 1);
            comb->elts[comb->n].coef = 1;
            comb->elts[comb->n].val = comb->rest;
            comb->rest = NULL_TREE;
            comb->n++;
          }
        return;
      }

  if (comb->n < MAX_AFF_ELTS)
    {
      comb->elts[comb->n].coef = scale;
      comb->elts[comb->n].val = elt;
      comb->n++;
      return;
    }

  type = comb->type;
  if (POINTER_TYPE_P (type))
    type = sizetype;

  if (scale == 1)
    elt = fold_convert (type, elt);
  else
    elt = fold_build2 (MULT_EXPR, type,
                       fold_convert (type, elt),
                       wide_int_to_tree (type, scale));

  if (comb->rest)
    comb->rest = fold_build2 (PLUS_EXPR, type, comb->rest,
                              elt);
  else
    comb->rest = elt;
}
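/* For instance, adding the element a with scale 3 to { offset = 1; 2 * a }
   yields { offset = 1; 5 * a }, while adding a with scale -2 cancels the
   existing element and yields { offset = 1 }.  Once all MAX_AFF_ELTS slots
   are occupied, further elements are folded into the REST tree instead.  */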
/* Adds CST to C.  */

static void
aff_combination_add_cst (aff_tree *c, const widest_int &cst)
{
  c->offset = wide_int_ext_for_comb (c->offset + cst, c);
}
/* Adds COMB2 to COMB1.  */

void
aff_combination_add (aff_tree *comb1, aff_tree *comb2)
{
  unsigned i;

  aff_combination_add_cst (comb1, comb2->offset);
  for (i = 0; i < comb2->n; i++)
    aff_combination_add_elt (comb1, comb2->elts[i].val, comb2->elts[i].coef);
  if (comb2->rest)
    aff_combination_add_elt (comb1, comb2->rest, 1);
}
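/* For instance, adding { offset = 3; -2 * a; 1 * b } to { offset = 1; 2 * a }
   yields { offset = 4; 1 * b }: the offsets are summed and the two a
   elements cancel each other.  */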
/* Converts affine combination COMB to TYPE.  */

void
aff_combination_convert (aff_tree *comb, tree type)
{
  unsigned i, j;
  tree comb_type = comb->type;

  if (TYPE_PRECISION (type) > TYPE_PRECISION (comb_type))
    {
      tree val = fold_convert (type, aff_combination_to_tree (comb));
      tree_to_aff_combination (val, type, comb);
      return;
    }

  comb->type = type;
  if (comb->rest && !POINTER_TYPE_P (type))
    comb->rest = fold_convert (type, comb->rest);

  if (TYPE_PRECISION (type) == TYPE_PRECISION (comb_type))
    return;

  comb->offset = wide_int_ext_for_comb (comb->offset, comb);
  for (i = j = 0; i < comb->n; i++)
    {
      if (comb->elts[i].coef == 0)
        continue;
      comb->elts[j].coef = comb->elts[i].coef;
      comb->elts[j].val = fold_convert (type, comb->elts[i].val);
      j++;
    }

  comb->n = j;
  if (comb->n < MAX_AFF_ELTS && comb->rest)
    {
      comb->elts[comb->n].coef = 1;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}
/* Splits EXPR into an affine combination of parts.  */

void
tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
{
  aff_tree tmp;
  enum tree_code code;
  tree cst, core, toffset;
  HOST_WIDE_INT bitpos, bitsize;
  enum machine_mode mode;
  int unsignedp, volatilep;

  STRIP_NOPS (expr);

  code = TREE_CODE (expr);
  switch (code)
    {
    case INTEGER_CST:
      aff_combination_const (comb, type, wi::to_widest (expr));
      return;

    case POINTER_PLUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    case PLUS_EXPR:
    case MINUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), type, &tmp);
      if (code == MINUS_EXPR)
        aff_combination_scale (&tmp, -1);
      aff_combination_add (comb, &tmp);
      return;

    case MULT_EXPR:
      cst = TREE_OPERAND (expr, 1);
      if (TREE_CODE (cst) != INTEGER_CST)
        break;
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, wi::to_widest (cst));
      return;

    case NEGATE_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, -1);
      return;

    case BIT_NOT_EXPR:
      /* ~x = -x - 1.  */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, -1);
      aff_combination_add_cst (comb, -1);
      return;

    case ADDR_EXPR:
      /* Handle &MEM[ptr + CST] which is equivalent to POINTER_PLUS_EXPR.  */
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == MEM_REF)
        {
          expr = TREE_OPERAND (expr, 0);
          tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
          tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
          aff_combination_add (comb, &tmp);
          return;
        }
      core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
                                  &toffset, &mode, &unsignedp, &volatilep,
                                  false);
      if (bitpos % BITS_PER_UNIT != 0)
        break;
      aff_combination_const (comb, type, bitpos / BITS_PER_UNIT);
      core = build_fold_addr_expr (core);
      if (TREE_CODE (core) == ADDR_EXPR)
        aff_combination_add_elt (comb, core, 1);
      else
        {
          tree_to_aff_combination (core, type, &tmp);
          aff_combination_add (comb, &tmp);
        }
      if (toffset)
        {
          tree_to_aff_combination (toffset, type, &tmp);
          aff_combination_add (comb, &tmp);
        }
      return;

    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
        tree_to_aff_combination (TREE_OPERAND (TREE_OPERAND (expr, 0), 0),
                                 type, comb);
      else if (integer_zerop (TREE_OPERAND (expr, 1)))
        {
          aff_combination_elt (comb, type, expr);
          return;
        }
      else
        aff_combination_elt (comb, type,
                             build2 (MEM_REF, TREE_TYPE (expr),
                                     TREE_OPERAND (expr, 0),
                                     build_int_cst
                                       (TREE_TYPE (TREE_OPERAND (expr, 1)), 0)));
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    default:
      break;
    }

  aff_combination_elt (comb, type, expr);
}
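/* For instance, for a signed integer variable i, the expression
   (i + 7) * 4 - i, i.e. MINUS_EXPR (MULT_EXPR (PLUS_EXPR (i, 7), 4), i),
   decomposes into { offset = 28; 3 * i }: the PLUS_EXPR case splits i + 7,
   the MULT_EXPR case scales the result by 4, and the MINUS_EXPR case adds
   the second operand scaled by -1.  */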
/* Creates EXPR + ELT * SCALE in TYPE.  EXPR is taken from affine
   combination COMB.  */

static tree
add_elt_to_tree (tree expr, tree type, tree elt, const widest_int &scale_in,
                 aff_tree *comb ATTRIBUTE_UNUSED)
{
  enum tree_code code;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb);

  if (scale == -1
      && POINTER_TYPE_P (TREE_TYPE (elt)))
    {
      elt = convert_to_ptrofftype (elt);
      elt = fold_build1 (NEGATE_EXPR, TREE_TYPE (elt), elt);
      scale = 1;
    }

  if (scale == 1)
    {
      if (!expr)
        {
          if (POINTER_TYPE_P (TREE_TYPE (elt)))
            return elt;
          else
            return fold_convert (type1, elt);
        }

      if (POINTER_TYPE_P (TREE_TYPE (expr)))
        return fold_build_pointer_plus (expr, elt);
      if (POINTER_TYPE_P (TREE_TYPE (elt)))
        return fold_build_pointer_plus (elt, expr);
      return fold_build2 (PLUS_EXPR, type1,
                          expr, fold_convert (type1, elt));
    }

  if (scale == -1)
    {
      if (!expr)
        return fold_build1 (NEGATE_EXPR, type1,
                            fold_convert (type1, elt));

      if (POINTER_TYPE_P (TREE_TYPE (expr)))
        {
          elt = convert_to_ptrofftype (elt);
          elt = fold_build1 (NEGATE_EXPR, TREE_TYPE (elt), elt);
          return fold_build_pointer_plus (expr, elt);
        }
      return fold_build2 (MINUS_EXPR, type1,
                          expr, fold_convert (type1, elt));
    }

  elt = fold_convert (type1, elt);
  if (!expr)
    return fold_build2 (MULT_EXPR, type1, elt,
                        wide_int_to_tree (type1, scale));

  if (wi::neg_p (scale))
    {
      code = MINUS_EXPR;
      scale = -scale;
    }
  else
    code = PLUS_EXPR;

  elt = fold_build2 (MULT_EXPR, type1, elt,
                     wide_int_to_tree (type1, scale));
  if (POINTER_TYPE_P (TREE_TYPE (expr)))
    {
      if (code == MINUS_EXPR)
        elt = fold_build1 (NEGATE_EXPR, type1, elt);
      return fold_build_pointer_plus (expr, elt);
    }

  return fold_build2 (code, type1, expr, elt);
}
/* Makes tree from the affine combination COMB.  */

tree
aff_combination_to_tree (aff_tree *comb)
{
  tree type = comb->type;
  tree expr = NULL_TREE;
  unsigned i;
  widest_int off;
  int sgn;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  gcc_assert (comb->n == MAX_AFF_ELTS || comb->rest == NULL_TREE);

  for (i = 0; i < comb->n; i++)
    expr = add_elt_to_tree (expr, type, comb->elts[i].val, comb->elts[i].coef,
                            comb);

  if (comb->rest)
    expr = add_elt_to_tree (expr, type, comb->rest, 1, comb);

  /* Ensure that we get x - 1, not x + (-1) or x + 0xff..f if x is
     unsigned.  */
  if (wi::neg_p (comb->offset))
    {
      off = -comb->offset;
      sgn = -1;
    }
  else
    {
      off = comb->offset;
      sgn = 1;
    }
  return add_elt_to_tree (expr, type, wide_int_to_tree (type1, off), sgn,
                          comb);
}
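/* Up to folding, this is the inverse of tree_to_aff_combination: rebuilding
   { offset = 28; 3 * i } yields i * 3 + 28.  The final add_elt_to_tree call
   passes the absolute value of the offset together with its sign SGN, so a
   negative offset is emitted as a MINUS_EXPR rather than as the addition of
   a huge unsigned constant.  */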
/* Copies the tree elements of COMB to ensure that they are not shared.  */

void
unshare_aff_combination (aff_tree *comb)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    comb->elts[i].val = unshare_expr (comb->elts[i].val);
  if (comb->rest)
    comb->rest = unshare_expr (comb->rest);
}
/* Remove M-th element from COMB.  */

void
aff_combination_remove_elt (aff_tree *comb, unsigned m)
{
  comb->n--;
  if (m <= comb->n)
    comb->elts[m] = comb->elts[comb->n];
  if (comb->rest)
    {
      comb->elts[comb->n].coef = 1;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}
/* Adds C * COEF * VAL to R.  VAL may be NULL, in that case only
   C * COEF is added to R.  */

static void
aff_combination_add_product (aff_tree *c, const widest_int &coef, tree val,
                             aff_tree *r)
{
  unsigned i;
  tree aval, type;

  for (i = 0; i < c->n; i++)
    {
      aval = c->elts[i].val;
      if (val)
        {
          type = TREE_TYPE (aval);
          aval = fold_build2 (MULT_EXPR, type, aval,
                              fold_convert (type, val));
        }

      aff_combination_add_elt (r, aval, coef * c->elts[i].coef);
    }

  if (c->rest)
    {
      aval = c->rest;
      if (val)
        {
          type = TREE_TYPE (aval);
          aval = fold_build2 (MULT_EXPR, type, aval,
                              fold_convert (type, val));
        }

      aff_combination_add_elt (r, aval, coef);
    }

  if (val)
    aff_combination_add_elt (r, val, coef * c->offset);
  else
    aff_combination_add_cst (r, coef * c->offset);
}
/* Multiplies C1 by C2, storing the result to R.  */

void
aff_combination_mult (aff_tree *c1, aff_tree *c2, aff_tree *r)
{
  unsigned i;

  gcc_assert (TYPE_PRECISION (c1->type) == TYPE_PRECISION (c2->type));

  aff_combination_zero (r, c1->type);

  for (i = 0; i < c2->n; i++)
    aff_combination_add_product (c1, c2->elts[i].coef, c2->elts[i].val, r);
  if (c2->rest)
    aff_combination_add_product (c1, 1, c2->rest, r);
  aff_combination_add_product (c1, c2->offset, NULL, r);
}
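/* For instance, multiplying { offset = 2; 1 * a } by { offset = 3; 1 * b }
   yields (up to the order of the elements) { offset = 6; 3 * a; 2 * b;
   1 * (a * b) }; the product of the two non-constant elements becomes a new
   element whose value is the tree a * b.  */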
/* Returns the element of COMB whose value is VAL, or NULL if no such
   element exists.  If IDX is not NULL, it is set to the index of VAL in
   COMB.  */

static struct aff_comb_elt *
aff_combination_find_elt (aff_tree *comb, tree val, unsigned *idx)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, val, 0))
      {
        if (idx)
          *idx = i;

        return &comb->elts[i];
      }

  return NULL;
}
/* Element of the cache that maps ssa name NAME to its expanded form
   as an affine expression EXPANSION.  */

struct name_expansion
{
  aff_tree expansion;

  /* True if the expansion for the name is just being generated.  */
  unsigned in_progress : 1;
};
/* Expands SSA names in COMB recursively.  CACHE is used to cache the
   expansions of the ssa names, to avoid exponential time complexity for cases
   like

   a1 = a0 + a0;  a2 = a1 + a1;  a3 = a2 + a2;  ...  */

void
aff_combination_expand (aff_tree *comb ATTRIBUTE_UNUSED,
                        struct pointer_map_t **cache ATTRIBUTE_UNUSED)
{
  unsigned i;
  aff_tree to_add, current, curre;
  tree e, rhs;
  gimple def;
  widest_int scale;
  void **slot;
  struct name_expansion *exp;

  aff_combination_zero (&to_add, comb->type);
  for (i = 0; i < comb->n; i++)
    {
      tree type, name;
      enum tree_code code;

      e = comb->elts[i].val;
      type = TREE_TYPE (e);
      name = e;
      /* Look through some conversions.  */
      if (TREE_CODE (e) == NOP_EXPR
          && (TYPE_PRECISION (type)
              >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (e, 0)))))
        name = TREE_OPERAND (e, 0);
      if (TREE_CODE (name) != SSA_NAME)
        continue;
      def = SSA_NAME_DEF_STMT (name);
      if (!is_gimple_assign (def) || gimple_assign_lhs (def) != name)
        continue;

      code = gimple_assign_rhs_code (def);
      if (code != SSA_NAME
          && !IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
          && (get_gimple_rhs_class (code) != GIMPLE_SINGLE_RHS
              || !is_gimple_min_invariant (gimple_assign_rhs1 (def))))
        continue;

      /* We do not know whether the reference retains its value at the
         place where the expansion is used.  */
      if (TREE_CODE_CLASS (code) == tcc_reference)
        continue;

      if (!*cache)
        *cache = pointer_map_create ();
      slot = pointer_map_insert (*cache, e);
      exp = (struct name_expansion *) *slot;

      if (!exp)
        {
          exp = XNEW (struct name_expansion);
          exp->in_progress = 1;
          *slot = exp;
          /* In principle this is a generally valid folding, but
             it is not unconditionally an optimization, so do it
             here and not in fold_unary.  */
          /* Convert (T1)(X *+- CST) into (T1)X *+- (T1)CST if T1 is wider
             than the type of X and overflow for the type of X is
             undefined.  */
          if (e != name
              && INTEGRAL_TYPE_P (type)
              && INTEGRAL_TYPE_P (TREE_TYPE (name))
              && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (name))
              && (code == PLUS_EXPR || code == MINUS_EXPR || code == MULT_EXPR)
              && TREE_CODE (gimple_assign_rhs2 (def)) == INTEGER_CST)
            rhs = fold_build2 (code, type,
                               fold_convert (type, gimple_assign_rhs1 (def)),
                               fold_convert (type, gimple_assign_rhs2 (def)));
          else
            {
              rhs = gimple_assign_rhs_to_tree (def);
              if (e != name)
                rhs = fold_convert (type, rhs);
            }
          tree_to_aff_combination_expand (rhs, comb->type, &current, cache);
          exp->expansion = current;
          exp->in_progress = 0;
        }
      else
        {
          /* Since we follow the definitions in the SSA form, we should not
             enter a cycle unless we pass through a phi node.  */
          gcc_assert (!exp->in_progress);
          current = exp->expansion;
        }

      /* Accumulate the new terms to TO_ADD, so that we do not modify
         COMB while traversing it; include the term -coef * E, to remove
         it from COMB.  */
      scale = comb->elts[i].coef;
      aff_combination_zero (&curre, comb->type);
      aff_combination_add_elt (&curre, e, -scale);
      aff_combination_scale (&current, scale);
      aff_combination_add (&to_add, &current);
      aff_combination_add (&to_add, &curre);
    }
  aff_combination_add (comb, &to_add);
}
/* Similar to tree_to_aff_combination, but follows SSA name definitions
   and expands them recursively.  CACHE is used to cache the expansions
   of the ssa names, to avoid exponential time complexity for cases
   like

   a1 = a0 + a0;  a2 = a1 + a1;  a3 = a2 + a2;  ...  */

void
tree_to_aff_combination_expand (tree expr, tree type, aff_tree *comb,
                                struct pointer_map_t **cache)
{
  tree_to_aff_combination (expr, type, comb);
  aff_combination_expand (comb, cache);
}
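/* A typical way to use the expanding variant looks as follows (a sketch;
   EXPR and TYPE stand for the caller's expression and type):

     struct pointer_map_t *cache = NULL;
     aff_tree comb;

     tree_to_aff_combination_expand (expr, type, &comb, &cache);
     ...
     free_affine_expand_cache (&cache);

   The cache is created lazily by aff_combination_expand, may be shared by
   several calls, and must be released with free_affine_expand_cache.  */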
/* Frees memory occupied by struct name_expansion in *VALUE.  Callback for
   pointer_map_traverse.  */

static bool
free_name_expansion (const void *key ATTRIBUTE_UNUSED, void **value,
                     void *data ATTRIBUTE_UNUSED)
{
  struct name_expansion *const exp = (struct name_expansion *) *value;

  free (exp);
  return true;
}
/* Frees memory allocated for the CACHE used by
   tree_to_aff_combination_expand.  */

void
free_affine_expand_cache (struct pointer_map_t **cache)
{
  if (!*cache)
    return;

  pointer_map_traverse (*cache, free_name_expansion, NULL);
  pointer_map_destroy (*cache);
  *cache = NULL;
}
/* If VAL != CST * DIV for any constant CST, returns false.
   Otherwise, if *MULT_SET is true, additionally compares CST and MULT,
   and if they are different, returns false.  Finally, if neither of these
   two cases occur, true is returned, and CST is stored to MULT and MULT_SET
   is set to true.  */

static bool
wide_int_constant_multiple_p (const widest_int &val, const widest_int &div,
                              bool *mult_set, widest_int *mult)
{
  widest_int cst;

  if (val == 0)
    {
      if (*mult_set && *mult != 0)
        return false;
      *mult_set = true;
      *mult = 0;
      return true;
    }

  if (div == 0)
    return false;

  if (!wi::multiple_of_p (val, div, SIGNED, &cst))
    return false;

  if (*mult_set && *mult != cst)
    return false;

  *mult_set = true;
  *mult = cst;
  return true;
}
/* Returns true if VAL = X * DIV for some constant X.  If this is the case,
   X is stored to MULT.  */

bool
aff_combination_constant_multiple_p (aff_tree *val, aff_tree *div,
                                     widest_int *mult)
{
  bool mult_set = false;
  unsigned i;

  if (val->n == 0 && val->offset == 0)
    {
      *mult = 0;
      return true;
    }

  if (val->n != div->n)
    return false;

  if (val->rest || div->rest)
    return false;

  if (!wide_int_constant_multiple_p (val->offset, div->offset,
                                     &mult_set, mult))
    return false;

  for (i = 0; i < div->n; i++)
    {
      struct aff_comb_elt *elt
        = aff_combination_find_elt (val, div->elts[i].val, NULL);
      if (!elt)
        return false;
      if (!wide_int_constant_multiple_p (elt->coef, div->elts[i].coef,
                                         &mult_set, mult))
        return false;
    }

  gcc_assert (mult_set);
  return true;
}
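/* For instance, with VAL = { offset = 8; 4 * a } and DIV = { offset = 2; 1 * a }
   the function stores 4 to MULT and returns true, because 8 = 4 * 2 and
   4 = 4 * 1.  With DIV = { offset = 3; 1 * a } it returns false, since no
   single constant multiplies both the offset and the coefficient.  */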
/* Prints the affine VAL to the FILE.  */

void
print_aff (FILE *file, aff_tree *val)
{
  unsigned i;
  signop sgn = TYPE_SIGN (val->type);
  if (POINTER_TYPE_P (val->type))
    sgn = SIGNED;
  fprintf (file, "{\n  type = ");
  print_generic_expr (file, val->type, TDF_VOPS|TDF_MEMSYMS);
  fprintf (file, "\n  offset = ");
  print_dec (val->offset, file, sgn);
  if (val->n > 0)
    {
      fprintf (file, "\n  elements = {\n");
      for (i = 0; i < val->n; i++)
        {
          fprintf (file, "    [%d] = ", i);
          print_generic_expr (file, val->elts[i].val, TDF_VOPS|TDF_MEMSYMS);

          fprintf (file, " * ");
          print_dec (val->elts[i].coef, file, sgn);
          if (i != val->n - 1)
            fprintf (file, ", \n");
        }
      fprintf (file, "\n  }");
    }
  if (val->rest)
    {
      fprintf (file, "\n  rest = ");
      print_generic_expr (file, val->rest, TDF_VOPS|TDF_MEMSYMS);
    }
  fprintf (file, "\n}");
}
/* Prints the affine VAL to the standard error, used for debugging.  */

DEBUG_FUNCTION void
debug_aff (aff_tree *val)
{
  print_aff (stderr, val);
  fprintf (stderr, "\n");
}
/* Computes address of the reference REF in ADDR.  The size of the accessed
   location is stored to SIZE.  Returns the ultimate containing object to
   which REF refers.  */

tree
get_inner_reference_aff (tree ref, aff_tree *addr, widest_int *size)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree toff;
  enum machine_mode mode;
  int uns, vol;
  aff_tree tmp;
  tree base = get_inner_reference (ref, &bitsize, &bitpos, &toff, &mode,
                                   &uns, &vol, false);
  tree base_addr = build_fold_addr_expr (base);

  /* ADDR = &BASE + TOFF + BITPOS / BITS_PER_UNIT.  */

  tree_to_aff_combination (base_addr, sizetype, addr);

  if (toff)
    {
      tree_to_aff_combination (toff, sizetype, &tmp);
      aff_combination_add (addr, &tmp);
    }

  aff_combination_const (&tmp, sizetype, bitpos / BITS_PER_UNIT);
  aff_combination_add (addr, &tmp);

  *size = (bitsize + BITS_PER_UNIT - 1) / BITS_PER_UNIT;

  return base;
}
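/* For instance, for the reference a.f, where the field f starts at byte 12
   of the structure and no variable offset is involved, ADDR becomes
   { offset = 12; 1 * &a }, SIZE the size of f in bytes rounded up, and the
   base object a is returned.  */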
/* Returns true if a region of size SIZE1 at position 0 and a region of
   size SIZE2 at position DIFF cannot overlap.  */

bool
aff_comb_cannot_overlap_p (aff_tree *diff, const widest_int &size1,
                           const widest_int &size2)
{
  /* Unless the difference is a constant, we fail.  */
  if (diff->n != 0)
    return false;

  if (wi::neg_p (diff->offset))
    {
      /* The second object is before the first one, we succeed if the last
         element of the second object is before the start of the first one.  */
      return wi::neg_p (diff->offset + size2 - 1);
    }
  else
    {
      /* We succeed if the second object starts after the first one ends.  */
      return wi::les_p (size1, diff->offset);
    }
}
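/* For instance, with DIFF = { offset = 16 }, SIZE1 = 8 and SIZE2 = 4 the
   regions [0, 8) and [16, 20) cannot overlap and the function returns true;
   with DIFF = { offset = 4 } and the same sizes it returns false, because
   the first region extends past the start of the second one.  */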