]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/tree-affine.c
Update Copyright years for files modified in 2011 and/or 2012.
[thirdparty/gcc.git] / gcc / tree-affine.c
CommitLineData
d3610bea 1/* Operations with affine combinations of trees.
71e45bc2 2 Copyright (C) 2005, 2007, 2008, 2010, 2011, 2012
3 Free Software Foundation, Inc.
48e1416a 4
d3610bea 5This file is part of GCC.
48e1416a 6
d3610bea 7GCC is free software; you can redistribute it and/or modify it
8under the terms of the GNU General Public License as published by the
8c4c00c1 9Free Software Foundation; either version 3, or (at your option) any
d3610bea 10later version.
48e1416a 11
d3610bea 12GCC is distributed in the hope that it will be useful, but WITHOUT
13ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
48e1416a 16
d3610bea 17You should have received a copy of the GNU General Public License
8c4c00c1 18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
d3610bea 20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
d3610bea 24#include "tree.h"
ce084dfc 25#include "tree-pretty-print.h"
ad4a85ad 26#include "pointer-set.h"
d3610bea 27#include "tree-affine.h"
75a70cf9 28#include "gimple.h"
00ae4124 29#include "flags.h"
b9ed1410 30#include "dumpfile.h"
d3610bea 31
32/* Extends CST as appropriate for the affine combinations COMB. */
33
34double_int
35double_int_ext_for_comb (double_int cst, aff_tree *comb)
36{
cf8f0e63 37 return cst.sext (TYPE_PRECISION (comb->type));
d3610bea 38}
39
40/* Initializes affine combination COMB so that its value is zero in TYPE. */
41
42static void
43aff_combination_zero (aff_tree *comb, tree type)
44{
45 comb->type = type;
46 comb->offset = double_int_zero;
47 comb->n = 0;
48 comb->rest = NULL_TREE;
49}
50
51/* Sets COMB to CST. */
52
53void
54aff_combination_const (aff_tree *comb, tree type, double_int cst)
55{
56 aff_combination_zero (comb, type);
57 comb->offset = double_int_ext_for_comb (cst, comb);
58}
59
60/* Sets COMB to single element ELT. */
61
62void
63aff_combination_elt (aff_tree *comb, tree type, tree elt)
64{
65 aff_combination_zero (comb, type);
66
67 comb->n = 1;
68 comb->elts[0].val = elt;
69 comb->elts[0].coef = double_int_one;
70}
71
/* Scales COMB by SCALE.  Elements whose scaled coefficient wraps to zero
   are dropped; the symbolic REST part is either folded back into the
   element array (if a slot freed up) or multiplied in place.  */

void
aff_combination_scale (aff_tree *comb, double_int scale)
{
  unsigned i, j;

  /* Canonicalize SCALE to the precision of the combination.  */
  scale = double_int_ext_for_comb (scale, comb);
  if (scale.is_one ())
    return;

  /* Scaling by zero yields the zero combination.  */
  if (scale.is_zero ())
    {
      aff_combination_zero (comb, comb->type);
      return;
    }

  comb->offset
    = double_int_ext_for_comb (scale * comb->offset, comb);
  /* Compact the element array in place: I reads, J writes.  */
  for (i = 0, j = 0; i < comb->n; i++)
    {
      double_int new_coef;

      new_coef
	= double_int_ext_for_comb (scale * comb->elts[i].coef, comb);
      /* A coefficient may become zero due to overflow.  Remove the zero
	 elements.  */
      if (new_coef.is_zero ())
	continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = comb->elts[i].val;
      j++;
    }
  comb->n = j;

  if (comb->rest)
    {
      /* Arithmetic on the rest of a pointer combination is done in
	 sizetype.  */
      tree type = comb->type;
      if (POINTER_TYPE_P (type))
	type = sizetype;
      if (comb->n < MAX_AFF_ELTS)
	{
	  /* Dropping zero coefficients above may have freed a slot; move
	     REST into the element array with coefficient SCALE.  */
	  comb->elts[comb->n].coef = scale;
	  comb->elts[comb->n].val = comb->rest;
	  comb->rest = NULL_TREE;
	  comb->n++;
	}
      else
	comb->rest = fold_build2 (MULT_EXPR, type, comb->rest,
				  double_int_to_tree (type, scale));
    }
}
124
/* Adds ELT * SCALE to COMB.  If ELT is already present its coefficient is
   merged; otherwise it is appended to the element array, or, if the array
   is full, accumulated into the symbolic REST part.  */

void
aff_combination_add_elt (aff_tree *comb, tree elt, double_int scale)
{
  unsigned i;
  tree type;

  scale = double_int_ext_for_comb (scale, comb);
  if (scale.is_zero ())
    return;

  /* If ELT already occurs in COMB, just combine the coefficients.  */
  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, elt, 0))
      {
	double_int new_coef;

	new_coef = comb->elts[i].coef + scale;
	new_coef = double_int_ext_for_comb (new_coef, comb);
	if (!new_coef.is_zero ())
	  {
	    comb->elts[i].coef = new_coef;
	    return;
	  }

	/* The coefficients canceled; remove the element by replacing it
	   with the last one.  */
	comb->n--;
	comb->elts[i] = comb->elts[comb->n];

	/* The freed slot lets REST (nonnull only when the array was full)
	   move back into the element array with coefficient one.  */
	if (comb->rest)
	  {
	    gcc_assert (comb->n == MAX_AFF_ELTS - 1);
	    comb->elts[comb->n].coef = double_int_one;
	    comb->elts[comb->n].val = comb->rest;
	    comb->rest = NULL_TREE;
	    comb->n++;
	  }
	return;
      }
  if (comb->n < MAX_AFF_ELTS)
    {
      comb->elts[comb->n].coef = scale;
      comb->elts[comb->n].val = elt;
      comb->n++;
      return;
    }

  /* The element array is full; fold ELT * SCALE into REST.  Pointer
     combinations do this arithmetic in sizetype.  */
  type = comb->type;
  if (POINTER_TYPE_P (type))
    type = sizetype;

  if (scale.is_one ())
    elt = fold_convert (type, elt);
  else
    elt = fold_build2 (MULT_EXPR, type,
		       fold_convert (type, elt),
		       double_int_to_tree (type, scale));

  if (comb->rest)
    comb->rest = fold_build2 (PLUS_EXPR, type, comb->rest,
			      elt);
  else
    comb->rest = elt;
}
188
cf661063 189/* Adds CST to C. */
190
191static void
192aff_combination_add_cst (aff_tree *c, double_int cst)
193{
cf8f0e63 194 c->offset = double_int_ext_for_comb (c->offset + cst, c);
cf661063 195}
196
d3610bea 197/* Adds COMB2 to COMB1. */
198
199void
200aff_combination_add (aff_tree *comb1, aff_tree *comb2)
201{
202 unsigned i;
203
cf661063 204 aff_combination_add_cst (comb1, comb2->offset);
d3610bea 205 for (i = 0; i < comb2->n; i++)
206 aff_combination_add_elt (comb1, comb2->elts[i].val, comb2->elts[i].coef);
207 if (comb2->rest)
208 aff_combination_add_elt (comb1, comb2->rest, double_int_one);
209}
210
/* Converts affine combination COMB to TYPE.  */

void
aff_combination_convert (aff_tree *comb, tree type)
{
  unsigned i, j;
  tree comb_type = comb->type;

  /* Widening cannot be done element-wise (bits already truncated in the
     narrow type would be lost), so rebuild the combination from its
     converted tree form.  */
  if (TYPE_PRECISION (type) > TYPE_PRECISION (comb_type))
    {
      tree val = fold_convert (type, aff_combination_to_tree (comb));
      tree_to_aff_combination (val, type, comb);
      return;
    }

  comb->type = type;
  if (comb->rest && !POINTER_TYPE_P (type))
    comb->rest = fold_convert (type, comb->rest);

  /* Same precision: retagging the type above is all that is needed.  */
  if (TYPE_PRECISION (type) == TYPE_PRECISION (comb_type))
    return;

  /* Narrowing: re-extend the offset and coefficients to the new
     precision, dropping elements whose coefficient becomes zero.  */
  comb->offset = double_int_ext_for_comb (comb->offset, comb);
  for (i = j = 0; i < comb->n; i++)
    {
      double_int new_coef = double_int_ext_for_comb (comb->elts[i].coef, comb);
      if (new_coef.is_zero ())
	continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = fold_convert (type, comb->elts[i].val);
      j++;
    }

  comb->n = j;
  /* A freed slot lets REST move back into the element array.  */
  if (comb->n < MAX_AFF_ELTS && comb->rest)
    {
      comb->elts[comb->n].coef = double_int_one;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}
253
/* Splits EXPR into an affine combination of parts, stored in COMB with
   type TYPE.  Expressions that cannot be decomposed further become single
   elements with coefficient one.  */

void
tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
{
  aff_tree tmp;
  enum tree_code code;
  tree cst, core, toffset;
  HOST_WIDE_INT bitpos, bitsize;
  enum machine_mode mode;
  int unsignedp, volatilep;

  STRIP_NOPS (expr);

  code = TREE_CODE (expr);
  switch (code)
    {
    case INTEGER_CST:
      aff_combination_const (comb, type, tree_to_double_int (expr));
      return;

    case POINTER_PLUS_EXPR:
      /* The pointer operand keeps TYPE; the offset operand is sizetype.  */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    case PLUS_EXPR:
    case MINUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), type, &tmp);
      if (code == MINUS_EXPR)
	aff_combination_scale (&tmp, double_int_minus_one);
      aff_combination_add (comb, &tmp);
      return;

    case MULT_EXPR:
      /* Only multiplications by a constant are decomposed.  */
      cst = TREE_OPERAND (expr, 1);
      if (TREE_CODE (cst) != INTEGER_CST)
	break;
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, tree_to_double_int (cst));
      return;

    case NEGATE_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, double_int_minus_one);
      return;

    case BIT_NOT_EXPR:
      /* ~x = -x - 1 */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, double_int_minus_one);
      aff_combination_add_cst (comb, double_int_minus_one);
      return;

    case ADDR_EXPR:
      /* Handle &MEM[ptr + CST] which is equivalent to POINTER_PLUS_EXPR.  */
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == MEM_REF)
	{
	  expr = TREE_OPERAND (expr, 0);
	  tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
	  tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
	  aff_combination_add (comb, &tmp);
	  return;
	}
      core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
				  &toffset, &mode, &unsignedp, &volatilep,
				  false);
      /* Only byte-aligned references can be represented.  */
      if (bitpos % BITS_PER_UNIT != 0)
	break;
      aff_combination_const (comb, type,
			     double_int::from_uhwi (bitpos / BITS_PER_UNIT));
      core = build_fold_addr_expr (core);
      if (TREE_CODE (core) == ADDR_EXPR)
	aff_combination_add_elt (comb, core, double_int_one);
      else
	{
	  tree_to_aff_combination (core, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      if (toffset)
	{
	  tree_to_aff_combination (toffset, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      return;

    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
	tree_to_aff_combination (TREE_OPERAND (TREE_OPERAND (expr, 0), 0),
				 type, comb);
      else if (integer_zerop (TREE_OPERAND (expr, 1)))
	{
	  aff_combination_elt (comb, type, expr);
	  return;
	}
      else
	/* Keep the MEM_REF with a zeroed offset as the element; the
	   constant offset is added separately below.  */
	aff_combination_elt (comb, type,
			     build2 (MEM_REF, TREE_TYPE (expr),
				     TREE_OPERAND (expr, 0),
				     build_int_cst
				      (TREE_TYPE (TREE_OPERAND (expr, 1)), 0)));
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    default:
      break;
    }

  /* Fallback: EXPR is an opaque single element.  */
  aff_combination_elt (comb, type, expr);
}
367
/* Creates EXPR + ELT * SCALE in TYPE.  EXPR is taken from affine
   combination COMB (and may be NULL_TREE, meaning just ELT * SCALE is
   built).  For pointer TYPE the scaling arithmetic is done in sizetype
   and the sum is built with POINTER_PLUS_EXPR.  */

static tree
add_elt_to_tree (tree expr, tree type, tree elt, double_int scale,
		 aff_tree *comb)
{
  enum tree_code code;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  scale = double_int_ext_for_comb (scale, comb);
  elt = fold_convert (type1, elt);

  /* Coefficient one: plain addition (or ELT itself if EXPR is empty).  */
  if (scale.is_one ())
    {
      if (!expr)
	return fold_convert (type, elt);

      if (POINTER_TYPE_P (type))
	return fold_build_pointer_plus (expr, elt);
      return fold_build2 (PLUS_EXPR, type, expr, elt);
    }

  /* Coefficient minus one: subtraction, or negated pointer offset.  */
  if (scale.is_minus_one ())
    {
      if (!expr)
	return fold_convert (type, fold_build1 (NEGATE_EXPR, type1, elt));

      if (POINTER_TYPE_P (type))
	{
	  elt = fold_build1 (NEGATE_EXPR, type1, elt);
	  return fold_build_pointer_plus (expr, elt);
	}
      return fold_build2 (MINUS_EXPR, type, expr, elt);
    }

  if (!expr)
    return fold_convert (type,
			 fold_build2 (MULT_EXPR, type1, elt,
				      double_int_to_tree (type1, scale)));

  /* Emit a negative coefficient as a subtraction of the positive
     product, which reads better in dumps.  */
  if (scale.is_negative ())
    {
      code = MINUS_EXPR;
      scale = -scale;
    }
  else
    code = PLUS_EXPR;

  elt = fold_build2 (MULT_EXPR, type1, elt,
		     double_int_to_tree (type1, scale));
  if (POINTER_TYPE_P (type))
    {
      /* POINTER_PLUS_EXPR has no subtraction form; negate the offset.  */
      if (code == MINUS_EXPR)
	elt = fold_build1 (NEGATE_EXPR, type1, elt);
      return fold_build_pointer_plus (expr, elt);
    }
  return fold_build2 (code, type, expr, elt);
}
429
/* Makes a tree from the affine combination COMB by summing up all of its
   elements, the symbolic rest, and finally the constant offset.  */

tree
aff_combination_to_tree (aff_tree *comb)
{
  tree type = comb->type;
  tree expr = NULL_TREE;
  unsigned i;
  double_int off, sgn;
  /* Offsets of pointer combinations are built in sizetype.  */
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  /* REST may only be set when the element array is full.  */
  gcc_assert (comb->n == MAX_AFF_ELTS || comb->rest == NULL_TREE);

  for (i = 0; i < comb->n; i++)
    expr = add_elt_to_tree (expr, type, comb->elts[i].val, comb->elts[i].coef,
			    comb);

  if (comb->rest)
    expr = add_elt_to_tree (expr, type, comb->rest, double_int_one, comb);

  /* Ensure that we get x - 1, not x + (-1) or x + 0xff..f if x is
     unsigned.  */
  if (comb->offset.is_negative ())
    {
      off = -comb->offset;
      sgn = double_int_minus_one;
    }
  else
    {
      off = comb->offset;
      sgn = double_int_one;
    }
  return add_elt_to_tree (expr, type, double_int_to_tree (type1, off), sgn,
			  comb);
}
467
468/* Copies the tree elements of COMB to ensure that they are not shared. */
469
470void
471unshare_aff_combination (aff_tree *comb)
472{
473 unsigned i;
474
475 for (i = 0; i < comb->n; i++)
476 comb->elts[i].val = unshare_expr (comb->elts[i].val);
477 if (comb->rest)
478 comb->rest = unshare_expr (comb->rest);
479}
480
/* Remove the M-th element from COMB.  */

void
aff_combination_remove_elt (aff_tree *comb, unsigned m)
{
  comb->n--;
  /* Fill the hole with the last element; this is a no-op when M was
     already the last slot (then m == comb->n after the decrement).  */
  if (m <= comb->n)
    comb->elts[m] = comb->elts[comb->n];
  /* The freed slot lets REST move back into the element array.  */
  if (comb->rest)
    {
      comb->elts[comb->n].coef = double_int_one;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}
cf661063 497
/* Adds C * COEF * VAL to R.  VAL may be NULL, in that case only
   C * COEF is added to R.  */

static void
aff_combination_add_product (aff_tree *c, double_int coef, tree val,
			     aff_tree *r)
{
  unsigned i;
  tree aval, type;

  /* Each element of C contributes (elt * VAL) with coefficient
     COEF * elt_coef.  */
  for (i = 0; i < c->n; i++)
    {
      aval = c->elts[i].val;
      if (val)
	{
	  type = TREE_TYPE (aval);
	  aval = fold_build2 (MULT_EXPR, type, aval,
			      fold_convert (type, val));
	}

      aff_combination_add_elt (r, aval, coef * c->elts[i].coef);
    }

  /* The symbolic rest of C has an implicit coefficient of one.  */
  if (c->rest)
    {
      aval = c->rest;
      if (val)
	{
	  type = TREE_TYPE (aval);
	  aval = fold_build2 (MULT_EXPR, type, aval,
			      fold_convert (type, val));
	}

      aff_combination_add_elt (r, aval, coef);
    }

  /* Finally the constant offset of C: VAL * (COEF * offset), or a pure
     constant when VAL is absent.  */
  if (val)
    aff_combination_add_elt (r, val, coef * c->offset);
  else
    aff_combination_add_cst (r, coef * c->offset);
}
540
541/* Multiplies C1 by C2, storing the result to R */
542
543void
544aff_combination_mult (aff_tree *c1, aff_tree *c2, aff_tree *r)
545{
546 unsigned i;
547 gcc_assert (TYPE_PRECISION (c1->type) == TYPE_PRECISION (c2->type));
548
549 aff_combination_zero (r, c1->type);
550
551 for (i = 0; i < c2->n; i++)
552 aff_combination_add_product (c1, c2->elts[i].coef, c2->elts[i].val, r);
553 if (c2->rest)
554 aff_combination_add_product (c1, double_int_one, c2->rest, r);
555 aff_combination_add_product (c1, c2->offset, NULL, r);
556}
ad4a85ad 557
558/* Returns the element of COMB whose value is VAL, or NULL if no such
559 element exists. If IDX is not NULL, it is set to the index of VAL in
560 COMB. */
48e1416a 561
ad4a85ad 562static struct aff_comb_elt *
563aff_combination_find_elt (aff_tree *comb, tree val, unsigned *idx)
564{
565 unsigned i;
566
567 for (i = 0; i < comb->n; i++)
568 if (operand_equal_p (comb->elts[i].val, val, 0))
569 {
570 if (idx)
571 *idx = i;
572
573 return &comb->elts[i];
574 }
575
576 return NULL;
577}
578
/* Element of the cache that maps ssa name NAME to its expanded form
   as an affine expression EXPANSION.  */

struct name_expansion
{
  /* The affine combination the name expands to.  */
  aff_tree expansion;

  /* True if the expansion for the name is just being generated; used by
     aff_combination_expand to detect cycles in the SSA definitions.  */
  unsigned in_progress : 1;
};
589
/* Expands SSA names in COMB recursively.  CACHE is used to cache the
   results; it maps each expanded name to its struct name_expansion and is
   created lazily on first use.  */

void
aff_combination_expand (aff_tree *comb ATTRIBUTE_UNUSED,
			struct pointer_map_t **cache ATTRIBUTE_UNUSED)
{
  /* NOTE(review): both parameters are marked ATTRIBUTE_UNUSED although
     they are clearly used below — the markers look like leftovers.  */
  unsigned i;
  aff_tree to_add, current, curre;
  tree e, rhs;
  gimple def;
  double_int scale;
  void **slot;
  struct name_expansion *exp;

  aff_combination_zero (&to_add, comb->type);
  for (i = 0; i < comb->n; i++)
    {
      tree type, name;
      enum tree_code code;

      e = comb->elts[i].val;
      type = TREE_TYPE (e);
      name = e;
      /* Look through some conversions.  */
      if (TREE_CODE (e) == NOP_EXPR
	  && (TYPE_PRECISION (type)
	      >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (e, 0)))))
	name = TREE_OPERAND (e, 0);
      if (TREE_CODE (name) != SSA_NAME)
	continue;
      def = SSA_NAME_DEF_STMT (name);
      if (!is_gimple_assign (def) || gimple_assign_lhs (def) != name)
	continue;

      /* Only expand names defined by expressions or invariants.  */
      code = gimple_assign_rhs_code (def);
      if (code != SSA_NAME
	  && !IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
	  && (get_gimple_rhs_class (code) != GIMPLE_SINGLE_RHS
	      || !is_gimple_min_invariant (gimple_assign_rhs1 (def))))
	continue;

      /* We do not know whether the reference retains its value at the
	 place where the expansion is used.  */
      if (TREE_CODE_CLASS (code) == tcc_reference)
	continue;

      if (!*cache)
	*cache = pointer_map_create ();
      slot = pointer_map_insert (*cache, e);
      exp = (struct name_expansion *) *slot;

      if (!exp)
	{
	  /* First time we see E: compute and cache its expansion.
	     IN_PROGRESS guards against re-entering this name.  */
	  exp = XNEW (struct name_expansion);
	  exp->in_progress = 1;
	  *slot = exp;
	  /* In principle this is a generally valid folding, but
	     it is not unconditionally an optimization, so do it
	     here and not in fold_unary.  */
	  /* Convert (T1)(X *+- CST) into (T1)X *+- (T1)CST if T1 is wider
	     than the type of X and overflow for the type of X is
	     undefined.  */
	  if (e != name
	      && INTEGRAL_TYPE_P (type)
	      && INTEGRAL_TYPE_P (TREE_TYPE (name))
	      && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (name))
	      && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (name))
	      && (code == PLUS_EXPR || code == MINUS_EXPR || code == MULT_EXPR)
	      && TREE_CODE (gimple_assign_rhs2 (def)) == INTEGER_CST)
	    rhs = fold_build2 (code, type,
			       fold_convert (type, gimple_assign_rhs1 (def)),
			       fold_convert (type, gimple_assign_rhs2 (def)));
	  else
	    {
	      rhs = gimple_assign_rhs_to_tree (def);
	      if (e != name)
		rhs = fold_convert (type, rhs);
	    }
	  tree_to_aff_combination_expand (rhs, comb->type, &current, cache);
	  exp->expansion = current;
	  exp->in_progress = 0;
	}
      else
	{
	  /* Since we follow the definitions in the SSA form, we should not
	     enter a cycle unless we pass through a phi node.  */
	  gcc_assert (!exp->in_progress);
	  current = exp->expansion;
	}

      /* Accumulate the new terms to TO_ADD, so that we do not modify
	 COMB while traversing it; include the term -coef * E, to remove
	 it from COMB.  */
      scale = comb->elts[i].coef;
      aff_combination_zero (&curre, comb->type);
      aff_combination_add_elt (&curre, e, -scale);
      aff_combination_scale (&current, scale);
      aff_combination_add (&to_add, &current);
      aff_combination_add (&to_add, &curre);
    }
  aff_combination_add (comb, &to_add);
}
693
/* Similar to tree_to_aff_combination, but follows SSA name definitions
   and expands them recursively.  CACHE is used to cache the expansions
   of the ssa names, to avoid exponential time complexity for cases
   like

   a1 = a0 + a0;
   a2 = a1 + a1;
   a3 = a2 + a2;
   ...

   The caller owns CACHE and must release it with
   free_affine_expand_cache when done.  */

void
tree_to_aff_combination_expand (tree expr, tree type, aff_tree *comb,
				struct pointer_map_t **cache)
{
  tree_to_aff_combination (expr, type, comb);
  aff_combination_expand (comb, cache);
}
711
ad4a85ad 712/* Frees memory occupied by struct name_expansion in *VALUE. Callback for
713 pointer_map_traverse. */
714
715static bool
f8fd23c0 716free_name_expansion (const void *key ATTRIBUTE_UNUSED, void **value,
ad4a85ad 717 void *data ATTRIBUTE_UNUSED)
718{
45ba1503 719 struct name_expansion *const exp = (struct name_expansion *) *value;
ad4a85ad 720
721 free (exp);
722 return true;
723}
724
725/* Frees memory allocated for the CACHE used by
726 tree_to_aff_combination_expand. */
727
728void
729free_affine_expand_cache (struct pointer_map_t **cache)
730{
731 if (!*cache)
732 return;
733
734 pointer_map_traverse (*cache, free_name_expansion, NULL);
735 pointer_map_destroy (*cache);
736 *cache = NULL;
737}
738
/* If VAL != CST * DIV for any constant CST, returns false.
   Otherwise, if VAL != 0 (and hence CST != 0), and *MULT_SET is true,
   additionally compares CST and MULT, and if they are different,
   returns false.  Finally, if neither of these two cases occur,
   true is returned, and if CST != 0, CST is stored to MULT and
   MULT_SET is set to true.  */

static bool
double_int_constant_multiple_p (double_int val, double_int div,
				bool *mult_set, double_int *mult)
{
  double_int rem, cst;

  /* 0 == CST * DIV for any DIV, without constraining CST.  */
  if (val.is_zero ())
    return true;

  if (div.is_zero ())
    return false;

  /* VAL must be exactly divisible by DIV.  */
  cst = val.sdivmod (div, FLOOR_DIV_EXPR, &rem);
  if (!rem.is_zero ())
    return false;

  /* The quotient must agree with any previously established multiple.  */
  if (*mult_set && *mult != cst)
    return false;

  *mult_set = true;
  *mult = cst;
  return true;
}
769
/* Returns true if VAL = X * DIV for some constant X.  If this is the case,
   X is stored to MULT.  */

bool
aff_combination_constant_multiple_p (aff_tree *val, aff_tree *div,
				     double_int *mult)
{
  bool mult_set = false;
  unsigned i;

  /* The zero combination is 0 * DIV.  */
  if (val->n == 0 && val->offset.is_zero ())
    {
      *mult = double_int_zero;
      return true;
    }
  if (val->n != div->n)
    return false;

  /* Symbolic rests cannot be compared element-wise.  */
  if (val->rest || div->rest)
    return false;

  if (!double_int_constant_multiple_p (val->offset, div->offset,
				       &mult_set, mult))
    return false;

  /* Every element of DIV must appear in VAL with a coefficient that is
     the same constant multiple.  */
  for (i = 0; i < div->n; i++)
    {
      struct aff_comb_elt *elt
	      = aff_combination_find_elt (val, div->elts[i].val, NULL);
      if (!elt)
	return false;
      if (!double_int_constant_multiple_p (elt->coef, div->elts[i].coef,
					   &mult_set, mult))
	return false;
    }

  gcc_assert (mult_set);
  return true;
}
28d4c440 809
810/* Prints the affine VAL to the FILE. */
811
d129ada0 812static void
28d4c440 813print_aff (FILE *file, aff_tree *val)
814{
815 unsigned i;
816 bool uns = TYPE_UNSIGNED (val->type);
817 if (POINTER_TYPE_P (val->type))
818 uns = false;
819 fprintf (file, "{\n type = ");
820 print_generic_expr (file, val->type, TDF_VOPS|TDF_MEMSYMS);
821 fprintf (file, "\n offset = ");
822 dump_double_int (file, val->offset, uns);
823 if (val->n > 0)
824 {
825 fprintf (file, "\n elements = {\n");
826 for (i = 0; i < val->n; i++)
827 {
828 fprintf (file, " [%d] = ", i);
829 print_generic_expr (file, val->elts[i].val, TDF_VOPS|TDF_MEMSYMS);
48e1416a 830
28d4c440 831 fprintf (file, " * ");
832 dump_double_int (file, val->elts[i].coef, uns);
833 if (i != val->n - 1)
834 fprintf (file, ", \n");
835 }
836 fprintf (file, "\n }");
837 }
838 if (val->rest)
839 {
840 fprintf (file, "\n rest = ");
841 print_generic_expr (file, val->rest, TDF_VOPS|TDF_MEMSYMS);
842 }
843 fprintf (file, "\n}");
844}
845
846/* Prints the affine VAL to the standard error, used for debugging. */
847
4b987fac 848DEBUG_FUNCTION void
28d4c440 849debug_aff (aff_tree *val)
850{
851 print_aff (stderr, val);
852 fprintf (stderr, "\n");
853}
063a8bce 854
/* Returns address of the reference REF in ADDR.  The size of the accessed
   location is stored to SIZE (in bytes, rounded up to whole units).  */

void
get_inner_reference_aff (tree ref, aff_tree *addr, double_int *size)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree toff;
  enum machine_mode mode;
  int uns, vol;
  aff_tree tmp;
  tree base = get_inner_reference (ref, &bitsize, &bitpos, &toff, &mode,
				   &uns, &vol, false);
  tree base_addr = build_fold_addr_expr (base);

  /* ADDR = &BASE + TOFF + BITPOS / BITS_PER_UNIT.  */

  tree_to_aff_combination (base_addr, sizetype, addr);

  /* Variable part of the offset, if any.  */
  if (toff)
    {
      tree_to_aff_combination (toff, sizetype, &tmp);
      aff_combination_add (addr, &tmp);
    }

  /* Constant byte offset from the bit position.  */
  aff_combination_const (&tmp, sizetype,
			 double_int::from_shwi (bitpos / BITS_PER_UNIT));
  aff_combination_add (addr, &tmp);

  *size = double_int::from_shwi ((bitsize + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
}
886
/* Returns true if a region of size SIZE1 at position 0 and a region of
   size SIZE2 at position DIFF cannot overlap.  */

bool
aff_comb_cannot_overlap_p (aff_tree *diff, double_int size1, double_int size2)
{
  double_int d, bound;

  /* Unless the difference is a constant, we fail.  */
  if (diff->n != 0)
    return false;

  d = diff->offset;
  if (d.is_negative ())
    {
      /* The second object is before the first one, we succeed if the last
	 element of the second object is before the start of the first one.
	 That is, D + SIZE2 - 1 < 0.  */
      bound = d + size2 + double_int_minus_one;
      return bound.is_negative ();
    }
  else
    {
      /* We succeed if the second object starts after the first one ends,
	 i.e. SIZE1 <= D.  */
      return size1.sle (d);
    }
}
913