/* gcc/tree-affine.c — blame-view page header removed during extraction.  */
d3610bea 1/* Operations with affine combinations of trees.
fbd26352 2 Copyright (C) 2005-2019 Free Software Foundation, Inc.
48e1416a 3
d3610bea 4This file is part of GCC.
48e1416a 5
d3610bea 6GCC is free software; you can redistribute it and/or modify it
7under the terms of the GNU General Public License as published by the
8c4c00c1 8Free Software Foundation; either version 3, or (at your option) any
d3610bea 9later version.
48e1416a 10
d3610bea 11GCC is distributed in the hope that it will be useful, but WITHOUT
12ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
48e1416a 15
d3610bea 16You should have received a copy of the GNU General Public License
8c4c00c1 17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
d3610bea 19
20#include "config.h"
21#include "system.h"
22#include "coretypes.h"
9ef16211 23#include "backend.h"
7c29e30e 24#include "rtl.h"
d3610bea 25#include "tree.h"
9ef16211 26#include "gimple.h"
da744518 27#include "ssa.h"
7c29e30e 28#include "tree-pretty-print.h"
9ef16211 29#include "fold-const.h"
d3610bea 30#include "tree-affine.h"
a8783bee 31#include "gimplify.h"
b9ed1410 32#include "dumpfile.h"
e797f49f 33#include "cfgexpand.h"
e913b5cd 34
/* Extends CST as appropriate for the affine combinations COMB: the value is
   sign-extended to the precision of TYPE, since combinations do all their
   arithmetic modulo 2^precision of the combination type.  */

static widest_int
wide_int_ext_for_comb (const widest_int &cst, tree type)
{
  return wi::sext (cst, TYPE_PRECISION (type));
}
ab2c1de8 42
/* Likewise for polynomial offsets: each coefficient is sign-extended to the
   precision of TYPE.  */

static poly_widest_int
wide_int_ext_for_comb (const poly_widest_int &cst, tree type)
{
  return wi::sext (cst, TYPE_PRECISION (type));
}
50
d3610bea 51/* Initializes affine combination COMB so that its value is zero in TYPE. */
52
53static void
54aff_combination_zero (aff_tree *comb, tree type)
55{
e913b5cd 56 int i;
d3610bea 57 comb->type = type;
e913b5cd 58 comb->offset = 0;
d3610bea 59 comb->n = 0;
e913b5cd 60 for (i = 0; i < MAX_AFF_ELTS; i++)
61 comb->elts[i].coef = 0;
d3610bea 62 comb->rest = NULL_TREE;
63}
64
65/* Sets COMB to CST. */
66
67void
1aeea61f 68aff_combination_const (aff_tree *comb, tree type, const poly_widest_int &cst)
d3610bea 69{
70 aff_combination_zero (comb, type);
a8dd4afa 71 comb->offset = wide_int_ext_for_comb (cst, comb->type);;
d3610bea 72}
73
74/* Sets COMB to single element ELT. */
75
76void
77aff_combination_elt (aff_tree *comb, tree type, tree elt)
78{
79 aff_combination_zero (comb, type);
80
81 comb->n = 1;
82 comb->elts[0].val = elt;
e913b5cd 83 comb->elts[0].coef = 1;
d3610bea 84}
85
86/* Scales COMB by SCALE. */
87
88void
ab2c1de8 89aff_combination_scale (aff_tree *comb, const widest_int &scale_in)
d3610bea 90{
91 unsigned i, j;
92
a8dd4afa 93 widest_int scale = wide_int_ext_for_comb (scale_in, comb->type);
796b6678 94 if (scale == 1)
d3610bea 95 return;
96
796b6678 97 if (scale == 0)
d3610bea 98 {
99 aff_combination_zero (comb, comb->type);
100 return;
101 }
102
a8dd4afa 103 comb->offset = wide_int_ext_for_comb (scale * comb->offset, comb->type);
d3610bea 104 for (i = 0, j = 0; i < comb->n; i++)
105 {
ab2c1de8 106 widest_int new_coef
a8dd4afa 107 = wide_int_ext_for_comb (scale * comb->elts[i].coef, comb->type);
d3610bea 108 /* A coefficient may become zero due to overflow. Remove the zero
109 elements. */
796b6678 110 if (new_coef == 0)
d3610bea 111 continue;
112 comb->elts[j].coef = new_coef;
113 comb->elts[j].val = comb->elts[i].val;
114 j++;
115 }
116 comb->n = j;
117
118 if (comb->rest)
119 {
2ccb5abd 120 tree type = comb->type;
121 if (POINTER_TYPE_P (type))
122 type = sizetype;
d3610bea 123 if (comb->n < MAX_AFF_ELTS)
124 {
125 comb->elts[comb->n].coef = scale;
126 comb->elts[comb->n].val = comb->rest;
127 comb->rest = NULL_TREE;
128 comb->n++;
129 }
130 else
48e1416a 131 comb->rest = fold_build2 (MULT_EXPR, type, comb->rest,
e913b5cd 132 wide_int_to_tree (type, scale));
d3610bea 133 }
134}
135
/* Adds ELT * SCALE to COMB.  If ELT already occurs in COMB the coefficients
   are merged; if the merged coefficient becomes zero the element is removed.
   When the element table is full, the product is folded into COMB->rest.  */

void
aff_combination_add_elt (aff_tree *comb, tree elt, const widest_int &scale_in)
{
  unsigned i;
  tree type;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb->type);
  if (scale == 0)
    return;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, elt, 0))
      {
	/* ELT is already present — merge the coefficients.  */
	widest_int new_coef
	  = wide_int_ext_for_comb (comb->elts[i].coef + scale, comb->type);
	if (new_coef != 0)
	  {
	    comb->elts[i].coef = new_coef;
	    return;
	  }

	/* Coefficients cancelled; remove the element by moving the last
	   one into its slot.  */
	comb->n--;
	comb->elts[i] = comb->elts[comb->n];

	if (comb->rest)
	  {
	    /* A slot freed up — pull REST back into the element table with
	       coefficient one.  */
	    gcc_assert (comb->n == MAX_AFF_ELTS - 1);
	    comb->elts[comb->n].coef = 1;
	    comb->elts[comb->n].val = comb->rest;
	    comb->rest = NULL_TREE;
	    comb->n++;
	  }
	return;
      }
  if (comb->n < MAX_AFF_ELTS)
    {
      /* ELT is new and there is room for it.  */
      comb->elts[comb->n].coef = scale;
      comb->elts[comb->n].val = elt;
      comb->n++;
      return;
    }

  /* No room: fold ELT * SCALE into the REST tree.  Pointer combinations
     accumulate REST in sizetype.  */
  type = comb->type;
  if (POINTER_TYPE_P (type))
    type = sizetype;

  if (scale == 1)
    elt = fold_convert (type, elt);
  else
    elt = fold_build2 (MULT_EXPR, type,
		       fold_convert (type, elt),
		       wide_int_to_tree (type, scale));

  if (comb->rest)
    comb->rest = fold_build2 (PLUS_EXPR, type, comb->rest,
			      elt);
  else
    comb->rest = elt;
}
197
/* Adds the constant CST to the offset of combination C, reducing the result
   to the precision of C's type.  */

static void
aff_combination_add_cst (aff_tree *c, const poly_widest_int &cst)
{
  c->offset = wide_int_ext_for_comb (c->offset + cst, c->type);
}
205
d3610bea 206/* Adds COMB2 to COMB1. */
207
208void
209aff_combination_add (aff_tree *comb1, aff_tree *comb2)
210{
211 unsigned i;
212
cf661063 213 aff_combination_add_cst (comb1, comb2->offset);
d3610bea 214 for (i = 0; i < comb2->n; i++)
215 aff_combination_add_elt (comb1, comb2->elts[i].val, comb2->elts[i].coef);
216 if (comb2->rest)
e913b5cd 217 aff_combination_add_elt (comb1, comb2->rest, 1);
d3610bea 218}
219
/* Converts affine combination COMB to TYPE.  Widening conversions rebuild
   the combination from its tree form; narrowing conversions truncate the
   offset and coefficients in place.  */

void
aff_combination_convert (aff_tree *comb, tree type)
{
  unsigned i, j;
  tree comb_type = comb->type;

  if (TYPE_PRECISION (type) > TYPE_PRECISION (comb_type))
    {
      /* Widening: materialize the combination as a tree, convert that,
	 and re-split, so the extension semantics are those of fold.  */
      tree val = fold_convert (type, aff_combination_to_tree (comb));
      tree_to_aff_combination (val, type, comb);
      return;
    }

  comb->type = type;
  if (comb->rest && !POINTER_TYPE_P (type))
    comb->rest = fold_convert (type, comb->rest);

  /* Same precision — only the type tag needed updating.  */
  if (TYPE_PRECISION (type) == TYPE_PRECISION (comb_type))
    return;

  /* Narrowing: truncate the offset and all coefficients, dropping elements
     whose coefficient becomes zero.  */
  comb->offset = wide_int_ext_for_comb (comb->offset, comb->type);
  for (i = j = 0; i < comb->n; i++)
    {
      if (comb->elts[i].coef == 0)
	continue;
      comb->elts[j].coef = comb->elts[i].coef;
      comb->elts[j].val = fold_convert (type, comb->elts[i].val);
      j++;
    }

  comb->n = j;
  if (comb->n < MAX_AFF_ELTS && comb->rest)
    {
      /* A slot freed up — pull REST back into the element table.  */
      comb->elts[comb->n].coef = 1;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}
261
/* Splits EXPR into an affine combination of parts, stored in COMB with
   combination type TYPE.  Handles additive expressions, multiplication by
   constants, negation/bit-not, address expressions, and certain widening
   conversions; anything else becomes a single opaque element.  */

void
tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
{
  aff_tree tmp;
  enum tree_code code;
  tree cst, core, toffset;
  poly_int64 bitpos, bitsize, bytepos;
  machine_mode mode;
  int unsignedp, reversep, volatilep;

  STRIP_NOPS (expr);

  code = TREE_CODE (expr);
  switch (code)
    {
    case POINTER_PLUS_EXPR:
      /* Pointer and offset operands live in different types; the offset
	 is split in sizetype.  */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    case PLUS_EXPR:
    case MINUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), type, &tmp);
      if (code == MINUS_EXPR)
	aff_combination_scale (&tmp, -1);
      aff_combination_add (comb, &tmp);
      return;

    case MULT_EXPR:
      /* Only multiplication by an integer constant distributes over the
	 combination; otherwise fall through to the opaque-element case.  */
      cst = TREE_OPERAND (expr, 1);
      if (TREE_CODE (cst) != INTEGER_CST)
	break;
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, wi::to_widest (cst));
      return;

    case NEGATE_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, -1);
      return;

    case BIT_NOT_EXPR:
      /* ~x = -x - 1 */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, -1);
      aff_combination_add_cst (comb, -1);
      return;

    case ADDR_EXPR:
      /* Handle &MEM[ptr + CST] which is equivalent to POINTER_PLUS_EXPR.  */
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == MEM_REF)
	{
	  expr = TREE_OPERAND (expr, 0);
	  tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
	  tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
	  aff_combination_add (comb, &tmp);
	  return;
	}
      /* Decompose the reference into base object, bit position and
	 variable offset, and rebuild the address as their sum.  */
      core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
				  &toffset, &mode, &unsignedp, &reversep,
				  &volatilep);
      /* A bit position not divisible by a byte cannot be represented.  */
      if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos))
	break;
      aff_combination_const (comb, type, bytepos);
      if (TREE_CODE (core) == MEM_REF)
	{
	  /* The MEM_REF's constant offset folds into the combination's
	     offset; the base pointer becomes the core.  */
	  tree mem_offset = TREE_OPERAND (core, 1);
	  aff_combination_add_cst (comb, wi::to_poly_widest (mem_offset));
	  core = TREE_OPERAND (core, 0);
	}
      else
	core = build_fold_addr_expr (core);

      if (TREE_CODE (core) == ADDR_EXPR)
	aff_combination_add_elt (comb, core, 1);
      else
	{
	  tree_to_aff_combination (core, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      if (toffset)
	{
	  tree_to_aff_combination (toffset, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      return;

    CASE_CONVERT:
      {
	tree otype = TREE_TYPE (expr);
	tree inner = TREE_OPERAND (expr, 0);
	tree itype = TREE_TYPE (inner);
	enum tree_code icode = TREE_CODE (inner);

	/* In principle this is a valid folding, but it isn't necessarily
	   an optimization, so do it here and not in fold_unary.  */
	if ((icode == PLUS_EXPR || icode == MINUS_EXPR || icode == MULT_EXPR)
	    && TREE_CODE (itype) == INTEGER_TYPE
	    && TREE_CODE (otype) == INTEGER_TYPE
	    && TYPE_PRECISION (otype) > TYPE_PRECISION (itype))
	  {
	    tree op0 = TREE_OPERAND (inner, 0), op1 = TREE_OPERAND (inner, 1);

	    /* If inner type has undefined overflow behavior, fold conversion
	       for below two cases:
		 (T1)(X *+- CST) -> (T1)X *+- (T1)CST
		 (T1)(X + X)     -> (T1)X + (T1)X.  */
	    if (TYPE_OVERFLOW_UNDEFINED (itype)
		&& (TREE_CODE (op1) == INTEGER_CST
		    || (icode == PLUS_EXPR && operand_equal_p (op0, op1, 0))))
	      {
		op0 = fold_convert (otype, op0);
		op1 = fold_convert (otype, op1);
		expr = fold_build2 (icode, otype, op0, op1);
		tree_to_aff_combination (expr, type, comb);
		return;
	      }
	    wide_int minv, maxv;
	    /* If inner type has wrapping overflow behavior, fold conversion
	       for below case:
		 (T1)(X - CST) -> (T1)X - (T1)CST
	       if X - CST doesn't overflow by range information.  Also handle
	       (T1)(X + CST) as (T1)(X - (-CST)).  */
	    if (TYPE_UNSIGNED (itype)
		&& TYPE_OVERFLOW_WRAPS (itype)
		&& TREE_CODE (op0) == SSA_NAME
		&& TREE_CODE (op1) == INTEGER_CST
		&& icode != MULT_EXPR
		&& get_range_info (op0, &minv, &maxv) == VR_RANGE)
	      {
		if (icode == PLUS_EXPR)
		  op1 = wide_int_to_tree (itype, -wi::to_wide (op1));
		/* MINV >= CST guarantees X - CST does not wrap.  */
		if (wi::geu_p (minv, wi::to_wide (op1)))
		  {
		    op0 = fold_convert (otype, op0);
		    op1 = fold_convert (otype, op1);
		    expr = fold_build2 (MINUS_EXPR, otype, op0, op1);
		    tree_to_aff_combination (expr, type, comb);
		    return;
		  }
	      }
	  }
      }
      break;

    default:
      {
	/* Constants (including poly_int ones) go straight to the offset.  */
	if (poly_int_tree_p (expr))
	  {
	    aff_combination_const (comb, type, wi::to_poly_widest (expr));
	    return;
	  }
	break;
      }
    }

  /* Fallback: EXPR is opaque to this analysis — track it as a single
     element with coefficient one.  */
  aff_combination_elt (comb, type, expr);
}
424
425/* Creates EXPR + ELT * SCALE in TYPE. EXPR is taken from affine
426 combination COMB. */
427
428static tree
a8dd4afa 429add_elt_to_tree (tree expr, tree type, tree elt, const widest_int &scale_in)
d3610bea 430{
431 enum tree_code code;
432
a8dd4afa 433 widest_int scale = wide_int_ext_for_comb (scale_in, type);
9ea678af 434
61728404 435 elt = fold_convert (type, elt);
796b6678 436 if (scale == 1)
d3610bea 437 {
438 if (!expr)
61728404 439 return elt;
d3610bea 440
61728404 441 return fold_build2 (PLUS_EXPR, type, expr, elt);
d3610bea 442 }
443
796b6678 444 if (scale == -1)
d3610bea 445 {
446 if (!expr)
61728404 447 return fold_build1 (NEGATE_EXPR, type, elt);
d3610bea 448
61728404 449 return fold_build2 (MINUS_EXPR, type, expr, elt);
d3610bea 450 }
451
452 if (!expr)
61728404 453 return fold_build2 (MULT_EXPR, type, elt, wide_int_to_tree (type, scale));
d3610bea 454
796b6678 455 if (wi::neg_p (scale))
d3610bea 456 {
457 code = MINUS_EXPR;
cf8f0e63 458 scale = -scale;
d3610bea 459 }
460 else
461 code = PLUS_EXPR;
462
61728404 463 elt = fold_build2 (MULT_EXPR, type, elt, wide_int_to_tree (type, scale));
464 return fold_build2 (code, type, expr, elt);
d3610bea 465}
466
/* Makes a tree from the affine combination COMB.  For pointer-typed
   combinations the arithmetic is carried out in sizetype and a leading
   pointer element (coefficient one) becomes the base of a
   POINTER_PLUS_EXPR.  */

tree
aff_combination_to_tree (aff_tree *comb)
{
  tree type = comb->type, base = NULL_TREE, expr = NULL_TREE;
  unsigned i;
  poly_widest_int off;
  int sgn;

  /* REST may only be set when the element table is full.  */
  gcc_assert (comb->n == MAX_AFF_ELTS || comb->rest == NULL_TREE);

  i = 0;
  if (POINTER_TYPE_P (type))
    {
      type = sizetype;
      /* Use a pointer element with unit coefficient as the base, so the
	 result can be a POINTER_PLUS_EXPR rather than raw arithmetic.  */
      if (comb->n > 0 && comb->elts[0].coef == 1
	  && POINTER_TYPE_P (TREE_TYPE (comb->elts[0].val)))
	{
	  base = comb->elts[0].val;
	  ++i;
	}
    }

  for (; i < comb->n; i++)
    expr = add_elt_to_tree (expr, type, comb->elts[i].val, comb->elts[i].coef);

  if (comb->rest)
    expr = add_elt_to_tree (expr, type, comb->rest, 1);

  /* Ensure that we get x - 1, not x + (-1) or x + 0xff..f if x is
     unsigned.  */
  if (known_lt (comb->offset, 0))
    {
      off = -comb->offset;
      sgn = -1;
    }
  else
    {
      off = comb->offset;
      sgn = 1;
    }
  expr = add_elt_to_tree (expr, type, wide_int_to_tree (type, off), sgn);

  if (base)
    return fold_build_pointer_plus (base, expr);
  else
    return fold_convert (comb->type, expr);
}
516
517/* Copies the tree elements of COMB to ensure that they are not shared. */
518
519void
520unshare_aff_combination (aff_tree *comb)
521{
522 unsigned i;
523
524 for (i = 0; i < comb->n; i++)
525 comb->elts[i].val = unshare_expr (comb->elts[i].val);
526 if (comb->rest)
527 comb->rest = unshare_expr (comb->rest);
528}
529
/* Removes the M-th element from COMB by moving the last element into its
   slot.  If REST is set, the freed slot is reused to track REST as an
   ordinary element.  */

void
aff_combination_remove_elt (aff_tree *comb, unsigned m)
{
  comb->n--;
  if (m <= comb->n)
    comb->elts[m] = comb->elts[comb->n];
  if (comb->rest)
    {
      /* Pull REST back into the element table with coefficient one.  */
      comb->elts[comb->n].coef = 1;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}
cf661063 546
/* Adds C * COEF * VAL to R.  VAL may be NULL, in that case only
   C * COEF is added to R.  */


static void
aff_combination_add_product (aff_tree *c, const widest_int &coef, tree val,
			     aff_tree *r)
{
  unsigned i;
  tree aval, type;

  /* Each element of C contributes (elt * VAL) * (COEF * elt_coef).  */
  for (i = 0; i < c->n; i++)
    {
      aval = c->elts[i].val;
      if (val)
	{
	  type = TREE_TYPE (aval);
	  aval = fold_build2 (MULT_EXPR, type, aval,
			      fold_convert (type, val));
	}

      aff_combination_add_elt (r, aval, coef * c->elts[i].coef);
    }

  /* REST contributes (rest * VAL) * COEF.  */
  if (c->rest)
    {
      aval = c->rest;
      if (val)
	{
	  type = TREE_TYPE (aval);
	  aval = fold_build2 (MULT_EXPR, type, aval,
			      fold_convert (type, val));
	}

      aff_combination_add_elt (r, aval, coef);
    }

  if (val)
    {
      if (c->offset.is_constant ())
	/* Access coeffs[0] directly, for efficiency.  */
	aff_combination_add_elt (r, val, coef * c->offset.coeffs[0]);
      else
	{
	  /* c->offset is polynomial, so multiply VAL rather than COEF
	     by it.  */
	  tree offset = wide_int_to_tree (TREE_TYPE (val), c->offset);
	  val = fold_build2 (MULT_EXPR, TREE_TYPE (val), val, offset);
	  aff_combination_add_elt (r, val, coef);
	}
    }
  else
    aff_combination_add_cst (r, coef * c->offset);
}
601
/* Multiplies C1 by C2, storing the result to R.  The product is formed by
   distributing C1 over every term of C2.  */

void
aff_combination_mult (aff_tree *c1, aff_tree *c2, aff_tree *r)
{
  unsigned i;
  gcc_assert (TYPE_PRECISION (c1->type) == TYPE_PRECISION (c2->type));

  aff_combination_zero (r, c1->type);

  for (i = 0; i < c2->n; i++)
    aff_combination_add_product (c1, c2->elts[i].coef, c2->elts[i].val, r);
  if (c2->rest)
    aff_combination_add_product (c1, 1, c2->rest, r);
  if (c2->offset.is_constant ())
    /* Access coeffs[0] directly, for efficiency.  */
    aff_combination_add_product (c1, c2->offset.coeffs[0], NULL, r);
  else
    {
      /* c2->offset is polynomial, so do the multiplication in tree form.  */
      tree offset = wide_int_to_tree (c2->type, c2->offset);
      aff_combination_add_product (c1, 1, offset, r);
    }
}
ad4a85ad 626
627/* Returns the element of COMB whose value is VAL, or NULL if no such
628 element exists. If IDX is not NULL, it is set to the index of VAL in
629 COMB. */
48e1416a 630
ad4a85ad 631static struct aff_comb_elt *
632aff_combination_find_elt (aff_tree *comb, tree val, unsigned *idx)
633{
634 unsigned i;
635
636 for (i = 0; i < comb->n; i++)
637 if (operand_equal_p (comb->elts[i].val, val, 0))
638 {
639 if (idx)
640 *idx = i;
641
642 return &comb->elts[i];
643 }
644
645 return NULL;
646}
647
/* Element of the cache that maps ssa name NAME to its expanded form
   as an affine expression EXPANSION.  */

struct name_expansion
{
  /* The fully expanded affine combination for the name.  */
  aff_tree expansion;

  /* True if the expansion for the name is just being generated; used to
     detect (and assert against) cycles during recursive expansion.  */
  unsigned in_progress : 1;
};
658
/* Expands SSA names in COMB recursively.  CACHE is used to cache the
   results; it is allocated lazily and maps each expanded SSA name to its
   name_expansion record.  */

void
aff_combination_expand (aff_tree *comb ATTRIBUTE_UNUSED,
			hash_map<tree, name_expansion *> **cache)
{
  unsigned i;
  aff_tree to_add, current, curre;
  tree e, rhs;
  gimple *def;
  widest_int scale;
  struct name_expansion *exp;

  aff_combination_zero (&to_add, comb->type);
  for (i = 0; i < comb->n; i++)
    {
      tree type, name;
      enum tree_code code;

      e = comb->elts[i].val;
      type = TREE_TYPE (e);
      name = e;
      /* Look through some conversions.  */
      if (CONVERT_EXPR_P (e)
	  && (TYPE_PRECISION (type)
	      >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (e, 0)))))
	name = TREE_OPERAND (e, 0);
      if (TREE_CODE (name) != SSA_NAME)
	continue;
      def = SSA_NAME_DEF_STMT (name);
      if (!is_gimple_assign (def) || gimple_assign_lhs (def) != name)
	continue;

      /* Only expand definitions whose right-hand side is an expression or
	 a constant invariant.  */
      code = gimple_assign_rhs_code (def);
      if (code != SSA_NAME
	  && !IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
	  && (get_gimple_rhs_class (code) != GIMPLE_SINGLE_RHS
	      || !is_gimple_min_invariant (gimple_assign_rhs1 (def))))
	continue;

      /* We do not know whether the reference retains its value at the
	 place where the expansion is used.  */
      if (TREE_CODE_CLASS (code) == tcc_reference)
	continue;

      name_expansion **slot = NULL;
      if (*cache)
	slot = (*cache)->get (name);
      exp = slot ? *slot : NULL;
      if (!exp)
	{
	  /* Only bother to handle cases tree_to_aff_combination will.  */
	  switch (code)
	    {
	    case POINTER_PLUS_EXPR:
	    case PLUS_EXPR:
	    case MINUS_EXPR:
	    case MULT_EXPR:
	    case NEGATE_EXPR:
	    case BIT_NOT_EXPR:
	    CASE_CONVERT:
	      rhs = gimple_assign_rhs_to_tree (def);
	      break;
	    case ADDR_EXPR:
	    case INTEGER_CST:
	    case POLY_INT_CST:
	      rhs = gimple_assign_rhs1 (def);
	      break;
	    default:
	      continue;
	    }
	  tree_to_aff_combination (rhs, TREE_TYPE (name), &current);
	  exp = XNEW (struct name_expansion);
	  /* Mark the entry in-progress before the recursive expansion so
	     a cycle through this name would be detected below.  */
	  exp->in_progress = 1;
	  if (!*cache)
	    *cache = new hash_map<tree, name_expansion *>;
	  (*cache)->put (name, exp);
	  aff_combination_expand (&current, cache);
	  exp->expansion = current;
	  exp->in_progress = 0;
	}
      else
	{
	  /* Since we follow the definitions in the SSA form, we should not
	     enter a cycle unless we pass through a phi node.  */
	  gcc_assert (!exp->in_progress);
	  current = exp->expansion;
	}
      if (!useless_type_conversion_p (comb->type, current.type))
	aff_combination_convert (&current, comb->type);

      /* Accumulate the new terms to TO_ADD, so that we do not modify
	 COMB while traversing it; include the term -coef * E, to remove
	 it from COMB.  */
      scale = comb->elts[i].coef;
      aff_combination_zero (&curre, comb->type);
      aff_combination_add_elt (&curre, e, -scale);
      aff_combination_scale (&current, scale);
      aff_combination_add (&to_add, &current);
      aff_combination_add (&to_add, &curre);
    }
  aff_combination_add (comb, &to_add);
}
763
/* Similar to tree_to_aff_combination, but follows SSA name definitions
   and expands them recursively.  CACHE is used to cache the expansions
   of the ssa names, to avoid exponential time complexity for cases
   like

   a1 = a0 + a0;
   a2 = a1 + a1;
   a3 = a2 + a2;
   ...  */

void
tree_to_aff_combination_expand (tree expr, tree type, aff_tree *comb,
				hash_map<tree, name_expansion *> **cache)
{
  tree_to_aff_combination (expr, type, comb);
  aff_combination_expand (comb, cache);
}
781
/* Frees memory occupied by struct name_expansion in *VALUE.  Callback for
   hash_map::traverse.  Always returns true so the traversal continues.  */

bool
free_name_expansion (tree const &, name_expansion **value, void *)
{
  free (*value);
  return true;
}
791
792/* Frees memory allocated for the CACHE used by
793 tree_to_aff_combination_expand. */
794
795void
5f8841a5 796free_affine_expand_cache (hash_map<tree, name_expansion *> **cache)
ad4a85ad 797{
798 if (!*cache)
799 return;
800
5f8841a5 801 (*cache)->traverse<void *, free_name_expansion> (NULL);
802 delete (*cache);
ad4a85ad 803 *cache = NULL;
804}
805
806/* If VAL != CST * DIV for any constant CST, returns false.
575a3e39 807 Otherwise, if *MULT_SET is true, additionally compares CST and MULT,
808 and if they are different, returns false. Finally, if neither of these
809 two cases occur, true is returned, and CST is stored to MULT and MULT_SET
810 is set to true. */
ad4a85ad 811
812static bool
1aeea61f 813wide_int_constant_multiple_p (const poly_widest_int &val,
814 const poly_widest_int &div,
815 bool *mult_set, poly_widest_int *mult)
ad4a85ad 816{
1aeea61f 817 poly_widest_int rem, cst;
ad4a85ad 818
1aeea61f 819 if (known_eq (val, 0))
575a3e39 820 {
1aeea61f 821 if (*mult_set && maybe_ne (*mult, 0))
575a3e39 822 return false;
823 *mult_set = true;
e913b5cd 824 *mult = 0;
575a3e39 825 return true;
826 }
ad4a85ad 827
1aeea61f 828 if (maybe_eq (div, 0))
ad4a85ad 829 return false;
830
1aeea61f 831 if (!multiple_p (val, div, &cst))
ad4a85ad 832 return false;
833
1aeea61f 834 if (*mult_set && maybe_ne (*mult, cst))
ad4a85ad 835 return false;
836
837 *mult_set = true;
838 *mult = cst;
839 return true;
840}
841
/* Returns true if VAL = X * DIV for some constant X.  If this is the case,
   X is stored to MULT.  All corresponding offsets and coefficients must
   share the same factor, and neither combination may carry a REST tree.  */

bool
aff_combination_constant_multiple_p (aff_tree *val, aff_tree *div,
				     poly_widest_int *mult)
{
  bool mult_set = false;
  unsigned i;

  /* Zero is a multiple of anything, with factor zero.  */
  if (val->n == 0 && known_eq (val->offset, 0))
    {
      *mult = 0;
      return true;
    }
  if (val->n != div->n)
    return false;

  if (val->rest || div->rest)
    return false;

  if (!wide_int_constant_multiple_p (val->offset, div->offset,
				     &mult_set, mult))
    return false;

  /* Every element of DIV must appear in VAL with a coefficient that is
     the same multiple of DIV's coefficient.  */
  for (i = 0; i < div->n; i++)
    {
      struct aff_comb_elt *elt
	= aff_combination_find_elt (val, div->elts[i].val, NULL);
      if (!elt)
	return false;
      if (!wide_int_constant_multiple_p (elt->coef, div->elts[i].coef,
					 &mult_set, mult))
	return false;
    }

  gcc_assert (mult_set);
  return true;
}
28d4c440 881
/* Prints the affine VAL to the FILE.  Pointer-typed combinations print
   their numbers as signed, matching how the offsets are stored.  */

static void
print_aff (FILE *file, aff_tree *val)
{
  unsigned i;
  signop sgn = TYPE_SIGN (val->type);
  if (POINTER_TYPE_P (val->type))
    sgn = SIGNED;
  fprintf (file, "{\n  type = ");
  print_generic_expr (file, val->type, TDF_VOPS|TDF_MEMSYMS);
  fprintf (file, "\n  offset = ");
  print_dec (val->offset, file, sgn);
  if (val->n > 0)
    {
      fprintf (file, "\n  elements = {\n");
      for (i = 0; i < val->n; i++)
	{
	  fprintf (file, "    [%d] = ", i);
	  print_generic_expr (file, val->elts[i].val, TDF_VOPS|TDF_MEMSYMS);

	  fprintf (file, " * ");
	  print_dec (val->elts[i].coef, file, sgn);
	  if (i != val->n - 1)
	    fprintf (file, ", \n");
	}
      fprintf (file, "\n  }");
    }
  if (val->rest)
    {
      fprintf (file, "\n  rest = ");
      print_generic_expr (file, val->rest, TDF_VOPS|TDF_MEMSYMS);
    }
  fprintf (file, "\n}");
}
917
/* Prints the affine VAL to the standard error, used for debugging.  */

DEBUG_FUNCTION void
debug_aff (aff_tree *val)
{
  print_aff (stderr, val);
  fprintf (stderr, "\n");
}
063a8bce 926
/* Computes address of the reference REF in ADDR.  The size of the accessed
   location is stored to SIZE.  Returns the ultimate containing object to
   which REF refers.  */

tree
get_inner_reference_aff (tree ref, aff_tree *addr, poly_widest_int *size)
{
  poly_int64 bitsize, bitpos;
  tree toff;
  machine_mode mode;
  int uns, rev, vol;
  aff_tree tmp;
  tree base = get_inner_reference (ref, &bitsize, &bitpos, &toff, &mode,
				   &uns, &rev, &vol);
  tree base_addr = build_fold_addr_expr (base);

  /* ADDR = &BASE + TOFF + BITPOS / BITS_PER_UNIT.  */

  tree_to_aff_combination (base_addr, sizetype, addr);

  if (toff)
    {
      tree_to_aff_combination (toff, sizetype, &tmp);
      aff_combination_add (addr, &tmp);
    }

  /* Round the bit position down to whole bytes when adding it in.  */
  aff_combination_const (&tmp, sizetype, bits_to_bytes_round_down (bitpos));
  aff_combination_add (addr, &tmp);

  /* The size rounds up, so partial trailing bytes are covered.  */
  *size = bits_to_bytes_round_up (bitsize);

  return base;
}
960
5fc88ffd 961/* Returns true if a region of size SIZE1 at position 0 and a region of
962 size SIZE2 at position DIFF cannot overlap. */
963
964bool
1aeea61f 965aff_comb_cannot_overlap_p (aff_tree *diff, const poly_widest_int &size1,
966 const poly_widest_int &size2)
5fc88ffd 967{
5fc88ffd 968 /* Unless the difference is a constant, we fail. */
969 if (diff->n != 0)
970 return false;
971
1aeea61f 972 if (!ordered_p (diff->offset, 0))
973 return false;
974
975 if (maybe_lt (diff->offset, 0))
5fc88ffd 976 {
977 /* The second object is before the first one, we succeed if the last
978 element of the second object is before the start of the first one. */
1aeea61f 979 return known_le (diff->offset + size2, 0);
5fc88ffd 980 }
981 else
982 {
983 /* We succeed if the second object starts after the first one ends. */
1aeea61f 984 return known_le (size1, diff->offset);
5fc88ffd 985 }
986}
987