/* Operations with affine combinations of trees.
   Copyright (C) 2005-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "ssa.h"
#include "tree-pretty-print.h"
#include "fold-const.h"
#include "tree-affine.h"
#include "gimplify.h"
#include "dumpfile.h"
#include "cfgexpand.h"
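
/* An affine combination COMB represents the value

     COMB->offset + sum_i COMB->elts[i].coef * COMB->elts[i].val + COMB->rest

   evaluated in COMB->type.  At most MAX_AFF_ELTS elements are tracked
   explicitly; any remaining part is kept in COMB->rest with an implicit
   coefficient of one.  See tree-affine.h for the definitions of aff_tree
   and aff_comb_elt.  */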

/* Extends CST as appropriate for an affine combination of type TYPE.  */

static widest_int
wide_int_ext_for_comb (const widest_int &cst, tree type)
{
  return wi::sext (cst, TYPE_PRECISION (type));
}

/* Likewise for polynomial offsets.  */

static poly_widest_int
wide_int_ext_for_comb (const poly_widest_int &cst, tree type)
{
  return wi::sext (cst, TYPE_PRECISION (type));
}

/* Initializes affine combination COMB so that its value is zero in TYPE.  */

static void
aff_combination_zero (aff_tree *comb, tree type)
{
  int i;
  comb->type = type;
  comb->offset = 0;
  comb->n = 0;
  for (i = 0; i < MAX_AFF_ELTS; i++)
    comb->elts[i].coef = 0;
  comb->rest = NULL_TREE;
}

/* Sets COMB to CST.  */

void
aff_combination_const (aff_tree *comb, tree type, const poly_widest_int &cst)
{
  aff_combination_zero (comb, type);
  comb->offset = wide_int_ext_for_comb (cst, comb->type);
}

/* Sets COMB to single element ELT.  */

void
aff_combination_elt (aff_tree *comb, tree type, tree elt)
{
  aff_combination_zero (comb, type);

  comb->n = 1;
  comb->elts[0].val = elt;
  comb->elts[0].coef = 1;
}

/* Scales COMB by SCALE.  */

void
aff_combination_scale (aff_tree *comb, const widest_int &scale_in)
{
  unsigned i, j;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb->type);
  if (scale == 1)
    return;

  if (scale == 0)
    {
      aff_combination_zero (comb, comb->type);
      return;
    }

  comb->offset = wide_int_ext_for_comb (scale * comb->offset, comb->type);
  for (i = 0, j = 0; i < comb->n; i++)
    {
      widest_int new_coef
        = wide_int_ext_for_comb (scale * comb->elts[i].coef, comb->type);
      /* A coefficient may become zero due to overflow.  Remove the zero
         elements.  */
      if (new_coef == 0)
        continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = comb->elts[i].val;
      j++;
    }
  comb->n = j;

  if (comb->rest)
    {
      tree type = comb->type;
      if (POINTER_TYPE_P (type))
        type = sizetype;
      if (comb->n < MAX_AFF_ELTS)
        {
          comb->elts[comb->n].coef = scale;
          comb->elts[comb->n].val = comb->rest;
          comb->rest = NULL_TREE;
          comb->n++;
        }
      else
        comb->rest = fold_build2 (MULT_EXPR, type, comb->rest,
                                  wide_int_to_tree (type, scale));
    }
}

/* Adds ELT * SCALE to COMB.  */

void
aff_combination_add_elt (aff_tree *comb, tree elt, const widest_int &scale_in)
{
  unsigned i;
  tree type;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb->type);
  if (scale == 0)
    return;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, elt, 0))
      {
        widest_int new_coef
          = wide_int_ext_for_comb (comb->elts[i].coef + scale, comb->type);
        if (new_coef != 0)
          {
            comb->elts[i].coef = new_coef;
            return;
          }

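        /* The coefficients cancelled out; remove the element and, if REST
           is in use, move it into the freed slot.  */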
        comb->n--;
        comb->elts[i] = comb->elts[comb->n];

        if (comb->rest)
          {
            gcc_assert (comb->n == MAX_AFF_ELTS - 1);
            comb->elts[comb->n].coef = 1;
            comb->elts[comb->n].val = comb->rest;
            comb->rest = NULL_TREE;
            comb->n++;
          }
        return;
      }
  if (comb->n < MAX_AFF_ELTS)
    {
      comb->elts[comb->n].coef = scale;
      comb->elts[comb->n].val = elt;
      comb->n++;
      return;
    }

  type = comb->type;
  if (POINTER_TYPE_P (type))
    type = sizetype;

  if (scale == 1)
    elt = fold_convert (type, elt);
  else
    elt = fold_build2 (MULT_EXPR, type,
                       fold_convert (type, elt),
                       wide_int_to_tree (type, scale));

  if (comb->rest)
    comb->rest = fold_build2 (PLUS_EXPR, type, comb->rest, elt);
  else
    comb->rest = elt;
}

/* Adds CST to C.  */

static void
aff_combination_add_cst (aff_tree *c, const poly_widest_int &cst)
{
  c->offset = wide_int_ext_for_comb (c->offset + cst, c->type);
}

/* Adds COMB2 to COMB1.  */

void
aff_combination_add (aff_tree *comb1, aff_tree *comb2)
{
  unsigned i;

  aff_combination_add_cst (comb1, comb2->offset);
  for (i = 0; i < comb2->n; i++)
    aff_combination_add_elt (comb1, comb2->elts[i].val, comb2->elts[i].coef);
  if (comb2->rest)
    aff_combination_add_elt (comb1, comb2->rest, 1);
}

/* Converts affine combination COMB to TYPE.  */

void
aff_combination_convert (aff_tree *comb, tree type)
{
  unsigned i, j;
  tree comb_type = comb->type;

  if (TYPE_PRECISION (type) > TYPE_PRECISION (comb_type))
    {
      tree val = fold_convert (type, aff_combination_to_tree (comb));
      tree_to_aff_combination (val, type, comb);
      return;
    }

  comb->type = type;
  if (comb->rest && !POINTER_TYPE_P (type))
    comb->rest = fold_convert (type, comb->rest);

  if (TYPE_PRECISION (type) == TYPE_PRECISION (comb_type))
    return;

  comb->offset = wide_int_ext_for_comb (comb->offset, comb->type);
  for (i = j = 0; i < comb->n; i++)
    {
      if (comb->elts[i].coef == 0)
        continue;
      comb->elts[j].coef = comb->elts[i].coef;
      comb->elts[j].val = fold_convert (type, comb->elts[i].val);
      j++;
    }

  comb->n = j;
  if (comb->n < MAX_AFF_ELTS && comb->rest)
    {
      comb->elts[comb->n].coef = 1;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}

/* Tries to handle OP0 CODE OP1 as affine combination of parts.  Returns
   true when that was successful and returns the combination in COMB.  */

static bool
expr_to_aff_combination (aff_tree *comb, tree_code code, tree type,
                         tree op0, tree op1 = NULL_TREE)
{
  aff_tree tmp;
  poly_int64 bitpos, bitsize, bytepos;

  switch (code)
    {
    case POINTER_PLUS_EXPR:
      tree_to_aff_combination (op0, type, comb);
      tree_to_aff_combination (op1, sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return true;

    case PLUS_EXPR:
    case MINUS_EXPR:
      tree_to_aff_combination (op0, type, comb);
      tree_to_aff_combination (op1, type, &tmp);
      if (code == MINUS_EXPR)
        aff_combination_scale (&tmp, -1);
      aff_combination_add (comb, &tmp);
      return true;

    case MULT_EXPR:
      if (TREE_CODE (op1) != INTEGER_CST)
        break;
      tree_to_aff_combination (op0, type, comb);
      aff_combination_scale (comb, wi::to_widest (op1));
      return true;

    case NEGATE_EXPR:
      tree_to_aff_combination (op0, type, comb);
      aff_combination_scale (comb, -1);
      return true;

    case BIT_NOT_EXPR:
      /* ~x = -x - 1 */
      tree_to_aff_combination (op0, type, comb);
      aff_combination_scale (comb, -1);
      aff_combination_add_cst (comb, -1);
      return true;

    CASE_CONVERT:
      {
        tree otype = type;
        tree inner = op0;
        tree itype = TREE_TYPE (inner);
        enum tree_code icode = TREE_CODE (inner);

        /* STRIP_NOPS  */
        if (tree_nop_conversion_p (otype, itype))
          {
            tree_to_aff_combination (op0, type, comb);
            return true;
          }

        /* In principle this is a valid folding, but it isn't necessarily
           an optimization, so do it here and not in fold_unary.  */
        if ((icode == PLUS_EXPR || icode == MINUS_EXPR || icode == MULT_EXPR)
            && TREE_CODE (itype) == INTEGER_TYPE
            && TREE_CODE (otype) == INTEGER_TYPE
            && TYPE_PRECISION (otype) > TYPE_PRECISION (itype))
          {
            tree op0 = TREE_OPERAND (inner, 0), op1 = TREE_OPERAND (inner, 1);

            /* If inner type has undefined overflow behavior, fold conversion
               for below two cases:
                 (T1)(X *+- CST) -> (T1)X *+- (T1)CST
                 (T1)(X + X) -> (T1)X + (T1)X.  */
            if (TYPE_OVERFLOW_UNDEFINED (itype)
                && (TREE_CODE (op1) == INTEGER_CST
                    || (icode == PLUS_EXPR && operand_equal_p (op0, op1, 0))))
              {
                op0 = fold_convert (otype, op0);
                op1 = fold_convert (otype, op1);
                return expr_to_aff_combination (comb, icode, otype, op0, op1);
              }
            wide_int minv, maxv;
            /* If inner type has wrapping overflow behavior, fold conversion
               for below case:
                 (T1)(X - CST) -> (T1)X - (T1)CST
               if X - CST doesn't overflow by range information.  Also handle
               (T1)(X + CST) as (T1)(X - (-CST)).  */
            if (TYPE_UNSIGNED (itype)
                && TYPE_OVERFLOW_WRAPS (itype)
                && TREE_CODE (op0) == SSA_NAME
                && TREE_CODE (op1) == INTEGER_CST
                && icode != MULT_EXPR
                && get_range_info (op0, &minv, &maxv) == VR_RANGE)
              {
                if (icode == PLUS_EXPR)
                  op1 = wide_int_to_tree (itype, -wi::to_wide (op1));
                if (wi::geu_p (minv, wi::to_wide (op1)))
                  {
                    op0 = fold_convert (otype, op0);
                    op1 = fold_convert (otype, op1);
                    return expr_to_aff_combination (comb, MINUS_EXPR, otype,
                                                    op0, op1);
                  }
              }
          }
      }
      break;

    default:;
    }

  return false;
}

/* Splits EXPR into an affine combination of parts.  */

void
tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
{
  aff_tree tmp;
  enum tree_code code;
  tree core, toffset;
  poly_int64 bitpos, bitsize, bytepos;
  machine_mode mode;
  int unsignedp, reversep, volatilep;

  STRIP_NOPS (expr);

  code = TREE_CODE (expr);
  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      if (expr_to_aff_combination (comb, code, type, TREE_OPERAND (expr, 0),
                                   TREE_OPERAND (expr, 1)))
        return;
      break;

    case NEGATE_EXPR:
    case BIT_NOT_EXPR:
      if (expr_to_aff_combination (comb, code, type, TREE_OPERAND (expr, 0)))
        return;
      break;

    CASE_CONVERT:
      /* ??? TREE_TYPE (expr) should be equal to type here, but IVOPTS
         calls this with not showing an outer widening cast.  */
      if (expr_to_aff_combination (comb, code,
                                   TREE_TYPE (expr), TREE_OPERAND (expr, 0)))
        {
          aff_combination_convert (comb, type);
          return;
        }
      break;

    case ADDR_EXPR:
      /* Handle &MEM[ptr + CST] which is equivalent to POINTER_PLUS_EXPR.  */
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == MEM_REF)
        {
          expr = TREE_OPERAND (expr, 0);
          tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
          tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
          aff_combination_add (comb, &tmp);
          return;
        }
      core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
                                  &toffset, &mode, &unsignedp, &reversep,
                                  &volatilep);
      if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos))
        break;
      aff_combination_const (comb, type, bytepos);
      if (TREE_CODE (core) == MEM_REF)
        {
          tree mem_offset = TREE_OPERAND (core, 1);
          aff_combination_add_cst (comb, wi::to_poly_widest (mem_offset));
          core = TREE_OPERAND (core, 0);
        }
      else
        core = build_fold_addr_expr (core);

      if (TREE_CODE (core) == ADDR_EXPR)
        aff_combination_add_elt (comb, core, 1);
      else
        {
          tree_to_aff_combination (core, type, &tmp);
          aff_combination_add (comb, &tmp);
        }
      if (toffset)
        {
          tree_to_aff_combination (toffset, type, &tmp);
          aff_combination_add (comb, &tmp);
        }
      return;

    default:
      {
        if (poly_int_tree_p (expr))
          {
            aff_combination_const (comb, type, wi::to_poly_widest (expr));
            return;
          }
        break;
      }
    }

  aff_combination_elt (comb, type, expr);
}

/* Creates EXPR + ELT * SCALE in TYPE.  EXPR is taken from affine
   combination COMB.  */

static tree
add_elt_to_tree (tree expr, tree type, tree elt, const widest_int &scale_in)
{
  enum tree_code code;

  widest_int scale = wide_int_ext_for_comb (scale_in, type);

  elt = fold_convert (type, elt);
  if (scale == 1)
    {
      if (!expr)
        return elt;

      return fold_build2 (PLUS_EXPR, type, expr, elt);
    }

  if (scale == -1)
    {
      if (!expr)
        return fold_build1 (NEGATE_EXPR, type, elt);

      return fold_build2 (MINUS_EXPR, type, expr, elt);
    }

  if (!expr)
    return fold_build2 (MULT_EXPR, type, elt, wide_int_to_tree (type, scale));

  if (wi::neg_p (scale))
    {
      code = MINUS_EXPR;
      scale = -scale;
    }
  else
    code = PLUS_EXPR;

  elt = fold_build2 (MULT_EXPR, type, elt, wide_int_to_tree (type, scale));
  return fold_build2 (code, type, expr, elt);
}

/* Makes tree from the affine combination COMB.  */

tree
aff_combination_to_tree (aff_tree *comb)
{
  tree type = comb->type, base = NULL_TREE, expr = NULL_TREE;
  unsigned i;
  poly_widest_int off;
  int sgn;

  gcc_assert (comb->n == MAX_AFF_ELTS || comb->rest == NULL_TREE);

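  /* For a pointer-typed combination, do the arithmetic in sizetype and, when
     the first element has coefficient one and pointer type, use it as the
     base of a trailing POINTER_PLUS_EXPR.  */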
  i = 0;
  if (POINTER_TYPE_P (type))
    {
      type = sizetype;
      if (comb->n > 0 && comb->elts[0].coef == 1
          && POINTER_TYPE_P (TREE_TYPE (comb->elts[0].val)))
        {
          base = comb->elts[0].val;
          ++i;
        }
    }

  for (; i < comb->n; i++)
    expr = add_elt_to_tree (expr, type, comb->elts[i].val, comb->elts[i].coef);

  if (comb->rest)
    expr = add_elt_to_tree (expr, type, comb->rest, 1);

  /* Ensure that we get x - 1, not x + (-1) or x + 0xff..f if x is
     unsigned.  */
  if (known_lt (comb->offset, 0))
    {
      off = -comb->offset;
      sgn = -1;
    }
  else
    {
      off = comb->offset;
      sgn = 1;
    }
  expr = add_elt_to_tree (expr, type, wide_int_to_tree (type, off), sgn);

  if (base)
    return fold_build_pointer_plus (base, expr);
  else
    return fold_convert (comb->type, expr);
}

/* Copies the tree elements of COMB to ensure that they are not shared.  */

void
unshare_aff_combination (aff_tree *comb)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    comb->elts[i].val = unshare_expr (comb->elts[i].val);
  if (comb->rest)
    comb->rest = unshare_expr (comb->rest);
}

/* Remove M-th element from COMB.  */

void
aff_combination_remove_elt (aff_tree *comb, unsigned m)
{
  comb->n--;
  if (m <= comb->n)
    comb->elts[m] = comb->elts[comb->n];
  if (comb->rest)
    {
      comb->elts[comb->n].coef = 1;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}

/* Adds C * COEF * VAL to R.  VAL may be NULL, in that case only
   C * COEF is added to R.  */

static void
aff_combination_add_product (aff_tree *c, const widest_int &coef, tree val,
                             aff_tree *r)
{
  unsigned i;
  tree aval, type;

  for (i = 0; i < c->n; i++)
    {
      aval = c->elts[i].val;
      if (val)
        {
          type = TREE_TYPE (aval);
          aval = fold_build2 (MULT_EXPR, type, aval,
                              fold_convert (type, val));
        }

      aff_combination_add_elt (r, aval, coef * c->elts[i].coef);
    }

  if (c->rest)
    {
      aval = c->rest;
      if (val)
        {
          type = TREE_TYPE (aval);
          aval = fold_build2 (MULT_EXPR, type, aval,
                              fold_convert (type, val));
        }

      aff_combination_add_elt (r, aval, coef);
    }

  if (val)
    {
      if (c->offset.is_constant ())
        /* Access coeffs[0] directly, for efficiency.  */
        aff_combination_add_elt (r, val, coef * c->offset.coeffs[0]);
      else
        {
          /* c->offset is polynomial, so multiply VAL rather than COEF
             by it.  */
          tree offset = wide_int_to_tree (TREE_TYPE (val), c->offset);
          val = fold_build2 (MULT_EXPR, TREE_TYPE (val), val, offset);
          aff_combination_add_elt (r, val, coef);
        }
    }
  else
    aff_combination_add_cst (r, coef * c->offset);
}

/* Multiplies C1 by C2, storing the result to R.  */

void
aff_combination_mult (aff_tree *c1, aff_tree *c2, aff_tree *r)
{
  unsigned i;
  gcc_assert (TYPE_PRECISION (c1->type) == TYPE_PRECISION (c2->type));

  aff_combination_zero (r, c1->type);

  for (i = 0; i < c2->n; i++)
    aff_combination_add_product (c1, c2->elts[i].coef, c2->elts[i].val, r);
  if (c2->rest)
    aff_combination_add_product (c1, 1, c2->rest, r);
  if (c2->offset.is_constant ())
    /* Access coeffs[0] directly, for efficiency.  */
    aff_combination_add_product (c1, c2->offset.coeffs[0], NULL, r);
  else
    {
      /* c2->offset is polynomial, so do the multiplication in tree form.  */
      tree offset = wide_int_to_tree (c2->type, c2->offset);
      aff_combination_add_product (c1, 1, offset, r);
    }
}

/* Returns the element of COMB whose value is VAL, or NULL if no such
   element exists.  If IDX is not NULL, it is set to the index of VAL in
   COMB.  */

static class aff_comb_elt *
aff_combination_find_elt (aff_tree *comb, tree val, unsigned *idx)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, val, 0))
      {
        if (idx)
          *idx = i;

        return &comb->elts[i];
      }

  return NULL;
}

/* Element of the cache that maps ssa name NAME to its expanded form
   as an affine expression EXPANSION.  */

class name_expansion
{
public:
  aff_tree expansion;

  /* True if the expansion for the name is just being generated.  */
  unsigned in_progress : 1;
};

/* Expands SSA names in COMB recursively.  CACHE is used to cache the
   results.  */

void
aff_combination_expand (aff_tree *comb ATTRIBUTE_UNUSED,
                        hash_map<tree, name_expansion *> **cache)
{
  unsigned i;
  aff_tree to_add, current, curre;
  tree e;
  gimple *def;
  widest_int scale;
  class name_expansion *exp;

  aff_combination_zero (&to_add, comb->type);
  for (i = 0; i < comb->n; i++)
    {
      tree type, name;
      enum tree_code code;

      e = comb->elts[i].val;
      type = TREE_TYPE (e);
      name = e;
      /* Look through some conversions.  */
      if (CONVERT_EXPR_P (e)
          && (TYPE_PRECISION (type)
              >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (e, 0)))))
        name = TREE_OPERAND (e, 0);
      if (TREE_CODE (name) != SSA_NAME)
        continue;
      def = SSA_NAME_DEF_STMT (name);
      if (!is_gimple_assign (def) || gimple_assign_lhs (def) != name)
        continue;

      code = gimple_assign_rhs_code (def);
      if (code != SSA_NAME
          && !IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
          && (get_gimple_rhs_class (code) != GIMPLE_SINGLE_RHS
              || !is_gimple_min_invariant (gimple_assign_rhs1 (def))))
        continue;

      /* We do not know whether the reference retains its value at the
         place where the expansion is used.  */
      if (TREE_CODE_CLASS (code) == tcc_reference)
        continue;

      name_expansion **slot = NULL;
      if (*cache)
        slot = (*cache)->get (name);
      exp = slot ? *slot : NULL;
      if (!exp)
        {
          /* Only bother to handle cases tree_to_aff_combination will.  */
          switch (code)
            {
            case POINTER_PLUS_EXPR:
            case PLUS_EXPR:
            case MINUS_EXPR:
            case MULT_EXPR:
              if (!expr_to_aff_combination (&current, code, TREE_TYPE (name),
                                            gimple_assign_rhs1 (def),
                                            gimple_assign_rhs2 (def)))
                continue;
              break;
            case NEGATE_EXPR:
            case BIT_NOT_EXPR:
              if (!expr_to_aff_combination (&current, code, TREE_TYPE (name),
                                            gimple_assign_rhs1 (def)))
                continue;
              break;
            CASE_CONVERT:
              if (!expr_to_aff_combination (&current, code, TREE_TYPE (name),
                                            gimple_assign_rhs1 (def)))
                /* This makes us always expand conversions which we did
                   in the past and makes gcc.dg/tree-ssa/ivopts-lt-2.c
                   PASS, eliminating one induction variable in IVOPTs.
                   ??? But it is really excessive and we should try
                   harder to do without it.  */
                aff_combination_elt (&current, TREE_TYPE (name),
                                     fold_convert (TREE_TYPE (name),
                                                   gimple_assign_rhs1 (def)));
              break;
            case ADDR_EXPR:
            case INTEGER_CST:
            case POLY_INT_CST:
              tree_to_aff_combination (gimple_assign_rhs1 (def),
                                       TREE_TYPE (name), &current);
              break;
            default:
              continue;
            }
          exp = XNEW (class name_expansion);
          exp->in_progress = 1;
          if (!*cache)
            *cache = new hash_map<tree, name_expansion *>;
          (*cache)->put (name, exp);
          aff_combination_expand (&current, cache);
          exp->expansion = current;
          exp->in_progress = 0;
        }
      else
        {
          /* Since we follow the definitions in the SSA form, we should not
             enter a cycle unless we pass through a phi node.  */
          gcc_assert (!exp->in_progress);
          current = exp->expansion;
        }
      if (!useless_type_conversion_p (comb->type, current.type))
        aff_combination_convert (&current, comb->type);

      /* Accumulate the new terms to TO_ADD, so that we do not modify
         COMB while traversing it; include the term -coef * E, to remove
         it from COMB.  */
      scale = comb->elts[i].coef;
      aff_combination_zero (&curre, comb->type);
      aff_combination_add_elt (&curre, e, -scale);
      aff_combination_scale (&current, scale);
      aff_combination_add (&to_add, &current);
      aff_combination_add (&to_add, &curre);
    }
  aff_combination_add (comb, &to_add);
}

/* Similar to tree_to_aff_combination, but follows SSA name definitions
   and expands them recursively.  CACHE is used to cache the expansions
   of the ssa names, to avoid exponential time complexity for cases
   like

   a1 = a0 + a0;
   a2 = a1 + a1;
   a3 = a2 + a2;
   ...  */

void
tree_to_aff_combination_expand (tree expr, tree type, aff_tree *comb,
                                hash_map<tree, name_expansion *> **cache)
{
  tree_to_aff_combination (expr, type, comb);
  aff_combination_expand (comb, cache);
}

/* Frees memory occupied by struct name_expansion in *VALUE.  Callback for
   hash_map::traverse.  */

bool
free_name_expansion (tree const &, name_expansion **value, void *)
{
  free (*value);
  return true;
}

/* Frees memory allocated for the CACHE used by
   tree_to_aff_combination_expand.  */

void
free_affine_expand_cache (hash_map<tree, name_expansion *> **cache)
{
  if (!*cache)
    return;

  (*cache)->traverse<void *, free_name_expansion> (NULL);
  delete (*cache);
  *cache = NULL;
}

/* If VAL != CST * DIV for any constant CST, returns false.
   Otherwise, if *MULT_SET is true, additionally compares CST and MULT,
   and if they are different, returns false.  Finally, if neither of these
   two cases occur, true is returned, and CST is stored to MULT and MULT_SET
   is set to true.  */

static bool
wide_int_constant_multiple_p (const poly_widest_int &val,
                              const poly_widest_int &div,
                              bool *mult_set, poly_widest_int *mult)
{
  poly_widest_int rem, cst;

  if (known_eq (val, 0))
    {
      if (*mult_set && maybe_ne (*mult, 0))
        return false;
      *mult_set = true;
      *mult = 0;
      return true;
    }

  if (maybe_eq (div, 0))
    return false;

  if (!multiple_p (val, div, &cst))
    return false;

  if (*mult_set && maybe_ne (*mult, cst))
    return false;

  *mult_set = true;
  *mult = cst;
  return true;
}

/* Returns true if VAL = X * DIV for some constant X.  If this is the case,
   X is stored to MULT.  */

bool
aff_combination_constant_multiple_p (aff_tree *val, aff_tree *div,
                                     poly_widest_int *mult)
{
  bool mult_set = false;
  unsigned i;

  if (val->n == 0 && known_eq (val->offset, 0))
    {
      *mult = 0;
      return true;
    }
  if (val->n != div->n)
    return false;

  if (val->rest || div->rest)
    return false;

  if (!wide_int_constant_multiple_p (val->offset, div->offset,
                                     &mult_set, mult))
    return false;

  for (i = 0; i < div->n; i++)
    {
      class aff_comb_elt *elt
        = aff_combination_find_elt (val, div->elts[i].val, NULL);
      if (!elt)
        return false;
      if (!wide_int_constant_multiple_p (elt->coef, div->elts[i].coef,
                                         &mult_set, mult))
        return false;
    }

  gcc_assert (mult_set);
  return true;
}

/* Prints the affine VAL to the FILE.  */

static void
print_aff (FILE *file, aff_tree *val)
{
  unsigned i;
  signop sgn = TYPE_SIGN (val->type);
  if (POINTER_TYPE_P (val->type))
    sgn = SIGNED;
  fprintf (file, "{\n  type = ");
  print_generic_expr (file, val->type, TDF_VOPS|TDF_MEMSYMS);
  fprintf (file, "\n  offset = ");
  print_dec (val->offset, file, sgn);
  if (val->n > 0)
    {
      fprintf (file, "\n  elements = {\n");
      for (i = 0; i < val->n; i++)
        {
          fprintf (file, "    [%d] = ", i);
          print_generic_expr (file, val->elts[i].val, TDF_VOPS|TDF_MEMSYMS);

          fprintf (file, " * ");
          print_dec (val->elts[i].coef, file, sgn);
          if (i != val->n - 1)
            fprintf (file, ", \n");
        }
      fprintf (file, "\n  }");
    }
  if (val->rest)
    {
      fprintf (file, "\n  rest = ");
      print_generic_expr (file, val->rest, TDF_VOPS|TDF_MEMSYMS);
    }
  fprintf (file, "\n}");
}

/* Prints the affine VAL to the standard error, used for debugging.  */

DEBUG_FUNCTION void
debug_aff (aff_tree *val)
{
  print_aff (stderr, val);
  fprintf (stderr, "\n");
}

/* Computes address of the reference REF in ADDR.  The size of the accessed
   location is stored to SIZE.  Returns the ultimate containing object to
   which REF refers.  */

tree
get_inner_reference_aff (tree ref, aff_tree *addr, poly_widest_int *size)
{
  poly_int64 bitsize, bitpos;
  tree toff;
  machine_mode mode;
  int uns, rev, vol;
  aff_tree tmp;
  tree base = get_inner_reference (ref, &bitsize, &bitpos, &toff, &mode,
                                   &uns, &rev, &vol);
  tree base_addr = build_fold_addr_expr (base);

  /* ADDR = &BASE + TOFF + BITPOS / BITS_PER_UNIT.  */

  tree_to_aff_combination (base_addr, sizetype, addr);

  if (toff)
    {
      tree_to_aff_combination (toff, sizetype, &tmp);
      aff_combination_add (addr, &tmp);
    }

  aff_combination_const (&tmp, sizetype, bits_to_bytes_round_down (bitpos));
  aff_combination_add (addr, &tmp);

  *size = bits_to_bytes_round_up (bitsize);

  return base;
}

/* Returns true if a region of size SIZE1 at position 0 and a region of
   size SIZE2 at position DIFF cannot overlap.  */

bool
aff_comb_cannot_overlap_p (aff_tree *diff, const poly_widest_int &size1,
                           const poly_widest_int &size2)
{
  /* Unless the difference is a constant, we fail.  */
  if (diff->n != 0)
    return false;

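  /* With a polynomial offset, the difference may not be comparable against
     zero, in which case we cannot prove anything.  */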
  if (!ordered_p (diff->offset, 0))
    return false;

  if (maybe_lt (diff->offset, 0))
    {
      /* The second object is before the first one, we succeed if the last
         element of the second object is before the start of the first one.  */
      return known_le (diff->offset + size2, 0);
    }
  else
    {
      /* We succeed if the second object starts after the first one ends.  */
      return known_le (size1, diff->offset);
    }
}