/* Operations with affine combinations of trees.
   Copyright (C) 2005-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "expr.h"
#include "tree-pretty-print.h"
#include "tree-affine.h"
#include "predict.h"
#include "vec.h"
#include "hashtab.h"
#include "hash-set.h"
#include "machmode.h"
#include "tm.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "flags.h"
#include "dumpfile.h"
#include "cfgexpand.h"
#include "wide-int-print.h"

/* Extends CST as appropriate for the affine combination COMB.  */

widest_int
wide_int_ext_for_comb (const widest_int &cst, aff_tree *comb)
{
  return wi::sext (cst, TYPE_PRECISION (comb->type));
}
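
/* As an illustration (hypothetical values, not taken from a real caller):
   for a COMB whose type has 8-bit precision, a CST of 255 is sign-extended
   from bit 7 and therefore comes back as -1, while 127 is returned
   unchanged.  */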

/* Initializes affine combination COMB so that its value is zero in TYPE.  */

static void
aff_combination_zero (aff_tree *comb, tree type)
{
  int i;
  comb->type = type;
  comb->offset = 0;
  comb->n = 0;
  for (i = 0; i < MAX_AFF_ELTS; i++)
    comb->elts[i].coef = 0;
  comb->rest = NULL_TREE;
}

/* Sets COMB to CST.  */

void
aff_combination_const (aff_tree *comb, tree type, const widest_int &cst)
{
  aff_combination_zero (comb, type);
  comb->offset = wide_int_ext_for_comb (cst, comb);
}

/* Sets COMB to single element ELT.  */

void
aff_combination_elt (aff_tree *comb, tree type, tree elt)
{
  aff_combination_zero (comb, type);

  comb->n = 1;
  comb->elts[0].val = elt;
  comb->elts[0].coef = 1;
}

/* Scales COMB by SCALE.  */

void
aff_combination_scale (aff_tree *comb, const widest_int &scale_in)
{
  unsigned i, j;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb);
  if (scale == 1)
    return;

  if (scale == 0)
    {
      aff_combination_zero (comb, comb->type);
      return;
    }

  comb->offset = wide_int_ext_for_comb (scale * comb->offset, comb);
  for (i = 0, j = 0; i < comb->n; i++)
    {
      widest_int new_coef
        = wide_int_ext_for_comb (scale * comb->elts[i].coef, comb);
      /* A coefficient may become zero due to overflow.  Remove the zero
         elements.  */
      if (new_coef == 0)
        continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = comb->elts[i].val;
      j++;
    }
  comb->n = j;

  if (comb->rest)
    {
      tree type = comb->type;
      if (POINTER_TYPE_P (type))
        type = sizetype;
      if (comb->n < MAX_AFF_ELTS)
        {
          comb->elts[comb->n].coef = scale;
          comb->elts[comb->n].val = comb->rest;
          comb->rest = NULL_TREE;
          comb->n++;
        }
      else
        comb->rest = fold_build2 (MULT_EXPR, type, comb->rest,
                                  wide_int_to_tree (type, scale));
    }
}
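
/* As an illustration (hypothetical values): scaling the combination
   { offset = 2, elts = { 3 * x } } by 4 yields
   { offset = 8, elts = { 12 * x } }, while scaling by 0 resets the whole
   combination to zero.  */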

/* Adds ELT * SCALE to COMB.  */

void
aff_combination_add_elt (aff_tree *comb, tree elt, const widest_int &scale_in)
{
  unsigned i;
  tree type;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb);
  if (scale == 0)
    return;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, elt, 0))
      {
        widest_int new_coef
          = wide_int_ext_for_comb (comb->elts[i].coef + scale, comb);
        if (new_coef != 0)
          {
            comb->elts[i].coef = new_coef;
            return;
          }

        comb->n--;
        comb->elts[i] = comb->elts[comb->n];

        if (comb->rest)
          {
            gcc_assert (comb->n == MAX_AFF_ELTS - 1);
            comb->elts[comb->n].coef = 1;
            comb->elts[comb->n].val = comb->rest;
            comb->rest = NULL_TREE;
            comb->n++;
          }
        return;
      }
  if (comb->n < MAX_AFF_ELTS)
    {
      comb->elts[comb->n].coef = scale;
      comb->elts[comb->n].val = elt;
      comb->n++;
      return;
    }

  type = comb->type;
  if (POINTER_TYPE_P (type))
    type = sizetype;

  if (scale == 1)
    elt = fold_convert (type, elt);
  else
    elt = fold_build2 (MULT_EXPR, type,
                       fold_convert (type, elt),
                       wide_int_to_tree (type, scale));

  if (comb->rest)
    comb->rest = fold_build2 (PLUS_EXPR, type, comb->rest, elt);
  else
    comb->rest = elt;
}
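
/* As an illustration (hypothetical values): adding the element x with scale
   -3 to a combination that already contains 3 * x cancels the coefficient,
   so the element is dropped; if the combination was full, the freed slot is
   immediately reused for REST.  */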

/* Adds CST to C.  */

static void
aff_combination_add_cst (aff_tree *c, const widest_int &cst)
{
  c->offset = wide_int_ext_for_comb (c->offset + cst, c);
}

/* Adds COMB2 to COMB1.  */

void
aff_combination_add (aff_tree *comb1, aff_tree *comb2)
{
  unsigned i;

  aff_combination_add_cst (comb1, comb2->offset);
  for (i = 0; i < comb2->n; i++)
    aff_combination_add_elt (comb1, comb2->elts[i].val, comb2->elts[i].coef);
  if (comb2->rest)
    aff_combination_add_elt (comb1, comb2->rest, 1);
}

/* Converts affine combination COMB to TYPE.  */

void
aff_combination_convert (aff_tree *comb, tree type)
{
  unsigned i, j;
  tree comb_type = comb->type;

  if (TYPE_PRECISION (type) > TYPE_PRECISION (comb_type))
    {
      tree val = fold_convert (type, aff_combination_to_tree (comb));
      tree_to_aff_combination (val, type, comb);
      return;
    }

  comb->type = type;
  if (comb->rest && !POINTER_TYPE_P (type))
    comb->rest = fold_convert (type, comb->rest);

  if (TYPE_PRECISION (type) == TYPE_PRECISION (comb_type))
    return;

  comb->offset = wide_int_ext_for_comb (comb->offset, comb);
  for (i = j = 0; i < comb->n; i++)
    {
      if (comb->elts[i].coef == 0)
        continue;
      comb->elts[j].coef = comb->elts[i].coef;
      comb->elts[j].val = fold_convert (type, comb->elts[i].val);
      j++;
    }

  comb->n = j;
  if (comb->n < MAX_AFF_ELTS && comb->rest)
    {
      comb->elts[comb->n].coef = 1;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}

/* Splits EXPR into an affine combination of parts.  */

void
tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
{
  aff_tree tmp;
  enum tree_code code;
  tree cst, core, toffset;
  HOST_WIDE_INT bitpos, bitsize;
  machine_mode mode;
  int unsignedp, volatilep;

  STRIP_NOPS (expr);

  code = TREE_CODE (expr);
  switch (code)
    {
    case INTEGER_CST:
      aff_combination_const (comb, type, wi::to_widest (expr));
      return;

    case POINTER_PLUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    case PLUS_EXPR:
    case MINUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), type, &tmp);
      if (code == MINUS_EXPR)
        aff_combination_scale (&tmp, -1);
      aff_combination_add (comb, &tmp);
      return;

    case MULT_EXPR:
      cst = TREE_OPERAND (expr, 1);
      if (TREE_CODE (cst) != INTEGER_CST)
        break;
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, wi::to_widest (cst));
      return;

    case NEGATE_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, -1);
      return;

    case BIT_NOT_EXPR:
      /* ~x = -x - 1 */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, -1);
      aff_combination_add_cst (comb, -1);
      return;

    case ADDR_EXPR:
      /* Handle &MEM[ptr + CST] which is equivalent to POINTER_PLUS_EXPR.  */
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == MEM_REF)
        {
          expr = TREE_OPERAND (expr, 0);
          tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
          tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
          aff_combination_add (comb, &tmp);
          return;
        }
      core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
                                  &toffset, &mode, &unsignedp, &volatilep,
                                  false);
      if (bitpos % BITS_PER_UNIT != 0)
        break;
      aff_combination_const (comb, type, bitpos / BITS_PER_UNIT);
      if (TREE_CODE (core) == MEM_REF)
        {
          aff_combination_add_cst (comb, wi::to_widest (TREE_OPERAND (core, 1)));
          core = TREE_OPERAND (core, 0);
        }
      else
        core = build_fold_addr_expr (core);

      if (TREE_CODE (core) == ADDR_EXPR)
        aff_combination_add_elt (comb, core, 1);
      else
        {
          tree_to_aff_combination (core, type, &tmp);
          aff_combination_add (comb, &tmp);
        }
      if (toffset)
        {
          tree_to_aff_combination (toffset, type, &tmp);
          aff_combination_add (comb, &tmp);
        }
      return;

    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
        tree_to_aff_combination (TREE_OPERAND (TREE_OPERAND (expr, 0), 0),
                                 type, comb);
      else if (integer_zerop (TREE_OPERAND (expr, 1)))
        {
          aff_combination_elt (comb, type, expr);
          return;
        }
      else
        aff_combination_elt (comb, type,
                             build2 (MEM_REF, TREE_TYPE (expr),
                                     TREE_OPERAND (expr, 0),
                                     build_int_cst
                                     (TREE_TYPE (TREE_OPERAND (expr, 1)), 0)));
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    default:
      break;
    }

  aff_combination_elt (comb, type, expr);
}
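
/* As an illustration (hypothetical expression): for (x + 3) * 4 - y the
   recursion above produces the combination
   { offset = 12, elts = { 4 * x, -1 * y } }.  */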

/* Creates EXPR + ELT * SCALE in TYPE.  EXPR is taken from affine
   combination COMB.  */

static tree
add_elt_to_tree (tree expr, tree type, tree elt, const widest_int &scale_in,
                 aff_tree *comb ATTRIBUTE_UNUSED)
{
  enum tree_code code;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb);

  if (scale == -1
      && POINTER_TYPE_P (TREE_TYPE (elt)))
    {
      elt = convert_to_ptrofftype (elt);
      elt = fold_build1 (NEGATE_EXPR, TREE_TYPE (elt), elt);
      scale = 1;
    }

  if (scale == 1)
    {
      if (!expr)
        {
          if (POINTER_TYPE_P (TREE_TYPE (elt)))
            return elt;
          else
            return fold_convert (type1, elt);
        }

      if (POINTER_TYPE_P (TREE_TYPE (expr)))
        return fold_build_pointer_plus (expr, elt);
      if (POINTER_TYPE_P (TREE_TYPE (elt)))
        return fold_build_pointer_plus (elt, expr);
      return fold_build2 (PLUS_EXPR, type1,
                          expr, fold_convert (type1, elt));
    }

  if (scale == -1)
    {
      if (!expr)
        return fold_build1 (NEGATE_EXPR, type1,
                            fold_convert (type1, elt));

      if (POINTER_TYPE_P (TREE_TYPE (expr)))
        {
          elt = convert_to_ptrofftype (elt);
          elt = fold_build1 (NEGATE_EXPR, TREE_TYPE (elt), elt);
          return fold_build_pointer_plus (expr, elt);
        }
      return fold_build2 (MINUS_EXPR, type1,
                          expr, fold_convert (type1, elt));
    }

  elt = fold_convert (type1, elt);
  if (!expr)
    return fold_build2 (MULT_EXPR, type1, elt,
                        wide_int_to_tree (type1, scale));

  if (wi::neg_p (scale))
    {
      code = MINUS_EXPR;
      scale = -scale;
    }
  else
    code = PLUS_EXPR;

  elt = fold_build2 (MULT_EXPR, type1, elt,
                     wide_int_to_tree (type1, scale));
  if (POINTER_TYPE_P (TREE_TYPE (expr)))
    {
      if (code == MINUS_EXPR)
        elt = fold_build1 (NEGATE_EXPR, type1, elt);
      return fold_build_pointer_plus (expr, elt);
    }
  return fold_build2 (code, type1, expr, elt);
}

/* Makes tree from the affine combination COMB.  */

tree
aff_combination_to_tree (aff_tree *comb)
{
  tree type = comb->type;
  tree expr = NULL_TREE;
  unsigned i;
  widest_int off, sgn;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  gcc_assert (comb->n == MAX_AFF_ELTS || comb->rest == NULL_TREE);

  for (i = 0; i < comb->n; i++)
    expr = add_elt_to_tree (expr, type, comb->elts[i].val, comb->elts[i].coef,
                            comb);

  if (comb->rest)
    expr = add_elt_to_tree (expr, type, comb->rest, 1, comb);

  /* Ensure that we get x - 1, not x + (-1) or x + 0xff..f if x is
     unsigned.  */
  if (wi::neg_p (comb->offset))
    {
      off = -comb->offset;
      sgn = -1;
    }
  else
    {
      off = comb->offset;
      sgn = 1;
    }
  return add_elt_to_tree (expr, type, wide_int_to_tree (type1, off), sgn,
                          comb);
}
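
/* A minimal usage sketch (EXPR and TYPE are placeholders): converting a tree
   into an affine combination, adjusting it, and materializing it again is the
   typical way these helpers are combined.

     aff_tree comb;
     tree_to_aff_combination (expr, type, &comb);
     aff_combination_scale (&comb, 2);
     tree doubled = aff_combination_to_tree (&comb);

   DOUBLED then represents 2 * EXPR; since the result is built with the
   fold_* routines, simple cases such as a constant EXPR collapse back to a
   single INTEGER_CST.  */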

/* Copies the tree elements of COMB to ensure that they are not shared.  */

void
unshare_aff_combination (aff_tree *comb)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    comb->elts[i].val = unshare_expr (comb->elts[i].val);
  if (comb->rest)
    comb->rest = unshare_expr (comb->rest);
}

/* Remove M-th element from COMB.  */

void
aff_combination_remove_elt (aff_tree *comb, unsigned m)
{
  comb->n--;
  if (m <= comb->n)
    comb->elts[m] = comb->elts[comb->n];
  if (comb->rest)
    {
      comb->elts[comb->n].coef = 1;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}

/* Adds C * COEF * VAL to R.  VAL may be NULL, in that case only
   C * COEF is added to R.  */

static void
aff_combination_add_product (aff_tree *c, const widest_int &coef, tree val,
                             aff_tree *r)
{
  unsigned i;
  tree aval, type;

  for (i = 0; i < c->n; i++)
    {
      aval = c->elts[i].val;
      if (val)
        {
          type = TREE_TYPE (aval);
          aval = fold_build2 (MULT_EXPR, type, aval,
                              fold_convert (type, val));
        }

      aff_combination_add_elt (r, aval, coef * c->elts[i].coef);
    }

  if (c->rest)
    {
      aval = c->rest;
      if (val)
        {
          type = TREE_TYPE (aval);
          aval = fold_build2 (MULT_EXPR, type, aval,
                              fold_convert (type, val));
        }

      aff_combination_add_elt (r, aval, coef);
    }

  if (val)
    aff_combination_add_elt (r, val, coef * c->offset);
  else
    aff_combination_add_cst (r, coef * c->offset);
}

/* Multiplies C1 by C2, storing the result to R.  */

void
aff_combination_mult (aff_tree *c1, aff_tree *c2, aff_tree *r)
{
  unsigned i;
  gcc_assert (TYPE_PRECISION (c1->type) == TYPE_PRECISION (c2->type));

  aff_combination_zero (r, c1->type);

  for (i = 0; i < c2->n; i++)
    aff_combination_add_product (c1, c2->elts[i].coef, c2->elts[i].val, r);
  if (c2->rest)
    aff_combination_add_product (c1, 1, c2->rest, r);
  aff_combination_add_product (c1, c2->offset, NULL, r);
}
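
/* As an illustration (hypothetical values): multiplying
   { offset = 2, elts = { 1 * x } } by { offset = 3, elts = { 1 * y } } gives
   { offset = 6, elts = { 3 * x, 2 * y, 1 * (x * y) } }, where x * y is kept
   as a single element whose value is the folded MULT_EXPR.  */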

/* Returns the element of COMB whose value is VAL, or NULL if no such
   element exists.  If IDX is not NULL, it is set to the index of VAL in
   COMB.  */

static struct aff_comb_elt *
aff_combination_find_elt (aff_tree *comb, tree val, unsigned *idx)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, val, 0))
      {
        if (idx)
          *idx = i;

        return &comb->elts[i];
      }

  return NULL;
}

/* Element of the cache that maps ssa name NAME to its expanded form
   as an affine expression EXPANSION.  */

struct name_expansion
{
  aff_tree expansion;

  /* True if the expansion for the name is just being generated.  */
  unsigned in_progress : 1;
};

/* Expands SSA names in COMB recursively.  CACHE is used to cache the
   results.  */

void
aff_combination_expand (aff_tree *comb ATTRIBUTE_UNUSED,
                        hash_map<tree, name_expansion *> **cache)
{
  unsigned i;
  aff_tree to_add, current, curre;
  tree e, rhs;
  gimple def;
  widest_int scale;
  struct name_expansion *exp;

  aff_combination_zero (&to_add, comb->type);
  for (i = 0; i < comb->n; i++)
    {
      tree type, name;
      enum tree_code code;

      e = comb->elts[i].val;
      type = TREE_TYPE (e);
      name = e;
      /* Look through some conversions.  */
      if (CONVERT_EXPR_P (e)
          && (TYPE_PRECISION (type)
              >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (e, 0)))))
        name = TREE_OPERAND (e, 0);
      if (TREE_CODE (name) != SSA_NAME)
        continue;
      def = SSA_NAME_DEF_STMT (name);
      if (!is_gimple_assign (def) || gimple_assign_lhs (def) != name)
        continue;

      code = gimple_assign_rhs_code (def);
      if (code != SSA_NAME
          && !IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
          && (get_gimple_rhs_class (code) != GIMPLE_SINGLE_RHS
              || !is_gimple_min_invariant (gimple_assign_rhs1 (def))))
        continue;

      /* We do not know whether the reference retains its value at the
         place where the expansion is used.  */
      if (TREE_CODE_CLASS (code) == tcc_reference)
        continue;

      if (!*cache)
        *cache = new hash_map<tree, name_expansion *>;
      name_expansion **slot = &(*cache)->get_or_insert (e);
      exp = *slot;

      if (!exp)
        {
          exp = XNEW (struct name_expansion);
          exp->in_progress = 1;
          *slot = exp;
          /* In principle this is a generally valid folding, but
             it is not unconditionally an optimization, so do it
             here and not in fold_unary.  */
          /* Convert (T1)(X *+- CST) into (T1)X *+- (T1)CST if T1 is wider
             than the type of X and overflow for the type of X is
             undefined.  */
          if (e != name
              && INTEGRAL_TYPE_P (type)
              && INTEGRAL_TYPE_P (TREE_TYPE (name))
              && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (name))
              && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (name))
              && (code == PLUS_EXPR || code == MINUS_EXPR || code == MULT_EXPR)
              && TREE_CODE (gimple_assign_rhs2 (def)) == INTEGER_CST)
            rhs = fold_build2 (code, type,
                               fold_convert (type, gimple_assign_rhs1 (def)),
                               fold_convert (type, gimple_assign_rhs2 (def)));
          else
            {
              rhs = gimple_assign_rhs_to_tree (def);
              if (e != name)
                rhs = fold_convert (type, rhs);
            }
          tree_to_aff_combination_expand (rhs, comb->type, &current, cache);
          exp->expansion = current;
          exp->in_progress = 0;
        }
      else
        {
          /* Since we follow the definitions in the SSA form, we should not
             enter a cycle unless we pass through a phi node.  */
          gcc_assert (!exp->in_progress);
          current = exp->expansion;
        }

      /* Accumulate the new terms to TO_ADD, so that we do not modify
         COMB while traversing it; include the term -coef * E, to remove
         it from COMB.  */
      scale = comb->elts[i].coef;
      aff_combination_zero (&curre, comb->type);
      aff_combination_add_elt (&curre, e, -scale);
      aff_combination_scale (&current, scale);
      aff_combination_add (&to_add, &current);
      aff_combination_add (&to_add, &curre);
    }
  aff_combination_add (comb, &to_add);
}

/* Similar to tree_to_aff_combination, but follows SSA name definitions
   and expands them recursively.  CACHE is used to cache the expansions
   of the ssa names, to avoid exponential time complexity for cases
   like

   a1 = a0 + a0;
   a2 = a1 + a1;
   a3 = a2 + a2;
   ...  */

void
tree_to_aff_combination_expand (tree expr, tree type, aff_tree *comb,
                                hash_map<tree, name_expansion *> **cache)
{
  tree_to_aff_combination (expr, type, comb);
  aff_combination_expand (comb, cache);
}

/* Frees memory occupied by struct name_expansion in *VALUE.  Callback for
   hash_map::traverse.  */

bool
free_name_expansion (tree const &, name_expansion **value, void *)
{
  free (*value);
  return true;
}

/* Frees memory allocated for the CACHE used by
   tree_to_aff_combination_expand.  */

void
free_affine_expand_cache (hash_map<tree, name_expansion *> **cache)
{
  if (!*cache)
    return;

  (*cache)->traverse<void *, free_name_expansion> (NULL);
  delete (*cache);
  *cache = NULL;
}
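
/* A minimal usage sketch (EXPR1, EXPR2 and TYPE are placeholders): the cache
   is created lazily by aff_combination_expand, so a caller only initializes
   the pointer to NULL, shares it across queries, and releases it at the end.

     hash_map<tree, name_expansion *> *cache = NULL;
     aff_tree a1, a2;
     tree_to_aff_combination_expand (expr1, type, &a1, &cache);
     tree_to_aff_combination_expand (expr2, type, &a2, &cache);
     free_affine_expand_cache (&cache);  */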

/* If VAL != CST * DIV for any constant CST, returns false.
   Otherwise, if *MULT_SET is true, additionally compares CST and MULT,
   and if they are different, returns false.  Finally, if neither of these
   two cases occur, true is returned, and CST is stored to MULT and MULT_SET
   is set to true.  */

static bool
wide_int_constant_multiple_p (const widest_int &val, const widest_int &div,
                              bool *mult_set, widest_int *mult)
{
  widest_int rem, cst;

  if (val == 0)
    {
      if (*mult_set && *mult != 0)
        return false;
      *mult_set = true;
      *mult = 0;
      return true;
    }

  if (div == 0)
    return false;

  if (!wi::multiple_of_p (val, div, SIGNED, &cst))
    return false;

  if (*mult_set && *mult != cst)
    return false;

  *mult_set = true;
  *mult = cst;
  return true;
}

/* Returns true if VAL = X * DIV for some constant X.  If this is the case,
   X is stored to MULT.  */

bool
aff_combination_constant_multiple_p (aff_tree *val, aff_tree *div,
                                     widest_int *mult)
{
  bool mult_set = false;
  unsigned i;

  if (val->n == 0 && val->offset == 0)
    {
      *mult = 0;
      return true;
    }
  if (val->n != div->n)
    return false;

  if (val->rest || div->rest)
    return false;

  if (!wide_int_constant_multiple_p (val->offset, div->offset,
                                     &mult_set, mult))
    return false;

  for (i = 0; i < div->n; i++)
    {
      struct aff_comb_elt *elt
        = aff_combination_find_elt (val, div->elts[i].val, NULL);
      if (!elt)
        return false;
      if (!wide_int_constant_multiple_p (elt->coef, div->elts[i].coef,
                                         &mult_set, mult))
        return false;
    }

  gcc_assert (mult_set);
  return true;
}
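
/* As an illustration (hypothetical values): for
   VAL = { offset = 8, elts = { 4 * x } } and
   DIV = { offset = 2, elts = { 1 * x } } the quotients agree (8 / 2 == 4 / 1),
   so the function returns true with *MULT == 4; if they disagreed, it would
   return false.  */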

/* Prints the affine VAL to the FILE.  */

static void
print_aff (FILE *file, aff_tree *val)
{
  unsigned i;
  signop sgn = TYPE_SIGN (val->type);
  if (POINTER_TYPE_P (val->type))
    sgn = SIGNED;
  fprintf (file, "{\n  type = ");
  print_generic_expr (file, val->type, TDF_VOPS|TDF_MEMSYMS);
  fprintf (file, "\n  offset = ");
  print_dec (val->offset, file, sgn);
  if (val->n > 0)
    {
      fprintf (file, "\n  elements = {\n");
      for (i = 0; i < val->n; i++)
        {
          fprintf (file, "    [%d] = ", i);
          print_generic_expr (file, val->elts[i].val, TDF_VOPS|TDF_MEMSYMS);

          fprintf (file, " * ");
          print_dec (val->elts[i].coef, file, sgn);
          if (i != val->n - 1)
            fprintf (file, ", \n");
        }
      fprintf (file, "\n  }");
    }
  if (val->rest)
    {
      fprintf (file, "\n  rest = ");
      print_generic_expr (file, val->rest, TDF_VOPS|TDF_MEMSYMS);
    }
  fprintf (file, "\n}");
}

/* Prints the affine VAL to the standard error, used for debugging.  */

DEBUG_FUNCTION void
debug_aff (aff_tree *val)
{
  print_aff (stderr, val);
  fprintf (stderr, "\n");
}

/* Computes address of the reference REF in ADDR.  The size of the accessed
   location is stored to SIZE.  Returns the ultimate containing object to
   which REF refers.  */

tree
get_inner_reference_aff (tree ref, aff_tree *addr, widest_int *size)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree toff;
  machine_mode mode;
  int uns, vol;
  aff_tree tmp;
  tree base = get_inner_reference (ref, &bitsize, &bitpos, &toff, &mode,
                                   &uns, &vol, false);
  tree base_addr = build_fold_addr_expr (base);

  /* ADDR = &BASE + TOFF + BITPOS / BITS_PER_UNIT.  */

  tree_to_aff_combination (base_addr, sizetype, addr);

  if (toff)
    {
      tree_to_aff_combination (toff, sizetype, &tmp);
      aff_combination_add (addr, &tmp);
    }

  aff_combination_const (&tmp, sizetype, bitpos / BITS_PER_UNIT);
  aff_combination_add (addr, &tmp);

  *size = (bitsize + BITS_PER_UNIT - 1) / BITS_PER_UNIT;

  return base;
}

/* Returns true if a region of size SIZE1 at position 0 and a region of
   size SIZE2 at position DIFF cannot overlap.  */

bool
aff_comb_cannot_overlap_p (aff_tree *diff, const widest_int &size1,
                           const widest_int &size2)
{
  /* Unless the difference is a constant, we fail.  */
  if (diff->n != 0)
    return false;

  if (wi::neg_p (diff->offset))
    {
      /* The second object is before the first one, we succeed if the last
         element of the second object is before the start of the first one.  */
      return wi::neg_p (diff->offset + size2 - 1);
    }
  else
    {
      /* We succeed if the second object starts after the first one ends.  */
      return wi::les_p (size1, diff->offset);
    }
}
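
/* As an illustration (hypothetical values): with DIFF->offset == 16,
   SIZE1 == 8 and SIZE2 == 4 the second region occupies bytes 16..19, past
   the end of the first region (bytes 0..7), so the function returns true;
   with DIFF->offset == 4 the regions would share bytes 4..7 and it returns
   false.  */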