]>
Commit | Line | Data |
---|---|---|
73f30c63 | 1 | /* Operations with affine combinations of trees. |
23a5b65a | 2 | Copyright (C) 2005-2014 Free Software Foundation, Inc. |
b8698a0f | 3 | |
73f30c63 | 4 | This file is part of GCC. |
b8698a0f | 5 | |
73f30c63 ZD |
6 | GCC is free software; you can redistribute it and/or modify it |
7 | under the terms of the GNU General Public License as published by the | |
9dcd6f09 | 8 | Free Software Foundation; either version 3, or (at your option) any |
73f30c63 | 9 | later version. |
b8698a0f | 10 | |
73f30c63 ZD |
11 | GCC is distributed in the hope that it will be useful, but WITHOUT |
12 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
13 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
14 | for more details. | |
b8698a0f | 15 | |
73f30c63 | 16 | You should have received a copy of the GNU General Public License |
9dcd6f09 NC |
17 | along with GCC; see the file COPYING3. If not see |
18 | <http://www.gnu.org/licenses/>. */ | |
73f30c63 ZD |
19 | |
20 | #include "config.h" | |
21 | #include "system.h" | |
22 | #include "coretypes.h" | |
73f30c63 | 23 | #include "tree.h" |
d8a2d370 | 24 | #include "expr.h" |
cf835838 | 25 | #include "tree-pretty-print.h" |
bbc8a8dc | 26 | #include "pointer-set.h" |
73f30c63 | 27 | #include "tree-affine.h" |
2fb9a547 AM |
28 | #include "basic-block.h" |
29 | #include "tree-ssa-alias.h" | |
30 | #include "internal-fn.h" | |
31 | #include "gimple-expr.h" | |
32 | #include "is-a.h" | |
18f429e2 | 33 | #include "gimple.h" |
45b0be94 | 34 | #include "gimplify.h" |
e544c850 | 35 | #include "flags.h" |
7ee2468b | 36 | #include "dumpfile.h" |
1fe37220 | 37 | #include "cfgexpand.h" |
73f30c63 ZD |
38 | |
39 | /* Extends CST as appropriate for the affine combinations COMB. */ | |
40 | ||
41 | double_int | |
42 | double_int_ext_for_comb (double_int cst, aff_tree *comb) | |
43 | { | |
27bcd47c | 44 | return cst.sext (TYPE_PRECISION (comb->type)); |
73f30c63 ZD |
45 | } |
46 | ||
47 | /* Initializes affine combination COMB so that its value is zero in TYPE. */ | |
48 | ||
49 | static void | |
50 | aff_combination_zero (aff_tree *comb, tree type) | |
51 | { | |
52 | comb->type = type; | |
53 | comb->offset = double_int_zero; | |
54 | comb->n = 0; | |
55 | comb->rest = NULL_TREE; | |
56 | } | |
57 | ||
58 | /* Sets COMB to CST. */ | |
59 | ||
60 | void | |
61 | aff_combination_const (aff_tree *comb, tree type, double_int cst) | |
62 | { | |
63 | aff_combination_zero (comb, type); | |
64 | comb->offset = double_int_ext_for_comb (cst, comb); | |
65 | } | |
66 | ||
67 | /* Sets COMB to single element ELT. */ | |
68 | ||
69 | void | |
70 | aff_combination_elt (aff_tree *comb, tree type, tree elt) | |
71 | { | |
72 | aff_combination_zero (comb, type); | |
73 | ||
74 | comb->n = 1; | |
75 | comb->elts[0].val = elt; | |
76 | comb->elts[0].coef = double_int_one; | |
77 | } | |
78 | ||
/* Scales COMB by SCALE.  Multiplies the constant offset, every element
   coefficient, and (symbolically) the REST part.  */

void
aff_combination_scale (aff_tree *comb, double_int scale)
{
  unsigned i, j;

  scale = double_int_ext_for_comb (scale, comb);
  /* Scaling by one is a no-op.  */
  if (scale.is_one ())
    return;

  /* Scaling by zero annihilates the whole combination.  */
  if (scale.is_zero ())
    {
      aff_combination_zero (comb, comb->type);
      return;
    }

  comb->offset
    = double_int_ext_for_comb (scale * comb->offset, comb);
  /* Compact the element array in place: I scans, J writes.  */
  for (i = 0, j = 0; i < comb->n; i++)
    {
      double_int new_coef;

      new_coef
	= double_int_ext_for_comb (scale * comb->elts[i].coef, comb);
      /* A coefficient may become zero due to overflow.  Remove the zero
	 elements.  */
      if (new_coef.is_zero ())
	continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = comb->elts[i].val;
      j++;
    }
  comb->n = j;

  if (comb->rest)
    {
      /* Pointer combinations do their symbolic arithmetic in sizetype.  */
      tree type = comb->type;
      if (POINTER_TYPE_P (type))
	type = sizetype;
      if (comb->n < MAX_AFF_ELTS)
	{
	  /* A slot freed up above; move REST in as an ordinary element
	     carrying the scale as its coefficient.  */
	  comb->elts[comb->n].coef = scale;
	  comb->elts[comb->n].val = comb->rest;
	  comb->rest = NULL_TREE;
	  comb->n++;
	}
      else
	/* No room -- fold the multiplication into the REST tree.  */
	comb->rest = fold_build2 (MULT_EXPR, type, comb->rest,
				  double_int_to_tree (type, scale));
    }
}
131 | ||
/* Adds ELT * SCALE to COMB.  If ELT already occurs in COMB, its coefficient
   is adjusted; otherwise a new element is appended, spilling into REST when
   the element array is full.  */

void
aff_combination_add_elt (aff_tree *comb, tree elt, double_int scale)
{
  unsigned i;
  tree type;

  scale = double_int_ext_for_comb (scale, comb);
  /* Adding 0 * ELT changes nothing.  */
  if (scale.is_zero ())
    return;

  /* Try to merge with an existing element with the same value.  */
  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, elt, 0))
      {
	double_int new_coef;

	new_coef = comb->elts[i].coef + scale;
	new_coef = double_int_ext_for_comb (new_coef, comb);
	if (!new_coef.is_zero ())
	  {
	    comb->elts[i].coef = new_coef;
	    return;
	  }

	/* Coefficients cancelled -- remove the element by moving the
	   last one into its slot.  */
	comb->n--;
	comb->elts[i] = comb->elts[comb->n];

	if (comb->rest)
	  {
	    /* A slot is now free; pull REST back in with coefficient one.
	       REST can only be non-null when the array was full.  */
	    gcc_assert (comb->n == MAX_AFF_ELTS - 1);
	    comb->elts[comb->n].coef = double_int_one;
	    comb->elts[comb->n].val = comb->rest;
	    comb->rest = NULL_TREE;
	    comb->n++;
	  }
	return;
      }
  /* ELT not present yet; append if there is room.  */
  if (comb->n < MAX_AFF_ELTS)
    {
      comb->elts[comb->n].coef = scale;
      comb->elts[comb->n].val = elt;
      comb->n++;
      return;
    }

  /* Element array is full -- fold ELT * SCALE into the REST tree.
     Pointer combinations do symbolic arithmetic in sizetype.  */
  type = comb->type;
  if (POINTER_TYPE_P (type))
    type = sizetype;

  if (scale.is_one ())
    elt = fold_convert (type, elt);
  else
    elt = fold_build2 (MULT_EXPR, type,
		       fold_convert (type, elt),
		       double_int_to_tree (type, scale));

  if (comb->rest)
    comb->rest = fold_build2 (PLUS_EXPR, type, comb->rest,
			      elt);
  else
    comb->rest = elt;
}
195 | ||
7e2ac86c ZD |
196 | /* Adds CST to C. */ |
197 | ||
198 | static void | |
199 | aff_combination_add_cst (aff_tree *c, double_int cst) | |
200 | { | |
27bcd47c | 201 | c->offset = double_int_ext_for_comb (c->offset + cst, c); |
7e2ac86c ZD |
202 | } |
203 | ||
73f30c63 ZD |
204 | /* Adds COMB2 to COMB1. */ |
205 | ||
206 | void | |
207 | aff_combination_add (aff_tree *comb1, aff_tree *comb2) | |
208 | { | |
209 | unsigned i; | |
210 | ||
7e2ac86c | 211 | aff_combination_add_cst (comb1, comb2->offset); |
73f30c63 ZD |
212 | for (i = 0; i < comb2->n; i++) |
213 | aff_combination_add_elt (comb1, comb2->elts[i].val, comb2->elts[i].coef); | |
214 | if (comb2->rest) | |
215 | aff_combination_add_elt (comb1, comb2->rest, double_int_one); | |
216 | } | |
217 | ||
/* Converts affine combination COMB to TYPE.  */

void
aff_combination_convert (aff_tree *comb, tree type)
{
  unsigned i, j;
  tree comb_type = comb->type;

  /* Widening cannot be done term-by-term (the truncation in the narrow
     type is significant), so rebuild the tree and re-parse it in the
     wider type.  */
  if (TYPE_PRECISION (type) > TYPE_PRECISION (comb_type))
    {
      tree val = fold_convert (type, aff_combination_to_tree (comb));
      tree_to_aff_combination (val, type, comb);
      return;
    }

  comb->type = type;
  /* REST of a pointer combination is already in sizetype, so only
     convert it for non-pointer targets.  */
  if (comb->rest && !POINTER_TYPE_P (type))
    comb->rest = fold_convert (type, comb->rest);

  /* Same precision: nothing else changes.  */
  if (TYPE_PRECISION (type) == TYPE_PRECISION (comb_type))
    return;

  /* Narrowing: re-extend the offset and the coefficients, dropping
     elements whose coefficient becomes zero (I scans, J writes).  */
  comb->offset = double_int_ext_for_comb (comb->offset, comb);
  for (i = j = 0; i < comb->n; i++)
    {
      double_int new_coef = double_int_ext_for_comb (comb->elts[i].coef, comb);
      if (new_coef.is_zero ())
	continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = fold_convert (type, comb->elts[i].val);
      j++;
    }

  comb->n = j;
  /* If dropping elements freed a slot, pull REST back into the array.  */
  if (comb->n < MAX_AFF_ELTS && comb->rest)
    {
      comb->elts[comb->n].coef = double_int_one;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}
260 | ||
/* Splits EXPR into an affine combination of parts, stored in COMB in
   type TYPE.  Recurses through additive/multiplicative structure;
   anything unrecognized becomes a single opaque element.  */

void
tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
{
  aff_tree tmp;
  enum tree_code code;
  tree cst, core, toffset;
  HOST_WIDE_INT bitpos, bitsize;
  enum machine_mode mode;
  int unsignedp, volatilep;

  STRIP_NOPS (expr);

  code = TREE_CODE (expr);
  switch (code)
    {
    case INTEGER_CST:
      aff_combination_const (comb, type, tree_to_double_int (expr));
      return;

    case POINTER_PLUS_EXPR:
      /* Pointer base in TYPE, byte offset in sizetype.  */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    case PLUS_EXPR:
    case MINUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), type, &tmp);
      /* a - b == a + (-1) * b.  */
      if (code == MINUS_EXPR)
	aff_combination_scale (&tmp, double_int_minus_one);
      aff_combination_add (comb, &tmp);
      return;

    case MULT_EXPR:
      /* Only multiplication by a constant stays affine.  */
      cst = TREE_OPERAND (expr, 1);
      if (TREE_CODE (cst) != INTEGER_CST)
	break;
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, tree_to_double_int (cst));
      return;

    case NEGATE_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, double_int_minus_one);
      return;

    case BIT_NOT_EXPR:
      /* ~x = -x - 1 */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, double_int_minus_one);
      aff_combination_add_cst (comb, double_int_minus_one);
      return;

    case ADDR_EXPR:
      /* Handle &MEM[ptr + CST] which is equivalent to POINTER_PLUS_EXPR.  */
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == MEM_REF)
	{
	  expr = TREE_OPERAND (expr, 0);
	  tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
	  tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
	  aff_combination_add (comb, &tmp);
	  return;
	}
      /* Decompose the address into base object, bit position and
	 variable offset.  */
      core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
				  &toffset, &mode, &unsignedp, &volatilep,
				  false);
      /* A bit-level position cannot be expressed as a byte offset.  */
      if (bitpos % BITS_PER_UNIT != 0)
	break;
      aff_combination_const (comb, type,
			     double_int::from_uhwi (bitpos / BITS_PER_UNIT));
      core = build_fold_addr_expr (core);
      if (TREE_CODE (core) == ADDR_EXPR)
	aff_combination_add_elt (comb, core, double_int_one);
      else
	{
	  tree_to_aff_combination (core, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      if (toffset)
	{
	  tree_to_aff_combination (toffset, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      return;

    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
	/* MEM[&x + off]: decompose the address expression itself.  */
	tree_to_aff_combination (TREE_OPERAND (TREE_OPERAND (expr, 0), 0),
				 type, comb);
      else if (integer_zerop (TREE_OPERAND (expr, 1)))
	{
	  /* Zero offset: the MEM_REF is a single opaque element.  */
	  aff_combination_elt (comb, type, expr);
	  return;
	}
      else
	/* Split off the constant offset: use a zero-offset MEM_REF as
	   the element and add the offset separately below.  */
	aff_combination_elt (comb, type,
			     build2 (MEM_REF, TREE_TYPE (expr),
				     TREE_OPERAND (expr, 0),
				     build_int_cst
				     (TREE_TYPE (TREE_OPERAND (expr, 1)), 0)));
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    default:
      break;
    }

  /* Fallback: treat EXPR as a single opaque element.  */
  aff_combination_elt (comb, type, expr);
}
374 | ||
/* Creates EXPR + ELT * SCALE in TYPE.  EXPR is taken from affine
   combination COMB (and may be NULL, meaning there is no accumulated
   expression yet).  Pointer-typed operands are combined with
   POINTER_PLUS_EXPR; all other arithmetic happens in TYPE1 (sizetype
   when TYPE is a pointer type).  */

static tree
add_elt_to_tree (tree expr, tree type, tree elt, double_int scale,
		 aff_tree *comb)
{
  enum tree_code code;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  scale = double_int_ext_for_comb (scale, comb);

  /* -1 * pointer: negate as an offset so the pointer-plus paths below
     can be used with scale one.  */
  if (scale.is_minus_one ()
      && POINTER_TYPE_P (TREE_TYPE (elt)))
    {
      elt = convert_to_ptrofftype (elt);
      elt = fold_build1 (NEGATE_EXPR, TREE_TYPE (elt), elt);
      scale = double_int_one;
    }

  if (scale.is_one ())
    {
      if (!expr)
	{
	  /* First term: just ELT itself (converted unless it is
	     already a pointer).  */
	  if (POINTER_TYPE_P (TREE_TYPE (elt)))
	    return elt;
	  else
	    return fold_convert (type1, elt);
	}

      if (POINTER_TYPE_P (TREE_TYPE (expr)))
	return fold_build_pointer_plus (expr, elt);
      if (POINTER_TYPE_P (TREE_TYPE (elt)))
	return fold_build_pointer_plus (elt, expr);
      return fold_build2 (PLUS_EXPR, type1,
			  expr, fold_convert (type1, elt));
    }

  if (scale.is_minus_one ())
    {
      if (!expr)
	return fold_build1 (NEGATE_EXPR, type1,
			    fold_convert (type1, elt));

      if (POINTER_TYPE_P (TREE_TYPE (expr)))
	{
	  /* pointer - elt: add the negated offset.  */
	  elt = convert_to_ptrofftype (elt);
	  elt = fold_build1 (NEGATE_EXPR, TREE_TYPE (elt), elt);
	  return fold_build_pointer_plus (expr, elt);
	}
      return fold_build2 (MINUS_EXPR, type1,
			  expr, fold_convert (type1, elt));
    }

  /* General scale: build ELT * SCALE.  */
  elt = fold_convert (type1, elt);
  if (!expr)
    return fold_build2 (MULT_EXPR, type1, elt,
			double_int_to_tree (type1, scale));

  /* Emit x - elt * |scale| rather than x + elt * scale for negative
     scales, for readability of the generated trees.  */
  if (scale.is_negative ())
    {
      code = MINUS_EXPR;
      scale = -scale;
    }
  else
    code = PLUS_EXPR;

  elt = fold_build2 (MULT_EXPR, type1, elt,
		     double_int_to_tree (type1, scale));
  if (POINTER_TYPE_P (TREE_TYPE (expr)))
    {
      /* POINTER_PLUS_EXPR has no MINUS form; negate the offset.  */
      if (code == MINUS_EXPR)
	elt = fold_build1 (NEGATE_EXPR, type1, elt);
      return fold_build_pointer_plus (expr, elt);
    }
  return fold_build2 (code, type1, expr, elt);
}
454 | ||
/* Makes a tree from the affine combination COMB.  */

tree
aff_combination_to_tree (aff_tree *comb)
{
  tree type = comb->type;
  tree expr = NULL_TREE;
  unsigned i;
  double_int off, sgn;
  /* Offsets of pointer combinations are built in sizetype.  */
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  /* REST may only be non-null when the element array is full.  */
  gcc_assert (comb->n == MAX_AFF_ELTS || comb->rest == NULL_TREE);

  for (i = 0; i < comb->n; i++)
    expr = add_elt_to_tree (expr, type, comb->elts[i].val, comb->elts[i].coef,
			    comb);

  if (comb->rest)
    expr = add_elt_to_tree (expr, type, comb->rest, double_int_one, comb);

  /* Ensure that we get x - 1, not x + (-1) or x + 0xff..f if x is
     unsigned.  */
  if (comb->offset.is_negative ())
    {
      off = -comb->offset;
      sgn = double_int_minus_one;
    }
  else
    {
      off = comb->offset;
      sgn = double_int_one;
    }
  /* Add |offset| with coefficient +/-1, so add_elt_to_tree emits a
     PLUS_EXPR or MINUS_EXPR as appropriate.  */
  return add_elt_to_tree (expr, type, double_int_to_tree (type1, off), sgn,
			  comb);
}
492 | ||
493 | /* Copies the tree elements of COMB to ensure that they are not shared. */ | |
494 | ||
495 | void | |
496 | unshare_aff_combination (aff_tree *comb) | |
497 | { | |
498 | unsigned i; | |
499 | ||
500 | for (i = 0; i < comb->n; i++) | |
501 | comb->elts[i].val = unshare_expr (comb->elts[i].val); | |
502 | if (comb->rest) | |
503 | comb->rest = unshare_expr (comb->rest); | |
504 | } | |
505 | ||
/* Removes the M-th element from COMB, replacing it by the last one and,
   if a slot becomes free, pulling REST back into the element array.  */

void
aff_combination_remove_elt (aff_tree *comb, unsigned m)
{
  comb->n--;
  /* Swap-with-last removal; skipped when M was itself the last slot.  */
  if (m <= comb->n)
    comb->elts[m] = comb->elts[comb->n];
  if (comb->rest)
    {
      /* Re-house REST as an ordinary element with coefficient one.  */
      comb->elts[comb->n].coef = double_int_one;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}
7e2ac86c ZD |
522 | |
/* Adds C * COEF * VAL to R.  VAL may be NULL, in that case only
   C * COEF is added to R.  */

static void
aff_combination_add_product (aff_tree *c, double_int coef, tree val,
			     aff_tree *r)
{
  unsigned i;
  tree aval, type;

  /* Distribute: each element of C multiplied by VAL keeps its
     coefficient scaled by COEF.  */
  for (i = 0; i < c->n; i++)
    {
      aval = c->elts[i].val;
      if (val)
	{
	  type = TREE_TYPE (aval);
	  aval = fold_build2 (MULT_EXPR, type, aval,
			      fold_convert (type, val));
	}

      aff_combination_add_elt (r, aval, coef * c->elts[i].coef);
    }

  /* REST has an implicit coefficient of one, so it contributes
     REST * VAL * COEF.  */
  if (c->rest)
    {
      aval = c->rest;
      if (val)
	{
	  type = TREE_TYPE (aval);
	  aval = fold_build2 (MULT_EXPR, type, aval,
			      fold_convert (type, val));
	}

      aff_combination_add_elt (r, aval, coef);
    }

  /* The constant offset of C contributes offset * COEF * VAL --
     an element when VAL is given, a pure constant otherwise.  */
  if (val)
    aff_combination_add_elt (r, val, coef * c->offset);
  else
    aff_combination_add_cst (r, coef * c->offset);
}
565 | ||
/* Multiplies C1 by C2, storing the result to R.  Both combinations must
   have the same precision.  */

void
aff_combination_mult (aff_tree *c1, aff_tree *c2, aff_tree *r)
{
  unsigned i;
  gcc_assert (TYPE_PRECISION (c1->type) == TYPE_PRECISION (c2->type));

  aff_combination_zero (r, c1->type);

  /* R = sum over terms t of C2 of (C1 * t), distributing C1 across
     C2's elements, REST and constant offset.  */
  for (i = 0; i < c2->n; i++)
    aff_combination_add_product (c1, c2->elts[i].coef, c2->elts[i].val, r);
  if (c2->rest)
    aff_combination_add_product (c1, double_int_one, c2->rest, r);
  aff_combination_add_product (c1, c2->offset, NULL, r);
}
bbc8a8dc ZD |
582 | |
583 | /* Returns the element of COMB whose value is VAL, or NULL if no such | |
584 | element exists. If IDX is not NULL, it is set to the index of VAL in | |
585 | COMB. */ | |
b8698a0f | 586 | |
bbc8a8dc ZD |
587 | static struct aff_comb_elt * |
588 | aff_combination_find_elt (aff_tree *comb, tree val, unsigned *idx) | |
589 | { | |
590 | unsigned i; | |
591 | ||
592 | for (i = 0; i < comb->n; i++) | |
593 | if (operand_equal_p (comb->elts[i].val, val, 0)) | |
594 | { | |
595 | if (idx) | |
596 | *idx = i; | |
597 | ||
598 | return &comb->elts[i]; | |
599 | } | |
600 | ||
601 | return NULL; | |
602 | } | |
603 | ||
/* Element of the cache that maps ssa name NAME to its expanded form
   as an affine expression EXPANSION.  */

struct name_expansion
{
  /* The cached affine expansion of the name.  */
  aff_tree expansion;

  /* True if the expansion for the name is just being generated; used to
     detect cycles while following SSA definitions.  */
  unsigned in_progress : 1;
};
614 | ||
72425608 ZD |
/* Expands SSA names in COMB recursively.  CACHE is used to cache the
   results (it maps each expanded name to its struct name_expansion),
   and is created lazily on first use.  */

void
aff_combination_expand (aff_tree *comb ATTRIBUTE_UNUSED,
			struct pointer_map_t **cache ATTRIBUTE_UNUSED)
{
  unsigned i;
  aff_tree to_add, current, curre;
  tree e, rhs;
  gimple def;
  double_int scale;
  void **slot;
  struct name_expansion *exp;

  aff_combination_zero (&to_add, comb->type);
  for (i = 0; i < comb->n; i++)
    {
      tree type, name;
      enum tree_code code;

      e = comb->elts[i].val;
      type = TREE_TYPE (e);
      name = e;
      /* Look through some conversions.  */
      if (TREE_CODE (e) == NOP_EXPR
	  && (TYPE_PRECISION (type)
	      >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (e, 0)))))
	name = TREE_OPERAND (e, 0);
      /* Only SSA names with a single-assignment definition can be
	 expanded further.  */
      if (TREE_CODE (name) != SSA_NAME)
	continue;
      def = SSA_NAME_DEF_STMT (name);
      if (!is_gimple_assign (def) || gimple_assign_lhs (def) != name)
	continue;

      code = gimple_assign_rhs_code (def);
      if (code != SSA_NAME
	  && !IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
	  && (get_gimple_rhs_class (code) != GIMPLE_SINGLE_RHS
	      || !is_gimple_min_invariant (gimple_assign_rhs1 (def))))
	continue;

      /* We do not know whether the reference retains its value at the
	 place where the expansion is used.  */
      if (TREE_CODE_CLASS (code) == tcc_reference)
	continue;

      if (!*cache)
	*cache = pointer_map_create ();
      slot = pointer_map_insert (*cache, e);
      exp = (struct name_expansion *) *slot;

      if (!exp)
	{
	  /* Not cached yet; mark the slot in-progress before recursing
	     so that cycles are caught by the assertion below.  */
	  exp = XNEW (struct name_expansion);
	  exp->in_progress = 1;
	  *slot = exp;
	  /* In principle this is a generally valid folding, but
	     it is not unconditionally an optimization, so do it
	     here and not in fold_unary.  */
	  /* Convert (T1)(X *+- CST) into (T1)X *+- (T1)CST if T1 is wider
	     than the type of X and overflow for the type of X is
	     undefined.  */
	  if (e != name
	      && INTEGRAL_TYPE_P (type)
	      && INTEGRAL_TYPE_P (TREE_TYPE (name))
	      && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (name))
	      && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (name))
	      && (code == PLUS_EXPR || code == MINUS_EXPR || code == MULT_EXPR)
	      && TREE_CODE (gimple_assign_rhs2 (def)) == INTEGER_CST)
	    rhs = fold_build2 (code, type,
			       fold_convert (type, gimple_assign_rhs1 (def)),
			       fold_convert (type, gimple_assign_rhs2 (def)));
	  else
	    {
	      rhs = gimple_assign_rhs_to_tree (def);
	      if (e != name)
		rhs = fold_convert (type, rhs);
	    }
	  tree_to_aff_combination_expand (rhs, comb->type, &current, cache);
	  exp->expansion = current;
	  exp->in_progress = 0;
	}
      else
	{
	  /* Since we follow the definitions in the SSA form, we should not
	     enter a cycle unless we pass through a phi node.  */
	  gcc_assert (!exp->in_progress);
	  current = exp->expansion;
	}

      /* Accumulate the new terms to TO_ADD, so that we do not modify
	 COMB while traversing it; include the term -coef * E, to remove
	 it from COMB.  */
      scale = comb->elts[i].coef;
      aff_combination_zero (&curre, comb->type);
      aff_combination_add_elt (&curre, e, -scale);
      aff_combination_scale (&current, scale);
      aff_combination_add (&to_add, &current);
      aff_combination_add (&to_add, &curre);
    }
  aff_combination_add (comb, &to_add);
}
718 | ||
72425608 ZD |
/* Similar to tree_to_aff_combination, but follows SSA name definitions
   and expands them recursively.  CACHE is used to cache the expansions
   of the ssa names, to avoid exponential time complexity for cases
   like

   a1 = a0 + a0;
   a2 = a1 + a1;
   a3 = a2 + a2;
   ...  */

void
tree_to_aff_combination_expand (tree expr, tree type, aff_tree *comb,
				struct pointer_map_t **cache)
{
  /* First decompose EXPR syntactically, then replace SSA-name elements
     by their definitions.  */
  tree_to_aff_combination (expr, type, comb);
  aff_combination_expand (comb, cache);
}
736 | ||
bbc8a8dc ZD |
737 | /* Frees memory occupied by struct name_expansion in *VALUE. Callback for |
738 | pointer_map_traverse. */ | |
739 | ||
740 | static bool | |
ac7d7749 | 741 | free_name_expansion (const void *key ATTRIBUTE_UNUSED, void **value, |
bbc8a8dc ZD |
742 | void *data ATTRIBUTE_UNUSED) |
743 | { | |
3d9a9f94 | 744 | struct name_expansion *const exp = (struct name_expansion *) *value; |
bbc8a8dc ZD |
745 | |
746 | free (exp); | |
747 | return true; | |
748 | } | |
749 | ||
750 | /* Frees memory allocated for the CACHE used by | |
751 | tree_to_aff_combination_expand. */ | |
752 | ||
753 | void | |
754 | free_affine_expand_cache (struct pointer_map_t **cache) | |
755 | { | |
756 | if (!*cache) | |
757 | return; | |
758 | ||
759 | pointer_map_traverse (*cache, free_name_expansion, NULL); | |
760 | pointer_map_destroy (*cache); | |
761 | *cache = NULL; | |
762 | } | |
763 | ||
/* If VAL != CST * DIV for any constant CST, returns false.
   Otherwise, if *MULT_SET is true, additionally compares CST and MULT,
   and if they are different, returns false.  Finally, if neither of these
   two cases occur, true is returned, and CST is stored to MULT and MULT_SET
   is set to true.  */

static bool
double_int_constant_multiple_p (double_int val, double_int div,
				bool *mult_set, double_int *mult)
{
  double_int rem, cst;

  /* 0 is a multiple of anything, with factor 0; it must agree with a
     previously recorded factor, if any.  */
  if (val.is_zero ())
    {
      if (*mult_set && !mult->is_zero ())
	return false;
      *mult_set = true;
      *mult = double_int_zero;
      return true;
    }

  /* Nonzero VAL can never be a multiple of zero.  */
  if (div.is_zero ())
    return false;

  /* VAL must divide exactly.  */
  cst = val.sdivmod (div, FLOOR_DIV_EXPR, &rem);
  if (!rem.is_zero ())
    return false;

  /* The factor must match any previously recorded one.  */
  if (*mult_set && *mult != cst)
    return false;

  *mult_set = true;
  *mult = cst;
  return true;
}
799 | ||
/* Returns true if VAL = X * DIV for some constant X.  If this is the case,
   X is stored to MULT.  */

bool
aff_combination_constant_multiple_p (aff_tree *val, aff_tree *div,
				     double_int *mult)
{
  bool mult_set = false;
  unsigned i;

  /* The zero combination is a multiple of anything, with factor 0.  */
  if (val->n == 0 && val->offset.is_zero ())
    {
      *mult = double_int_zero;
      return true;
    }
  /* Both combinations must have the same set of elements.  */
  if (val->n != div->n)
    return false;

  /* Symbolic REST parts cannot be compared for divisibility.  */
  if (val->rest || div->rest)
    return false;

  /* The offsets and all matching coefficients must share one common
     factor, accumulated in MULT.  */
  if (!double_int_constant_multiple_p (val->offset, div->offset,
				       &mult_set, mult))
    return false;

  for (i = 0; i < div->n; i++)
    {
      struct aff_comb_elt *elt
	= aff_combination_find_elt (val, div->elts[i].val, NULL);
      if (!elt)
	return false;
      if (!double_int_constant_multiple_p (elt->coef, div->elts[i].coef,
					   &mult_set, mult))
	return false;
    }

  /* A nonzero VAL guarantees at least one factor was recorded.  */
  gcc_assert (mult_set);
  return true;
}
ea336dd5 AP |
839 | |
/* Prints the affine VAL to the FILE.  */

static void
print_aff (FILE *file, aff_tree *val)
{
  unsigned i;
  /* Constants of pointer combinations are printed as signed.  */
  bool uns = TYPE_UNSIGNED (val->type);
  if (POINTER_TYPE_P (val->type))
    uns = false;
  fprintf (file, "{\n  type = ");
  print_generic_expr (file, val->type, TDF_VOPS|TDF_MEMSYMS);
  fprintf (file, "\n  offset = ");
  dump_double_int (file, val->offset, uns);
  if (val->n > 0)
    {
      fprintf (file, "\n  elements = {\n");
      for (i = 0; i < val->n; i++)
	{
	  fprintf (file, "    [%d] = ", i);
	  print_generic_expr (file, val->elts[i].val, TDF_VOPS|TDF_MEMSYMS);

	  fprintf (file, " * ");
	  dump_double_int (file, val->elts[i].coef, uns);
	  if (i != val->n - 1)
	    fprintf (file, ", \n");
	}
      fprintf (file, "\n  }");
  }
  if (val->rest)
    {
      fprintf (file, "\n  rest = ");
      print_generic_expr (file, val->rest, TDF_VOPS|TDF_MEMSYMS);
    }
  fprintf (file, "\n}");
}
875 | ||
876 | /* Prints the affine VAL to the standard error, used for debugging. */ | |
877 | ||
24e47c76 | 878 | DEBUG_FUNCTION void |
ea336dd5 AP |
879 | debug_aff (aff_tree *val) |
880 | { | |
881 | print_aff (stderr, val); | |
882 | fprintf (stderr, "\n"); | |
883 | } | |
72425608 | 884 | |
be8c1c8c BC |
/* Computes address of the reference REF in ADDR.  The size of the accessed
   location is stored to SIZE.  Returns the ultimate containing object to
   which REF refers.  */

tree
get_inner_reference_aff (tree ref, aff_tree *addr, double_int *size)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree toff;
  enum machine_mode mode;
  int uns, vol;
  aff_tree tmp;
  /* Decompose REF into base object, bit position and variable offset.  */
  tree base = get_inner_reference (ref, &bitsize, &bitpos, &toff, &mode,
				   &uns, &vol, false);
  tree base_addr = build_fold_addr_expr (base);

  /* ADDR = &BASE + TOFF + BITPOS / BITS_PER_UNIT.  */

  tree_to_aff_combination (base_addr, sizetype, addr);

  if (toff)
    {
      tree_to_aff_combination (toff, sizetype, &tmp);
      aff_combination_add (addr, &tmp);
    }

  aff_combination_const (&tmp, sizetype,
			 double_int::from_shwi (bitpos / BITS_PER_UNIT));
  aff_combination_add (addr, &tmp);

  /* Size in bytes, rounding any partial byte up.  */
  *size = double_int::from_shwi ((bitsize + BITS_PER_UNIT - 1) / BITS_PER_UNIT);

  return base;
}
919 | ||
02f5d6c5 RG |
/* Returns true if a region of size SIZE1 at position 0 and a region of
   size SIZE2 at position DIFF cannot overlap.  */

bool
aff_comb_cannot_overlap_p (aff_tree *diff, double_int size1, double_int size2)
{
  double_int d, bound;

  /* Unless the difference is a constant, we fail.  */
  if (diff->n != 0)
    return false;

  d = diff->offset;
  if (d.is_negative ())
    {
      /* The second object is before the first one, we succeed if the last
	 element of the second object is before the start of the first one.  */
      bound = d + size2 + double_int_minus_one;
      return bound.is_negative ();
    }
  else
    {
      /* We succeed if the second object starts after the first one ends.  */
      return size1.sle (d);
    }
}
946 |